config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.quantization_config.llm_int8_threshold,config.backend.quantization_config.load_in_8bit,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,config.backend.quantization_config.load_in_4bit,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.thro
ughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpf_cr4yb8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9y8l_0ry/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpnff0r8c1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1185.23904,5163.057152,0.0,4760.53504,4751.079424,s,1,14.94036328125,14.94036328125,0.0,14.94036328125,14.94036328125,14.94036328125,14.94036328125,[14.94036328125],,kWh,0.0002182510419249714,2.406748612108059e-05,7.40345036719936e-05,0.0003163530317180456,,MB,1420.34944,6037.569536,0.0,5622.464512,5351.551488,s,10,10.14493957519531,1.014493957519531,0.004734176680295073,1.0142422485351563,1.0201883666992186,1.02152353515625,1.022591669921875,"[1.005478759765625, 1.0106629638671876, 1.014055908203125, 1.016022216796875, 1.0102464599609375, 1.0173555297851562, 1.0144285888671876, 1.0198916625976562, 1.0139387817382812, 1.0228587036132812]",tokens/s,252.34255768849314,kWh,2.949806521084307e-05,3.2530037151633017e-06,1.9563210094997174e-05,5.2314279021003555e-05,tokens/kWh,4893501.445317044,MB,1445.51936,6039.666688,0.0,5622.464512,5351.554048,s,10,52.9172783203125,5.29172783203125,0.0034535335675357527,5.291695556640625,5.295021484375,5.29724951171875,5.29903193359375,"[5.2994775390625, 5.2945263671875, 5.29284814453125, 5.29277001953125, 5.2920380859375, 5.28944287109375, 5.2885126953125, 5.29135302734375, 5.2901181640625, 5.28619140625]",tokens/s,11.905374199076526,kWh,0.0001546980114078936,1.706448688435445e-05,9.969277419860645e-05,0.00027145527249085457,tokens/kWh,232082.43266714405,,s,630,52.91357545471189,0.08398980230906654,0.001441532851248458,0.08377779388427734,0.0842175163269043,0.08469334907531738,0.09434643432617187,"[0.0945827865600586, 0.08477069091796875, 0.08407667541503906, 0.08369107055664063, 0.08349683380126953, 0.08351702117919922, 0.08362608337402344, 0.08406105804443359, 0.08435507202148437, 0.08355990600585937, 0.08374736022949218, 0.08347238159179687, 0.08353382110595703, 0.08368742370605468, 0.08346969604492188, 0.08391449737548828, 0.08369036865234375, 0.08366899108886719, 0.08369766235351563, 0.0837592010498047, 0.08380406188964844, 0.08363549041748047, 0.08367382049560547, 0.08363008117675781, 0.08373452758789063, 
0.0836909408569336, 0.08372799682617188, 0.08399139404296875, 0.08400086212158203, 0.08370381164550782, 0.08387145233154297, 0.08424211120605468, 0.08366960144042969, 0.08468041229248047, 0.08393865966796875, 0.08392131042480469, 0.08369967651367187, 0.08390681457519532, 0.0837799072265625, 0.08410320281982422, 0.08466185760498048, 0.08383321380615234, 0.08409292602539062, 0.08450457763671874, 0.08405117034912109, 0.08413868713378907, 0.08381244659423828, 0.08399462127685547, 0.08392499542236329, 0.08393318176269532, 0.0841352310180664, 0.08410294342041015, 0.08389453125, 0.08392976379394532, 0.08401443481445313, 0.08518109130859375, 0.084106689453125, 0.08414598083496094, 0.08397881317138672, 0.08413817596435547, 0.0842977294921875, 0.08405769348144532, 0.08440370941162109, 0.09423753356933594, 0.08462070465087891, 0.08399113464355469, 0.08378880310058594, 0.08447020721435547, 0.08397471618652344, 0.08350080108642578, 0.08336204528808594, 0.08363212585449219, 0.08364441680908204, 0.08346137237548829, 0.084015869140625, 0.08349273681640625, 0.08390457916259765, 0.08372230529785156, 0.08355958557128906, 0.0835654067993164, 0.08356044769287109, 0.08351251220703125, 0.08342508697509765, 0.08362086486816406, 0.08356044769287109, 0.08358000183105468, 0.08405289459228515, 0.08375929260253906, 0.08372310638427734, 0.08373551940917968, 0.08385536193847656, 0.08421536254882812, 0.08385478210449218, 0.08366566467285157, 0.08430966186523438, 0.08381507110595703, 0.08412767791748046, 0.08380950164794922, 0.08392755126953125, 0.08376930999755859, 0.08384559631347656, 0.08383209228515626, 0.08372077178955079, 0.08408182525634765, 0.08419538879394531, 0.08387049865722657, 0.08377139282226563, 0.08388813018798828, 0.08432025909423828, 0.08409487915039063, 0.0839017562866211, 0.08389097595214844, 0.0840293731689453, 0.083714111328125, 0.08377458953857422, 0.0839210205078125, 0.08380105590820312, 0.08391401672363281, 0.08397055816650391, 0.08398400115966796, 0.08414473724365235, 0.08407017517089843, 0.08406425476074218, 0.0841944351196289, 0.08422694396972656, 0.08411750030517579, 0.09439091491699218, 0.0845758056640625, 0.08477945709228515, 0.0837152328491211, 0.08348684692382813, 0.08332361602783203, 0.08358220672607422, 0.08333123016357422, 0.08336649322509766, 0.08342111968994141, 0.08349702453613281, 0.08349696350097656, 0.08333910369873047, 0.0838054428100586, 0.08349378967285156, 0.08336486053466798, 0.08349737548828125, 0.08341974639892578, 0.08354611206054688, 0.08359049224853515, 0.08368339538574218, 0.08342556762695312, 0.08354847717285156, 0.08391065979003906, 0.0835072021484375, 0.08361779022216796, 0.08360150146484376, 0.08357286071777344, 0.08364009857177734, 0.08365055847167968, 0.08383663940429688, 0.08387203216552734, 0.08390166473388672, 0.08374761962890626, 0.08363593292236328, 0.08374301147460937, 0.08461052703857422, 0.08370639801025391, 0.08380006408691407, 0.08364646148681641, 0.08378163146972656, 0.0835788803100586, 0.08398400115966796, 0.0838611831665039, 0.08379052734375, 0.08370790100097657, 0.08385536193847656, 0.08387407684326172, 0.08610684967041016, 0.08419395446777343, 0.08391414642333984, 0.0837825927734375, 0.08387789154052734, 0.08384102630615234, 0.08428348541259766, 0.08399247741699219, 0.08406425476074218, 0.08391302490234374, 0.08393421173095703, 0.0840035171508789, 0.08655667114257813, 0.08399871826171874, 0.08389945220947266, 0.09507635498046875, 0.08472774505615234, 0.0838881607055664, 0.0839618911743164, 0.08361090850830079, 0.08335024261474609, 
0.08346553802490235, 0.08335619354248047, 0.08349929809570313, 0.0835578842163086, 0.08343795013427735, 0.08342098999023438, 0.08343939208984374, 0.08394502258300782, 0.08360361480712891, 0.08370191955566406, 0.08355641937255859, 0.08348915100097656, 0.08341718292236328, 0.08359497833251953, 0.08362217712402344, 0.08422585296630859, 0.08376707458496094, 0.08341133117675781, 0.08363625335693359, 0.08381148529052734, 0.08354029083251953, 0.08377008056640625, 0.08369337463378906, 0.08360313415527344, 0.0836344985961914, 0.08357170867919922, 0.08375958251953125, 0.0836480941772461, 0.08380716705322265, 0.08360902404785156, 0.08381292724609375, 0.08398438262939453, 0.08383487701416016, 0.08390860748291015, 0.08405840301513672, 0.0837624282836914, 0.08396438598632812, 0.08386265563964844, 0.08398230743408203, 0.08378460693359376, 0.08381581115722657, 0.084714111328125, 0.0838656005859375, 0.08384512329101562, 0.08371737670898438, 0.08412441253662109, 0.08395366668701172, 0.0839024658203125, 0.08397414398193359, 0.08394547271728516, 0.08412569427490234, 0.08451615905761718, 0.08449648284912109, 0.08439008331298828, 0.08389059448242188, 0.08406156921386719, 0.0849086685180664, 0.09357164764404297, 0.08494889831542969, 0.08417485046386719, 0.08353587341308594, 0.08365261077880859, 0.08344102478027343, 0.08329865264892578, 0.08339689636230468, 0.08343052673339844, 0.08368422698974609, 0.08338982391357422, 0.08329689788818359, 0.08352726745605468, 0.0835050277709961, 0.08382518768310547, 0.08346147155761718, 0.08371881866455078, 0.08496742248535157, 0.08398233795166016, 0.08339046478271485, 0.08365670776367187, 0.08347622680664063, 0.083521728515625, 0.0835864028930664, 0.08377827453613282, 0.08365817260742188, 0.08419795227050782, 0.08398809814453125, 0.08368128204345703, 0.08375062561035156, 0.08377747344970703, 0.08365129852294922, 0.08383283233642579, 0.08369152069091797, 0.08373977661132813, 0.08365350341796875, 0.0840273895263672, 0.08401622772216796, 0.08393596649169922, 0.08384719848632813, 0.083859619140625, 0.0839557113647461, 0.0838359375, 0.08398738861083985, 0.0844288330078125, 0.08399871826171874, 0.08389427185058594, 0.08367513275146485, 0.08403968048095703, 0.08389017486572266, 0.08399462127685547, 0.08384841918945313, 0.08384703826904297, 0.0837784652709961, 0.08396185302734376, 0.08388198089599609, 0.08383033752441406, 0.08395616149902344, 0.08397209930419922, 0.08408185577392578, 0.08433542633056641, 0.08498726654052734, 0.08397068786621094, 0.09345321655273438, 0.08472764587402344, 0.08399635314941406, 0.08359561920166016, 0.08326121520996094, 0.083279296875, 0.0834439697265625, 0.08347510528564453, 0.08337200164794922, 0.08372396850585938, 0.08374479675292969, 0.0833766098022461, 0.0834085464477539, 0.08345961761474609, 0.08334095764160156, 0.08344380950927735, 0.0846181411743164, 0.08356041717529297, 0.0834901123046875, 0.08361033630371094, 0.08386557006835937, 0.08377117156982422, 0.08349094390869141, 0.08355811309814454, 0.0835588150024414, 0.08351058959960937, 0.08363654327392578, 0.08363884735107421, 0.08358819580078125, 0.08355254364013671, 0.08365872192382813, 0.08359574127197265, 0.08370111846923828, 0.08376998138427734, 0.08374217224121094, 0.08371459197998046, 0.08390860748291015, 0.08368128204345703, 0.08363622283935547, 0.0836987533569336, 0.08377811431884766, 0.08369570922851563, 0.08377577972412109, 0.08373862457275391, 0.08382998657226562, 0.08388687896728515, 0.08384716796875, 0.0839024658203125, 0.08380316925048828, 0.08423693084716796, 0.0836918716430664, 
0.08391065979003906, 0.08393657684326172, 0.08507052612304687, 0.0837550048828125, 0.0839415054321289, 0.08411289978027343, 0.08421746826171875, 0.0839626235961914, 0.08397382354736328, 0.08413120269775391, 0.08479430389404297, 0.08542960357666016, 0.09631053161621093, 0.08476866912841798, 0.08404668426513671, 0.0835805435180664, 0.083233154296875, 0.0834867172241211, 0.08339228820800781, 0.08340092468261719, 0.0832142105102539, 0.08337430572509766, 0.08330025482177734, 0.0835317153930664, 0.08382061004638672, 0.08415801239013672, 0.0835506591796875, 0.08357443237304688, 0.08353826904296875, 0.08355020904541016, 0.08356034851074219, 0.08359232330322265, 0.08347698974609374, 0.08344624328613282, 0.08357628631591797, 0.08366134643554687, 0.08361984252929687, 0.08370585632324219, 0.08377859497070313, 0.08358191680908203, 0.08378777313232422, 0.08370995330810548, 0.08390962982177734, 0.08364102172851562, 0.08418656158447266, 0.08365068817138673, 0.08361241912841796, 0.08358707427978515, 0.08357628631591797, 0.08371663665771484, 0.08365987396240235, 0.08372077178955079, 0.08373452758789063, 0.08367318725585937, 0.08390035247802734, 0.08373709106445312, 0.08372614288330078, 0.08372361755371094, 0.0840257568359375, 0.08382694244384765, 0.08372745513916016, 0.08374979400634766, 0.08391606140136719, 0.08388272094726562, 0.08386121368408203, 0.08389004516601563, 0.08389673614501954, 0.0840571517944336, 0.0838153305053711, 0.08382864379882812, 0.08401699066162109, 0.08391299438476563, 0.08404991912841797, 0.0844257583618164, 0.08413692474365235, 0.09608396911621093, 0.08495718383789062, 0.08403721618652343, 0.08379638671875, 0.08331263732910156, 0.08357036590576172, 0.08336006164550781, 0.08333312225341796, 0.08336764526367188, 0.0832713623046875, 0.08332553863525391, 0.08343497467041015, 0.08325379180908203, 0.08337203216552734, 0.08358255767822266, 0.08333148956298828, 0.08362393951416015, 0.08365987396240235, 0.0835798110961914, 0.08457376098632813, 0.08341280364990235, 0.08332966613769531, 0.08352973175048828, 0.08337747192382812, 0.08366944122314453, 0.08354841613769531, 0.08371401977539063, 0.083674560546875, 0.08363683319091797, 0.08393523406982421, 0.08510857391357422, 0.08356060791015625, 0.08369766235351563, 0.08414173126220703, 0.08351273345947266, 0.0840098876953125, 0.08349289703369141, 0.08417279815673828, 0.08386969757080077, 0.08387702178955078, 0.08377225494384766, 0.08381059265136719, 0.08391241455078124, 0.08400041961669921, 0.08367731475830079, 0.08381462097167969, 0.08389017486572266, 0.08369264221191407, 0.08384604644775391, 0.08409430694580078, 0.08401484680175782, 0.08398941040039062, 0.08382028961181641, 0.08400303649902344, 0.08398339080810546, 0.08392806243896485, 0.08408064270019532, 0.0845660171508789, 0.08389222717285157, 0.08389401245117188, 0.08394163513183593, 0.08397414398193359, 0.08426700592041016, 0.09636598205566406, 0.08481648254394532, 0.0842619857788086, 0.08362681579589844, 0.08325660705566407, 0.08336227416992187, 0.0831982421875, 0.08331219482421875, 0.08327423858642578, 0.08321228790283203, 0.08336793518066406, 0.08340838623046876, 0.08340326690673829, 0.08329734039306641, 0.08344467163085938, 0.08342937469482421, 0.08348262023925782, 0.08331059265136719, 0.08354611206054688, 0.08351334381103516, 0.08336383819580079, 0.08341401672363281, 0.08348774719238282, 0.08364646148681641, 0.08355391693115234, 0.0837741470336914, 0.08365805053710937, 0.08369388580322265, 0.08380735778808594, 0.08355731201171875, 0.0847455062866211, 0.08398102569580078, 
0.08361984252929687, 0.0835525131225586, 0.0837930908203125, 0.08368800354003907, 0.08374886322021484, 0.08373270416259766, 0.08411087799072266, 0.08363033294677734, 0.0840273895263672, 0.08372659301757812, 0.08401462554931641, 0.08390188598632813, 0.08381520080566406, 0.0839208984375, 0.08388607788085937, 0.08391270446777344, 0.08397004699707031, 0.08451881408691406, 0.08404761505126954, 0.08395549011230469, 0.08401103973388672, 0.0840792007446289, 0.08392237091064453, 0.08395609283447265, 0.08405538940429688, 0.0840056610107422, 0.08421794891357422, 0.08403343963623047, 0.08409907531738281, 0.08405948638916015, 0.08415692901611328, 0.0952376937866211, 0.08470393371582031, 0.08439952087402344, 0.08358751678466797, 0.08350300598144532, 0.08343551635742187, 0.08337318420410156, 0.08375331115722656, 0.08346675109863282, 0.08346937561035156, 0.0833914566040039, 0.08376834869384765, 0.0833545913696289, 0.08346797180175781, 0.0834358367919922, 0.08335359954833985, 0.08338841247558594, 0.08343682861328125, 0.08359542083740235, 0.08343199920654297, 0.08335561370849609, 0.0835436782836914, 0.08344822692871094, 0.08346009826660156, 0.08340019226074219, 0.08383334350585937, 0.08410675048828126, 0.08367314910888672, 0.08364220428466797, 0.08354876708984375, 0.08364441680908204, 0.08399667358398437, 0.08366284942626953, 0.08379952239990235, 0.08360809326171875, 0.08376019287109375, 0.0835816650390625, 0.08364259338378906, 0.08357683563232422, 0.08359843444824219, 0.0837344970703125, 0.08372665405273437, 0.08378636932373047, 0.08383615875244141, 0.08392572784423828, 0.08383071899414063, 0.08404796600341796, 0.08471094512939453, 0.08387427520751953, 0.08381132507324218, 0.0837396469116211, 0.08386914825439454, 0.08401155090332031, 0.08377958679199218, 0.08384921264648437, 0.08401305389404297, 0.08377721405029297, 0.08390278625488282, 0.08376934051513672, 0.08388722991943359, 0.08377843475341797, 0.08387174224853515, 0.0838485107421875]",tokens/s,11.906207331220875,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1198.42816,10582.09792,0.0,10179.575808,10067.3536,s,1,22.42828125,22.42828125,0.0,22.42828125,22.42828125,22.42828125,22.42828125,[22.42828125],,kWh,0.0004372041403334757,4.821937248782358e-05,0.00014937817505800144,0.0006348016878793008,,MB,1424.93696,12521.96352,0.0,12106.858496,11264.997888,s,10,22.090213867187497,2.2090213867187503,0.010471384234128741,2.210894287109375,2.2203266845703125,2.221620983886719,2.222656423339844,"[2.18874658203125, 2.198097900390625, 2.199285400390625, 2.204990966796875, 2.210638671875, 2.216657958984375, 2.21114990234375, 2.217692138671875, 2.2200390625, 
2.222915283203125]",tokens/s,115.88842079082758,kWh,6.431290549539578e-05,7.0934506415022125e-06,4.2801284241006375e-05,0.00011420764037790438,tokens/kWh,2241531.2946919794,MB,1450.033152,12526.157824,0.0,12108.955648,11265.000448,s,10,107.49920507812499,10.7499205078125,0.008715339646672197,10.75004541015625,10.75592548828125,10.763210302734375,10.769038154296874,"[10.7506728515625, 10.7704951171875, 10.73638671875, 10.747787109375, 10.7409951171875, 10.754306640625, 10.7442392578125, 10.74941796875, 10.7538173828125, 10.7510869140625]",tokens/s,5.860508452524349,kWh,0.000314682098170427,3.4711838762016114e-05,0.00020861736133819255,0.0005580112982706357,tokens/kWh,112900.93981115948,,s,630,107.49460023498541,0.1706263495793418,0.0017908268108433622,0.17054641723632813,0.1715880157470703,0.17185429306030273,0.18206420257568362,"[0.18065388488769532, 0.16943327331542968, 0.16819815063476562, 0.167841796875, 0.16939622497558593, 0.16981385803222657, 0.1706907196044922, 0.17213031005859375, 0.16752230834960938, 0.1673502655029297, 0.16960511779785156, 0.1701312713623047, 0.17048594665527345, 0.17135206604003905, 0.1692569580078125, 0.16885328674316405, 0.1687779541015625, 0.17099501037597656, 0.17052668762207032, 0.17086717224121092, 0.1717986602783203, 0.16881219482421875, 0.1692512969970703, 0.17066557312011718, 0.1706766357421875, 0.17081503295898437, 0.1713704071044922, 0.16944320678710936, 0.1717480010986328, 0.17005158996582032, 0.17196441650390626, 0.17012265014648437, 0.1715779571533203, 0.16936959838867188, 0.17237811279296875, 0.16985087585449218, 0.17110546875, 0.17093305969238282, 0.1712578582763672, 0.1703321533203125, 0.1707841339111328, 0.17090623474121094, 0.1712168884277344, 0.16948019409179688, 0.17112835693359374, 0.17124729919433593, 0.1702665557861328, 0.17160482788085937, 0.1719910430908203, 0.16943513488769532, 0.17202482604980468, 0.17022218322753907, 0.17182252502441406, 0.1712076416015625, 0.17248255920410155, 0.1697643585205078, 0.16957081604003907, 0.17150157165527344, 0.17182106018066406, 0.1717760009765625, 0.17165251159667969, 0.17089801025390625, 0.17003109741210937, 0.18375750732421875, 0.1697568359375, 0.17092604064941405, 0.16946975708007814, 0.16835606384277343, 0.16970294189453125, 0.1716966094970703, 0.17013877868652344, 0.17063999938964844, 0.16950294494628906, 0.16841317749023438, 0.17092713928222655, 0.17006008911132814, 0.1715350036621094, 0.16995907592773438, 0.17164323425292968, 0.16960905456542968, 0.1718312072753906, 0.1694087677001953, 0.17232630920410155, 0.17023788452148436, 0.17056617736816407, 0.16985845947265624, 0.17130160522460938, 0.170485595703125, 0.17187855529785157, 0.17122099304199218, 0.1716155548095703, 0.16923103332519532, 0.17243136596679687, 0.1700287322998047, 0.1719544982910156, 0.1708257293701172, 0.17025584411621095, 0.17059689331054687, 0.170250244140625, 0.17245797729492188, 0.17038540649414063, 0.17094656372070313, 0.16899789428710937, 0.17151429748535157, 0.1717165069580078, 0.17115737915039062, 0.1711295623779297, 0.17107565307617187, 0.17126400756835938, 0.1688924102783203, 0.17185792541503905, 0.1710735321044922, 0.17131254577636718, 0.1716414337158203, 0.17056973266601563, 0.17266893005371095, 0.171546630859375, 0.17045408630371095, 0.17119500732421875, 0.17124557495117188, 0.1702914581298828, 0.17089337158203124, 0.1703810272216797, 0.1708751678466797, 0.17094390869140624, 0.17120045471191406, 0.1837431640625, 0.16877772521972656, 0.16783360290527344, 0.16942454528808593, 0.16817132568359375, 0.1700663299560547, 
0.17204591369628905, 0.16954342651367188, 0.1687945556640625, 0.16980320739746094, 0.17024490356445313, 0.1685419158935547, 0.17115586853027343, 0.17022157287597656, 0.16912733459472656, 0.16989039611816406, 0.17020518493652342, 0.168527099609375, 0.1688640899658203, 0.17146922302246093, 0.16985292053222656, 0.17050979614257813, 0.16868202209472657, 0.1706762237548828, 0.1708375701904297, 0.16973049926757813, 0.16981964111328124, 0.17030400085449218, 0.17020509338378906, 0.17032406616210938, 0.171399169921875, 0.17164399719238282, 0.17045497131347656, 0.17025471496582031, 0.16870448303222657, 0.16973426818847656, 0.17144195556640626, 0.17061871337890624, 0.17067193603515626, 0.16964639282226562, 0.1693534698486328, 0.17040771484375, 0.17081776428222656, 0.17032502746582032, 0.17119232177734375, 0.17025721740722657, 0.17052188110351563, 0.17071807861328125, 0.1703751678466797, 0.17125375366210938, 0.17084416198730468, 0.1704857635498047, 0.16930406188964844, 0.17063935852050782, 0.17040985107421874, 0.17170352172851563, 0.17024642944335938, 0.1704802551269531, 0.1701273651123047, 0.1703751678466797, 0.17165721130371095, 0.1711366424560547, 0.17135420227050782, 0.1838726348876953, 0.16901618957519532, 0.16958416748046876, 0.17036540222167967, 0.16971366882324218, 0.1706967010498047, 0.17056460571289062, 0.16989082336425781, 0.16984268188476562, 0.1689803466796875, 0.16991448974609374, 0.1701530303955078, 0.17073452758789062, 0.17071270751953124, 0.17022604370117186, 0.17052581787109375, 0.16821746826171874, 0.17027276611328124, 0.1703539276123047, 0.17052339172363282, 0.17077203369140626, 0.16839456176757814, 0.1682449951171875, 0.17081753540039063, 0.16979852294921874, 0.1711472625732422, 0.1702825927734375, 0.17034486389160156, 0.17021302795410156, 0.1706455078125, 0.17003289794921875, 0.17174700927734374, 0.16941055297851562, 0.16876431274414064, 0.17065907287597656, 0.17022828674316406, 0.170423583984375, 0.17102735900878907, 0.17081549072265625, 0.16969305419921876, 0.17004147338867187, 0.1702904052734375, 0.1710436096191406, 0.17089447021484375, 0.1699866180419922, 0.17017599487304688, 0.17090995788574218, 0.17052922058105469, 0.1713992919921875, 0.17129008483886718, 0.17131951904296874, 0.1705331573486328, 0.17086671447753907, 0.17044685363769532, 0.1717509765625, 0.1712310791015625, 0.1702836151123047, 0.17192892456054687, 0.17103634643554688, 0.17078073120117188, 0.17106211853027345, 0.1704878387451172, 0.1714749755859375, 0.18251898193359375, 0.16995960998535156, 0.16954637145996093, 0.1681776580810547, 0.16937164306640626, 0.17014962768554687, 0.17089356994628907, 0.17017372131347655, 0.16997654724121095, 0.16858522033691406, 0.16773939514160155, 0.17064317321777345, 0.17175120544433595, 0.16991690063476564, 0.17009190368652344, 0.16796739196777344, 0.16788070678710937, 0.17039295959472656, 0.17180892944335938, 0.17005206298828124, 0.1706999969482422, 0.1696612091064453, 0.16896966552734374, 0.1707321014404297, 0.17011712646484375, 0.17083389282226563, 0.16927500915527344, 0.16972227478027344, 0.16917503356933594, 0.1700720672607422, 0.16947955322265626, 0.1704776611328125, 0.17067062377929687, 0.17009036254882812, 0.16892941284179688, 0.17036595153808592, 0.1714977569580078, 0.17021600341796875, 0.17022377014160156, 0.17048886108398437, 0.16986550903320313, 0.17008915710449218, 0.1707720642089844, 0.1719218292236328, 0.1710425567626953, 0.17027302551269533, 0.16996966552734374, 0.17139097595214844, 0.17094245910644532, 0.17245542907714845, 0.17030166625976562, 
0.16992051696777344, 0.1711516418457031, 0.17135174560546876, 0.1712356414794922, 0.1733750457763672, 0.16954617309570313, 0.17030758666992188, 0.17125730895996094, 0.17047952270507813, 0.17141824340820314, 0.17165260314941405, 0.17054566955566405, 0.1822544708251953, 0.16856253051757814, 0.1684114227294922, 0.16934877014160157, 0.16943280029296875, 0.17133827209472657, 0.17150303649902343, 0.16991299438476562, 0.1700843505859375, 0.16797596740722656, 0.16964816284179687, 0.1711826629638672, 0.17030387878417969, 0.1705695343017578, 0.1704792022705078, 0.16854444885253905, 0.1697508544921875, 0.1703034210205078, 0.17075820922851562, 0.17089955139160157, 0.16998095703125, 0.1685919647216797, 0.17005810546875, 0.17088511657714844, 0.17077247619628907, 0.17068850708007813, 0.17023794555664062, 0.17110585021972657, 0.1698360595703125, 0.16999722290039063, 0.1709916229248047, 0.17082162475585938, 0.17070057678222655, 0.17151167297363282, 0.17034275817871095, 0.169459716796875, 0.17108303833007812, 0.17032981872558595, 0.17151898193359374, 0.17005567932128907, 0.17078668212890624, 0.17022169494628905, 0.17118003845214844, 0.17069465637207032, 0.17146060180664063, 0.17026422119140625, 0.17154287719726563, 0.17107763671875, 0.17107763671875, 0.1712512969970703, 0.17130741882324219, 0.17102642822265626, 0.17146435546875, 0.1710037384033203, 0.17126170349121095, 0.17097190856933595, 0.17142169189453124, 0.17050624084472657, 0.1709629364013672, 0.17103208923339844, 0.17067362976074218, 0.17122122192382813, 0.17126889038085938, 0.18115890502929688, 0.1690900115966797, 0.1675960693359375, 0.16928153991699219, 0.1689373779296875, 0.17130096435546874, 0.17135002136230468, 0.1691414794921875, 0.1708876495361328, 0.16844422912597656, 0.1672428741455078, 0.16989474487304687, 0.16991424560546875, 0.16975888061523436, 0.16952934265136718, 0.17049971008300782, 0.16977250671386718, 0.17065257263183595, 0.17020518493652342, 0.16987327575683595, 0.17057148742675782, 0.16951321411132814, 0.17016015625, 0.17057395935058595, 0.1706475524902344, 0.17051033020019532, 0.17068605041503906, 0.1695768585205078, 0.17007002258300782, 0.17019241333007812, 0.17037974548339843, 0.17060044860839843, 0.17059635925292968, 0.17029324340820312, 0.16970956420898436, 0.1707806701660156, 0.17037513732910156, 0.16990946960449219, 0.17063990783691407, 0.17023207092285156, 0.1710182647705078, 0.17031680297851562, 0.1698310089111328, 0.1704961853027344, 0.17056163024902343, 0.17059852600097655, 0.17132339477539063, 0.17056562805175782, 0.17143193054199218, 0.1706577911376953, 0.17057160949707031, 0.17110617065429687, 0.17163906860351563, 0.1715460205078125, 0.17120927429199218, 0.1710059814453125, 0.17074995422363282, 0.171040771484375, 0.17195213317871094, 0.17180262756347656, 0.17200930786132812, 0.1711588134765625, 0.1707098846435547, 0.18318576049804688, 0.16949452209472657, 0.16808755493164063, 0.1687244873046875, 0.16889651489257812, 0.17084416198730468, 0.17106533813476563, 0.1695945281982422, 0.16934742736816405, 0.16934892272949217, 0.1698891143798828, 0.17044566345214843, 0.17123916625976562, 0.16857113647460936, 0.17041714477539063, 0.16882710266113282, 0.17039637756347656, 0.1707459259033203, 0.17027072143554686, 0.16951705932617187, 0.17047142028808593, 0.17077452087402345, 0.1704462432861328, 0.17081199645996092, 0.17059635925292968, 0.16919879150390624, 0.17030181884765624, 0.17001628112792969, 0.16829122924804688, 0.17160191345214842, 0.17000653076171876, 0.17016998291015625, 0.1709776611328125, 0.1699983367919922, 
0.1711366729736328, 0.1711353302001953, 0.17036463928222656, 0.17096937561035155, 0.17088511657714844, 0.1701595458984375, 0.17135600280761717, 0.1705028839111328, 0.17092608642578125, 0.17042550659179687, 0.1703942108154297, 0.17076658630371094, 0.17109306335449218, 0.1708756866455078, 0.17064735412597656, 0.17106979370117187, 0.1701908416748047, 0.17153750610351562, 0.1715864715576172, 0.1705594940185547, 0.17056562805175782, 0.171385986328125, 0.17114749145507813, 0.1713772735595703, 0.17166339111328124, 0.17028300476074218, 0.17088716125488282, 0.17098751831054687, 0.17157734680175782, 0.18298783874511718, 0.1690603485107422, 0.1692230987548828, 0.1699102783203125, 0.16849510192871095, 0.17103257751464843, 0.1712004852294922, 0.1695801544189453, 0.17022198486328124, 0.16895794677734374, 0.1697232666015625, 0.17105369567871093, 0.17038671875, 0.17048445129394532, 0.17130252075195312, 0.1688125762939453, 0.1696300506591797, 0.16939007568359374, 0.17059225463867186, 0.17027183532714843, 0.16987228393554688, 0.16966636657714843, 0.16996949768066405, 0.17107183837890624, 0.170612060546875, 0.17033680725097655, 0.17004905700683592, 0.17092445373535156, 0.17078700256347656, 0.17152975463867187, 0.17097116088867187, 0.17148153686523437, 0.1705779266357422, 0.17091789245605468, 0.16871218872070312, 0.17119993591308594, 0.17130758666992188, 0.1698201599121094, 0.17041818237304687, 0.17055850219726562, 0.17009939575195313, 0.17087091064453125, 0.17095286560058592, 0.1708358154296875, 0.17176290893554688, 0.16929478454589844, 0.17154252624511718, 0.1708687286376953, 0.1705963134765625, 0.1714268798828125, 0.1699786834716797, 0.17061494445800782, 0.17147415161132812, 0.17110426330566406, 0.17132194519042968, 0.17106962585449217, 0.17068173217773439, 0.17129945373535158, 0.17134970092773438, 0.17006828308105468, 0.17106454467773438, 0.17137464904785157, 0.1706646728515625, 0.18159837341308593, 0.16844361877441405, 0.16949891662597658, 0.1679237060546875, 0.16992633056640624, 0.17025669860839843, 0.1712387237548828, 0.1692571563720703, 0.16967523193359374, 0.16919349670410155, 0.16937983703613282, 0.17117593383789062, 0.17004544067382812, 0.16912384033203126, 0.16997366333007813, 0.17023574829101562, 0.1704020538330078, 0.17227116394042968, 0.1707211151123047, 0.17095945739746093, 0.17027072143554686, 0.1705858917236328, 0.1680754852294922, 0.16975462341308595, 0.17183538818359376, 0.17054716491699218, 0.17025640869140626, 0.17047142028808593, 0.17016012573242187, 0.17003240966796876, 0.17151795959472657, 0.1705581817626953, 0.17065370178222655, 0.16777215576171875, 0.17163043212890625, 0.17019715881347655, 0.17097727966308593, 0.1701416931152344, 0.17171865844726564, 0.1698201599121094, 0.17053042602539062, 0.17095718383789063, 0.17028489685058593, 0.17120066833496095, 0.17126605224609376, 0.17063731384277345, 0.17004066467285156, 0.17110467529296874, 0.17042253112792968, 0.17117776489257813, 0.171849853515625, 0.17075209045410156, 0.16999763488769531, 0.17140118408203125, 0.170968994140625, 0.17081837463378907, 0.17170623779296876, 0.16934454345703126, 0.17105520629882812, 0.17128678894042967, 0.17102259826660157, 0.17152339172363282, 0.17252780151367186]",tokens/s,5.860759504410523,, 
4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpvtpnyo64/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,14375.452672,8040.349696,0.0,7637.827584,7627.927552,s,1,33.94328515625,33.94328515625,0.0,33.94328515625,33.94328515625,33.94328515625,33.94328515625,[33.94328515625],,kWh,0.0007475653904250066,8.245464403535007e-05,0.00025754909492803346,0.0010875691293883902,,MB,1413.484544,8436.711424,0.0,8013.217792,7904.390656,s,10,1.3587834930419922,0.13587834930419923,0.00036145304152474244,0.13583846282958983,0.13635609588623046,0.13637512283325195,0.13639034439086914,"[0.13635186767578125, 0.13630764770507814, 0.13602838134765624, 0.13563165283203124, 0.13577912902832032, 0.13639414978027345, 0.13537718200683593, 0.1355872344970703, 0.13542845153808594, 0.13589779663085938]",tokens/s,1884.0381952747823,kWh,4.014269578140694e-06,4.4252425546264653e-07,2.664024201232357e-06,7.120818034835698e-06,tokens/kWh,35950925.686855696,MB,1438.49472,8480.751616,0.0,8055.160832,8005.6832,s,10,59.8951201171875,5.98951201171875,0.03501860925379614,5.977143554687499,6.01166826171875,6.04993056640625,6.08054041015625,"[5.95624365234375, 5.989306640625, 5.97554443359375, 5.97740673828125, 5.98745361328125, 5.96985986328125, 5.97106640625, 5.97688037109375, 
6.00316552734375, 6.08819287109375]",tokens/s,10.518386118391225,kWh,0.00017633640983227467,1.9450797180338487e-05,8.798399276157531e-05,0.00028377119977418845,tokens/kWh,222009.84472748605,,s,630,59.891306854248,0.09506556643531436,0.0010075733578412635,0.09478129577636718,0.0966001091003418,0.09713879585266112,0.09868731147766113,"[0.0958371810913086, 0.094876220703125, 0.09499180603027343, 0.09528797149658202, 0.09508783721923827, 0.0941588134765625, 0.09407324981689454, 0.09431005096435546, 0.0948743667602539, 0.09481011199951171, 0.09483673858642579, 0.0941137924194336, 0.09454182434082031, 0.09493094635009766, 0.09484230041503906, 0.09492742156982421, 0.09696038055419921, 0.09420732879638671, 0.09459951782226562, 0.0954044189453125, 0.09424492645263671, 0.09409945678710938, 0.09384345245361328, 0.09464979553222656, 0.09411190032958984, 0.09542854309082031, 0.09398070526123047, 0.09443577575683594, 0.09444351959228516, 0.09401337432861329, 0.09396640014648437, 0.09374050903320312, 0.09407849884033204, 0.09460633850097656, 0.0946495361328125, 0.09418956756591797, 0.09430217742919922, 0.09397049713134766, 0.09659593963623046, 0.09506208038330079, 0.09426531219482422, 0.09471030426025391, 0.09504998779296875, 0.09449676513671874, 0.09458480072021484, 0.09443126678466797, 0.094455810546875, 0.09448566436767578, 0.09422525024414062, 0.0938270721435547, 0.09412403106689453, 0.09417430114746093, 0.09444563293457031, 0.09424163055419922, 0.09459302520751953, 0.09427728271484374, 0.09396854400634766, 0.09364908599853515, 0.09356492614746094, 0.09399491119384766, 0.09503123474121093, 0.09434333038330078, 0.0948674545288086, 0.09801321411132813, 0.0951357421875, 0.09488409423828124, 0.09492829132080079, 0.09435785675048829, 0.09462700653076171, 0.09486326599121093, 0.09420697784423829, 0.09490812683105469, 0.09481644439697266, 0.09482240295410156, 0.09518185424804687, 0.09698198699951172, 0.09486540985107422, 0.09474457550048829, 0.09475071716308593, 0.0949552001953125, 0.09452722930908203, 0.09519570922851563, 0.0946319351196289, 0.09485123443603516, 0.09454348754882813, 0.09463410949707031, 0.09655241394042968, 0.09686224365234375, 0.0950354232788086, 0.09440313720703125, 0.09440870666503906, 0.09455583953857422, 0.094716064453125, 0.09498617553710938, 0.09593468475341797, 0.09505942535400391, 0.09719657897949219, 0.09736994934082031, 0.09508000183105468, 0.09461615753173828, 0.09460940551757813, 0.0944534683227539, 0.09435068511962891, 0.09462271881103515, 0.09719801330566406, 0.0948674545288086, 0.09505177307128906, 0.0953504638671875, 0.09432505798339844, 0.09443122863769532, 0.09430204772949219, 0.09474614715576171, 0.09412793731689453, 0.09475769805908203, 0.09468313598632813, 0.09530524444580078, 0.09498057556152344, 0.09502047729492187, 0.09436627197265625, 0.0946851806640625, 0.09754214477539062, 0.09447817230224609, 0.09463350677490234, 0.09454450988769532, 0.0946638412475586, 0.09452614593505859, 0.09557705688476563, 0.09513164520263671, 0.09494089508056641, 0.09513215637207031, 0.097453857421875, 0.09492889404296875, 0.09461555480957032, 0.09464406585693359, 0.09462572479248046, 0.09448799896240234, 0.09514268493652343, 0.09581362915039063, 0.09457977294921875, 0.09441990661621094, 0.09470156860351563, 0.09481037139892579, 0.09463772583007812, 0.09478889465332031, 0.09453446197509766, 0.09546892547607422, 0.09441343688964844, 0.09551667022705078, 0.09500588989257812, 0.09429440307617187, 0.09454841613769531, 0.09476239776611328, 0.09442979431152344, 0.09390489959716797, 
0.09500588989257812, 0.09432252502441406, 0.09508963012695312, 0.09437171173095703, 0.09609420776367188, 0.0943796157836914, 0.09494150543212891, 0.09447856140136719, 0.09397657775878906, 0.09423804473876952, 0.09418998718261719, 0.09433113861083985, 0.09471180725097657, 0.09672908782958985, 0.09604505920410156, 0.09589555358886719, 0.09470361328125, 0.0947089614868164, 0.09493283081054688, 0.0940283203125, 0.09439842987060547, 0.09550482940673828, 0.09448038482666016, 0.09498009490966797, 0.09451670074462891, 0.09623197174072265, 0.0947384033203125, 0.09406671905517579, 0.09418685150146484, 0.09431846618652344, 0.09505462646484375, 0.09472579193115234, 0.09425778961181641, 0.09503715515136718, 0.0941948471069336, 0.09503286743164062, 0.09499900817871093, 0.09426124572753906, 0.09439823913574219, 0.09434544372558594, 0.09446195220947265, 0.093876220703125, 0.09479167938232422, 0.09405379486083984, 0.09493769836425782, 0.09818726348876954, 0.09488575744628906, 0.09693321228027343, 0.09414530944824219, 0.09448210906982422, 0.0943616943359375, 0.09476963043212891, 0.09456204986572266, 0.09446195220947265, 0.09434521484375, 0.09472000122070312, 0.09486131286621094, 0.09501900482177734, 0.09442902374267578, 0.09527468872070312, 0.09450713348388672, 0.09438156890869141, 0.09551139068603516, 0.09440665435791015, 0.09508029174804687, 0.09437712097167969, 0.09511119842529298, 0.0946033935546875, 0.0963687973022461, 0.09663763427734375, 0.09436160278320313, 0.0940421142578125, 0.09467027282714843, 0.09405267333984375, 0.09549235534667969, 0.09436774444580077, 0.09484060668945313, 0.09499683380126953, 0.09456832122802734, 0.09508611297607422, 0.09434719848632812, 0.0944420166015625, 0.09428582763671875, 0.09463584136962891, 0.09416876983642578, 0.09429452514648437, 0.09582575988769532, 0.09505535888671875, 0.09898255920410157, 0.09481830596923828, 0.09503308868408203, 0.09458889770507813, 0.09442127990722657, 0.09492015838623047, 0.09461993408203125, 0.09529984283447265, 0.09455955505371094, 0.09469817352294922, 0.09487203216552734, 0.09478336334228515, 0.0944598388671875, 0.09448265838623048, 0.0949085464477539, 0.09470297241210937, 0.09549855804443359, 0.09483299255371094, 0.09530937957763672, 0.09510873413085938, 0.09499276733398437, 0.09460982513427735, 0.09492262268066406, 0.09475289916992187, 0.09470771026611328, 0.09522994995117187, 0.09526886749267578, 0.09510707092285156, 0.09853238677978515, 0.09548899078369141, 0.09484697723388671, 0.09498828887939453, 0.0952074203491211, 0.09452761840820313, 0.09456217956542969, 0.09480191802978516, 0.09488944244384766, 0.09483500671386719, 0.09464441680908203, 0.09574169921875, 0.09496991729736329, 0.09477552032470703, 0.09460530853271484, 0.09516236877441406, 0.09501081848144531, 0.09429961395263672, 0.09502159881591797, 0.09464988708496094, 0.0949329605102539, 0.09511084747314454, 0.0954596176147461, 0.09512770843505859, 0.09509721374511719, 0.09444351959228516, 0.09544499206542968, 0.09459881591796875, 0.09473878479003907, 0.09503084564208984, 0.09453353881835938, 0.09454236602783203, 0.0958315200805664, 0.09727849578857421, 0.09442278289794923, 0.09472019195556641, 0.09451091003417969, 0.09437184143066406, 0.09755264282226563, 0.09535897827148437, 0.09482975769042969, 0.09472083282470703, 0.09461663818359375, 0.095015869140625, 0.09468109130859376, 0.09468643188476562, 0.09625033569335938, 0.094330078125, 0.09565663909912109, 0.0947194595336914, 0.09491519927978516, 0.09469878387451172, 0.09541478729248047, 0.09527686309814454, 0.0950829086303711, 
0.09657138824462891, 0.09453266906738281, 0.09411475372314453, 0.09548700714111329, 0.09418409729003906, 0.09492716979980469, 0.09491613006591797, 0.09422895812988281, 0.09453968048095703, 0.09505187225341796, 0.09490335845947266, 0.09447232055664062, 0.09545791625976563, 0.09415074920654297, 0.09471123504638672, 0.0944441909790039, 0.09429792022705077, 0.09442527770996094, 0.09422351837158204, 0.09470764923095704, 0.0946185302734375, 0.09463603210449219, 0.09397452545166016, 0.0947600326538086, 0.09458370971679687, 0.0951317138671875, 0.09448236846923828, 0.09443762969970704, 0.0946644515991211, 0.09454307556152344, 0.09450157165527344, 0.09469757080078126, 0.09425920104980469, 0.09428729248046876, 0.09470623779296874, 0.09508624267578125, 0.09477155303955079, 0.09501036834716797, 0.09442694091796874, 0.09500470733642578, 0.09490697479248048, 0.09428905487060547, 0.09394876861572266, 0.09417632293701172, 0.09486637115478516, 0.09455206298828125, 0.09461084747314454, 0.09481196594238281, 0.09441155242919921, 0.09457868957519532, 0.09777766418457032, 0.09441651153564454, 0.09416438293457031, 0.09409782409667969, 0.09412217712402343, 0.09434438323974609, 0.09476911926269531, 0.09513865661621093, 0.09460940551757813, 0.0964455337524414, 0.09505471801757813, 0.09537744140625, 0.09503094482421875, 0.09490057373046876, 0.09433689880371093, 0.09461280059814453, 0.09482733154296875, 0.09484678649902344, 0.09488607788085937, 0.09527910614013672, 0.09461350250244141, 0.09915325164794922, 0.09569039916992188, 0.09380556488037109, 0.09465241241455079, 0.09424076843261718, 0.0938045425415039, 0.09461350250244141, 0.09477033233642577, 0.09418428802490235, 0.09615074920654297, 0.09667196655273437, 0.09440723419189453, 0.09466470336914062, 0.09421366119384765, 0.09444640350341797, 0.09446115112304687, 0.0943333740234375, 0.09477308654785156, 0.09468860626220703, 0.09419245147705078, 0.09438003540039062, 0.09464988708496094, 0.09414089965820313, 0.09416041564941406, 0.09470409393310547, 0.09400265502929687, 0.09421485137939453, 0.09440444946289063, 0.09485459136962891, 0.09430662536621094, 0.09481855773925782, 0.09418956756591797, 0.09415264129638672, 0.09836124420166016, 0.09464028930664062, 0.0944189453125, 0.09414646148681641, 0.09436579132080078, 0.09480332946777344, 0.09505228424072265, 0.09488349151611328, 0.09476319885253906, 0.09445318603515625, 0.09435222625732421, 0.09428284454345703, 0.09437184143066406, 0.09454796600341797, 0.09405849456787109, 0.09480105590820312, 0.09470857238769531, 0.09552413177490235, 0.09483952331542969, 0.09477922821044922, 0.09461571502685547, 0.09480806732177735, 0.09414041900634766, 0.09435305786132812, 0.09463225555419921, 0.09442829132080079, 0.09478851318359376, 0.09567641448974609, 0.09545651245117187, 0.09394662475585938, 0.09413426971435547, 0.09508863830566407, 0.09508370971679687, 0.09473516845703125, 0.09419570922851563, 0.09412534332275391, 0.0951484146118164, 0.09500399780273437, 0.09555455780029297, 0.09514189147949219, 0.09432780456542969, 0.09444044494628906, 0.09508249664306641, 0.09477120208740235, 0.09434111785888671, 0.094406494140625, 0.09456620788574219, 0.09469577789306641, 0.09550847625732421, 0.10091913604736329, 0.09530767822265625, 0.09451856231689452, 0.09442998504638672, 0.09457398223876953, 0.09446685028076172, 0.09440025329589843, 0.09447449493408203, 0.09440048217773438, 0.0948920669555664, 0.09692121887207031, 0.09514620971679688, 0.09455625915527344, 0.09509855651855469, 0.09504793548583984, 0.09482044982910157, 0.09464835357666015, 
0.09466047668457031, 0.09520550537109375, 0.09511660766601562, 0.09474674987792969, 0.0950252456665039, 0.09480028533935547, 0.09458403015136718, 0.0944884490966797, 0.09443222045898438, 0.09452425384521485, 0.09574217224121094, 0.09450768280029297, 0.09513504028320313, 0.10052166748046874, 0.09585526275634766, 0.09448483276367188, 0.09481798553466797, 0.09489401245117188, 0.09452947235107421, 0.095283203125, 0.09422073364257813, 0.09781024169921874, 0.09488380432128907, 0.09476236724853515, 0.09553596496582031, 0.09462169647216796, 0.09435135650634766, 0.09464575958251953, 0.09487993621826171, 0.09409772491455078, 0.09504515075683594, 0.09501744079589844, 0.09477670288085938, 0.09563814544677735, 0.09523391723632812, 0.09452761840820313, 0.09498397064208984, 0.09658963012695312, 0.09535894775390626, 0.09430470275878906, 0.09519913482666016, 0.09463561248779297, 0.09528355407714843, 0.0950512924194336, 0.09498687744140626, 0.09533401489257813, 0.09479971313476562, 0.09481391906738282, 0.09728470611572265, 0.09526870727539062, 0.09494969940185546, 0.0949881591796875, 0.09463766479492187, 0.09528749084472657, 0.09703404998779297, 0.09681878662109375, 0.09589353942871094, 0.09658467102050781, 0.09556787109375, 0.09525161743164062, 0.09490518188476563, 0.09520275115966798, 0.09517727661132812, 0.09518284606933594, 0.09518678283691406, 0.09565596771240234, 0.09487372589111329, 0.09527705383300782, 0.09474432373046875, 0.09575472259521485, 0.09484899139404297, 0.09472799682617188, 0.09525452423095702, 0.09563340759277343, 0.0960032958984375, 0.0952430419921875, 0.09793536376953126, 0.09577881622314453, 0.09558742523193359, 0.09504041290283204, 0.09510707092285156, 0.09459916687011719, 0.09446336364746094, 0.09509133148193359, 0.09563516998291016, 0.09962729644775391, 0.09806995391845703, 0.09691769409179687, 0.09716569519042968, 0.09652652740478515, 0.09860281372070312, 0.09748188781738282, 0.09718051147460938, 0.09682943725585938, 0.09627852630615234, 0.09721241760253906, 0.09529138946533203, 0.09623551940917968, 0.09590988922119141, 0.09710591888427735, 0.09778578948974609, 0.09689266967773437, 0.09664733123779297, 0.09566019439697265, 0.0961638412475586, 0.09741011047363281, 0.09690003204345703, 0.09599581146240234, 0.09669190216064454, 0.09692937469482422, 0.09718415832519531, 0.09709728240966797, 0.09669305419921875, 0.09872182464599609, 0.09633744049072265, 0.0983331527709961, 0.1004031982421875, 0.09704828643798828, 0.09642422485351562, 0.095557373046875, 0.09636172485351563, 0.09673420715332032, 0.09707081604003906, 0.0970564193725586, 0.09698925018310547, 0.09677267456054688, 0.09700287628173829, 0.09590847778320312, 0.09694207763671875, 0.09608000183105468, 0.09647296142578125, 0.09545423889160157, 0.09689186859130859, 0.09670655822753907, 0.09664717102050781]",tokens/s,10.519055821125642,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,965.349376,555.614208,0.0,153.092096,129.53344,s,1,9.3684150390625,9.3684150390625,0.0,9.3684150390625,9.3684150390625,9.3684150390625,9.3684150390625,[9.3684150390625],,kWh,1.689230952500793e-05,1.8522113296201466e-06,4.505836937999941e-06,2.3250357792628017e-05,,MB,1302.454272,658.374656,0.0,234.881024,196.997632,s,12,0.19094172859191896,0.015911810715993246,0.0002564974820662039,0.015805632114410398,0.016088947868347168,0.016350433921813966,0.016598086662292483,"[0.016097152709960937, 0.016015104293823242, 0.015778623580932617, 0.015961695671081542, 0.015763487815856934, 0.01567955207824707, 0.01572755241394043, 0.015808735847473145, 0.01665999984741211, 0.015914336204528808, 0.015732959747314455, 0.015802528381347655]",tokens/s,16088.678062433826,kWh,4.637176261573999e-07,5.114020691258732e-08,1.8637272405396e-07,7.012305571239472e-07,tokens/kWh,365072510.60188794,MB,1315.872768,668.860416,0.0,245.366784,196.672512,s,12,10.669718017578125,0.8891431681315104,0.004632445069287583,0.887816162109375,0.8962047241210938,0.8971108978271485,0.8979088006591797,"[0.8962948608398438, 0.8890794067382812, 0.887509033203125, 0.8880663452148437, 0.8825963745117188, 0.8858290405273438, 0.8953934936523438, 0.8881759643554688, 0.8875659790039062, 0.8855509643554688, 0.8855482788085938, 0.8981082763671875]",tokens/s,70.85473100174782,kWh,2.5795509793286452e-05,2.8448339029892348e-06,8.768217772946548e-06,3.740856146922224e-05,tokens/kWh,1684106.4592080885,,s,756,10.664022859573357,0.014105850343351012,0.0003255319721518257,0.014051727771759033,0.014206975936889648,0.014312495946884156,0.015492051410675051,"[0.013803744316101074, 0.014148159980773926, 0.014149632453918457, 0.01407692813873291, 0.014395520210266114, 0.014055392265319825, 0.014209792137145997, 0.013985952377319337, 0.014044416427612305, 0.014231295585632324, 0.014146559715270996, 0.014087552070617676, 0.014076543807983398, 0.014085472106933594, 0.014157535552978516, 0.014095552444458008, 0.014276351928710938, 0.014196224212646484, 0.014148223876953124, 0.014143360137939454, 0.014073856353759765, 0.01402783966064453, 0.014006976127624512, 0.014158080101013184, 0.014134783744812012, 0.014061823844909668, 0.013978143692016602, 0.014122367858886719, 0.014125696182250977, 0.014106335639953614, 0.014145536422729492, 0.01408614444732666, 0.014092351913452148, 0.014036928176879883, 0.014065024375915527, 0.013977984428405761, 0.014033056259155274, 0.013944064140319824, 0.014172608375549317, 0.01394115161895752, 0.01414748764038086, 0.014115008354187011, 0.014101856231689453, 0.014049856185913086, 0.01473094367980957, 0.0159007043838501, 0.01487228775024414, 0.014500415802001954, 0.014206975936889648, 0.014243200302124023, 0.014086784362792969, 0.014076000213623046, 0.014066656112670899, 0.013974464416503907, 0.014237919807434081, 0.01419878387451172, 0.017210975646972656, 0.014506048202514648, 0.014091744422912597, 0.014170783996582031, 0.014065664291381836, 0.014304991722106933, 0.01416220760345459, 0.013832192420959472, 0.013971199989318847, 0.01404748821258545, 0.013968511581420899, 0.014006272315979004, 0.014023551940917969, 0.01419264030456543, 0.014085151672363282, 0.014162272453308105, 0.01401046371459961, 0.014216032028198242, 0.014470848083496093, 0.014258175849914552, 0.014237215995788574, 0.014146016120910645, 0.014171296119689942, 0.014063615798950196, 0.014087295532226563, 0.014077664375305176, 0.014068767547607422, 0.014074463844299317, 0.014108768463134766, 
0.01418064022064209, 0.014193856239318847, 0.01410108757019043, 0.014059136390686035, 0.014214816093444825, 0.01402732753753662, 0.014127488136291505, 0.014112000465393066, 0.014191455841064453, 0.014031871795654297, 0.014074496269226074, 0.014047231674194336, 0.014022784233093262, 0.014006431579589845, 0.0140697603225708, 0.014071455955505371, 0.014144927978515624, 0.01414355182647705, 0.014160767555236816, 0.014085791587829589, 0.014029151916503906, 0.014210783958435058, 0.014176480293273926, 0.014112832069396973, 0.0140348482131958, 0.014035039901733399, 0.014041152000427246, 0.014085887908935547, 0.014083328247070313, 0.014074943542480468, 0.014018112182617188, 0.014110719680786133, 0.014047552108764648, 0.014065695762634276, 0.014175904273986817, 0.01417632007598877, 0.01412940788269043, 0.014096384048461913, 0.014012096405029296, 0.01445241641998291, 0.014071840286254883, 0.013720735549926758, 0.013992671966552734, 0.014013440132141113, 0.013952992439270019, 0.01407590389251709, 0.01396678352355957, 0.014080927848815919, 0.01405452823638916, 0.014078495979309082, 0.01407795238494873, 0.014024703979492188, 0.014079999923706055, 0.014056768417358399, 0.013991007804870606, 0.014021535873413087, 0.014033311843872071, 0.014181856155395509, 0.013990912437438965, 0.013971263885498048, 0.014135104179382324, 0.014004416465759277, 0.013936351776123047, 0.014032928466796876, 0.013962688446044922, 0.014050047874450683, 0.013987903594970702, 0.01399177646636963, 0.01398806381225586, 0.014120896339416504, 0.014041088104248046, 0.013950847625732421, 0.01400592041015625, 0.014135775566101074, 0.014020480155944825, 0.01397977638244629, 0.01400534439086914, 0.01403996753692627, 0.014019904136657714, 0.01404207992553711, 0.014034655570983887, 0.014007743835449218, 0.01403551959991455, 0.014026432037353516, 0.014176223754882812, 0.014133536338806152, 0.014096447944641113, 0.014103584289550782, 0.014140192031860352, 0.014034144401550293, 0.014092831611633301, 0.014023103713989259, 0.014276448249816895, 0.014043295860290527, 0.013946944236755372, 0.0140283203125, 0.014020735740661622, 0.014032320022583008, 0.013988703727722167, 0.014049280166625976, 0.014058848381042481, 0.014691072463989258, 0.015077280044555665, 0.01508902359008789, 0.013935711860656739, 0.014050016403198242, 0.014050304412841797, 0.014281951904296875, 0.01396713638305664, 0.01391964817047119, 0.0139782075881958, 0.013961215972900391, 0.013981696128845214, 0.013901023864746094, 0.01405827236175537, 0.014035072326660156, 0.014038911819458008, 0.014045184135437011, 0.014268416404724121, 0.014404864311218261, 0.014375679969787598, 0.014270463943481445, 0.014206975936889648, 0.014112832069396973, 0.01412217617034912, 0.014133952140808105, 0.014118975639343261, 0.014063615798950196, 0.014057472229003906, 0.014010432243347169, 0.014298720359802246, 0.014049440383911134, 0.01402086353302002, 0.014065888404846192, 0.014075967788696289, 0.014015680313110351, 0.014039520263671876, 0.014018560409545898, 0.01415987205505371, 0.014048447608947754, 0.014117888450622559, 0.014054431915283203, 0.014215264320373535, 0.014095040321350097, 0.014136608123779298, 0.014033535957336425, 0.014126239776611328, 0.014133824348449706, 0.014025088310241698, 0.014065664291381836, 0.014055423736572266, 0.014, 0.014084159851074219, 0.014139455795288086, 0.014073856353759765, 0.014106623649597168, 0.014059712409973144, 0.014054271697998047, 0.014169024467468263, 0.01402406406402588, 0.014464960098266602, 0.01414560031890869, 0.013986111640930176, 0.014014464378356933, 
0.013999808311462402, 0.014064255714416505, 0.014016608238220214, 0.013684736251831055, 0.013966367721557618, 0.01400111961364746, 0.013963295936584472, 0.013995936393737793, 0.013935872077941894, 0.013988672256469726, 0.013944831848144532, 0.01400169563293457, 0.014028927803039552, 0.01400864028930664, 0.013947936058044433, 0.013966336250305175, 0.01398960018157959, 0.014000351905822755, 0.014063039779663085, 0.013956928253173828, 0.013941568374633789, 0.013991935729980469, 0.013920224189758301, 0.013993951797485351, 0.013989376068115235, 0.014008255958557128, 0.013960927963256835, 0.014066592216491699, 0.013993727684020995, 0.01402246379852295, 0.01396889591217041, 0.014025407791137695, 0.014125311851501465, 0.014002559661865234, 0.01395852756500244, 0.014094592094421387, 0.014208191871643067, 0.01404419231414795, 0.013938272476196289, 0.014006591796875, 0.013962783813476563, 0.01401852798461914, 0.013959551811218262, 0.014038975715637208, 0.013959232330322266, 0.014043135643005371, 0.013973504066467286, 0.014003359794616699, 0.014159711837768554, 0.014111743927001954, 0.013958656311035156, 0.014023167610168457, 0.01393603229522705, 0.013986432075500488, 0.013950847625732421, 0.014032095909118652, 0.013980128288269044, 0.014041248321533204, 0.013968640327453614, 0.014031871795654297, 0.013948927879333496, 0.014112288475036622, 0.01397116756439209, 0.014061920166015625, 0.014049535751342773, 0.014151840209960938, 0.013659647941589356, 0.01399449634552002, 0.013928352355957031, 0.013983936309814453, 0.013959072113037109, 0.014018560409545898, 0.013944831848144532, 0.014053664207458495, 0.01397935962677002, 0.014000127792358399, 0.013953023910522461, 0.014014464378356933, 0.014032192230224609, 0.014105152130126954, 0.014054623603820801, 0.014089023590087891, 0.01404304027557373, 0.014160160064697266, 0.013980704307556152, 0.0141364164352417, 0.014163743972778321, 0.014112768173217773, 0.014178303718566895, 0.014110719680786133, 0.014069215774536133, 0.01401632022857666, 0.014015487670898438, 0.014075615882873534, 0.014020607948303223, 0.013989248275756837, 0.014027551651000976, 0.014067520141601562, 0.014093440055847168, 0.014058400154113769, 0.014108672142028808, 0.014192895889282226, 0.014095616340637208, 0.014008480072021484, 0.014076128005981445, 0.014028927803039552, 0.014271552085876466, 0.013980832099914551, 0.01404700756072998, 0.013973759651184083, 0.014083840370178223, 0.014024959564208985, 0.014087936401367188, 0.013987839698791504, 0.014076095581054687, 0.014286656379699707, 0.014239744186401367, 0.014018560409545898, 0.014024703979492188, 0.014337823867797852, 0.01405737590789795, 0.014055744171142578, 0.013991935729980469, 0.014030847549438476, 0.01402665615081787, 0.014075584411621093, 0.014015999794006348, 0.014062496185302734, 0.014013888359069825, 0.013707263946533203, 0.013946880340576171, 0.014030847549438476, 0.014002176284790039, 0.014133248329162598, 0.013964832305908203, 0.014139616012573242, 0.01401471996307373, 0.014078240394592285, 0.013934656143188477, 0.01402012825012207, 0.014114944458007813, 0.014006272315979004, 0.013981696128845214, 0.014019840240478516, 0.013964320182800293, 0.014012127876281739, 0.014005599975585937, 0.014058367729187012, 0.013950943946838379, 0.014053183555603027, 0.013964832305908203, 0.01397439956665039, 0.013984543800354004, 0.013959360122680665, 0.01396393585205078, 0.014054816246032715, 0.013939295768737793, 0.013989279747009278, 0.014027168273925781, 0.014059679985046387, 0.013979488372802735, 0.014041248321533204, 
0.014086048126220703, 0.01410044765472412, 0.014032863616943359, 0.014057567596435547, 0.014009856224060058, 0.015014399528503418, 0.01831737518310547, 0.018462528228759767, 0.014535008430480957, 0.014161760330200196, 0.014091584205627441, 0.014168767929077149, 0.014110527992248535, 0.014123200416564941, 0.014213120460510254, 0.01416316795349121, 0.014054176330566406, 0.014030847549438476, 0.014036607742309571, 0.01404252815246582, 0.01400111961364746, 0.014061471939086915, 0.014041184425354004, 0.014229215621948243, 0.014024991989135743, 0.014149632453918457, 0.014356160163879394, 0.014053248405456542, 0.01410489559173584, 0.014022784233093262, 0.013738816261291503, 0.013983679771423339, 0.014051168441772462, 0.014069984436035157, 0.014055423736572266, 0.013976767539978027, 0.013988672256469726, 0.014061375617980957, 0.014044768333435058, 0.014019359588623046, 0.014238783836364746, 0.014070303916931152, 0.014112064361572266, 0.014097311973571777, 0.014157216072082519, 0.014098176002502441, 0.014149760246276856, 0.014068448066711425, 0.014096159934997559, 0.014061984062194824, 0.014110207557678223, 0.01399839973449707, 0.01414361572265625, 0.014128640174865722, 0.014032383918762208, 0.014101216316223145, 0.014049440383911134, 0.014133248329162598, 0.01408409595489502, 0.01415782356262207, 0.014218784332275391, 0.014296704292297364, 0.014113632202148438, 0.014098176002502441, 0.014098272323608399, 0.014149663925170898, 0.013992320060729981, 0.014267807960510253, 0.014092543601989745, 0.014023072242736816, 0.01410860824584961, 0.014045184135437011, 0.014104000091552734, 0.01403551959991455, 0.014047552108764648, 0.014108511924743652, 0.014096223831176757, 0.014020223617553711, 0.01414352035522461, 0.014067296028137208, 0.014166879653930663, 0.01406761646270752, 0.01415123176574707, 0.01409017562866211, 0.014306015968322754, 0.014067456245422364, 0.014055071830749511, 0.01401689624786377, 0.01419587230682373, 0.014078816413879395, 0.014092288017272948, 0.014104415893554687, 0.014114944458007813, 0.013711615562438965, 0.014169856071472168, 0.014311327934265136, 0.013930591583251953, 0.013991200447082519, 0.013941472053527831, 0.014389439582824707, 0.014059295654296875, 0.014055456161499023, 0.014136799812316895, 0.013955807685852051, 0.014026623725891113, 0.013965439796447755, 0.013995840072631835, 0.014053088188171386, 0.01414748764038086, 0.014135423660278321, 0.014364831924438477, 0.014147456169128419, 0.014431839942932128, 0.014058112144470215, 0.014211199760437011, 0.014012288093566895, 0.014056960105895995, 0.0140797119140625, 0.01401523208618164, 0.013963520050048828, 0.01398966407775879, 0.014006303787231446, 0.013979904174804688, 0.013993472099304198, 0.014059231758117676, 0.013965951919555663, 0.014757760047912597, 0.014014335632324218, 0.013971232414245605, 0.014037343978881836, 0.014058560371398925, 0.01499779224395752, 0.014060223579406738, 0.014056608200073243, 0.014105504035949706, 0.014069439888000489, 0.014115039825439452, 0.014065919876098633, 0.014053119659423827, 0.013965151786804198, 0.014035231590270996, 0.01397878360748291, 0.013990336418151856, 0.013988096237182618, 0.013976672172546386, 0.014031968116760253, 0.014030752182006835, 0.01399715232849121, 0.01399392032623291, 0.014002464294433593, 0.013943424224853515, 0.01391753578186035, 0.014123680114746094, 0.014026752471923828, 0.014151007652282714, 0.014234272003173828, 0.013856991767883301, 0.014014240264892577, 0.014046591758728027, 0.013984383583068848, 0.014028703689575196, 0.014596192359924317, 
0.014171551704406739, 0.014135904312133789, 0.014396575927734375, 0.014087008476257325, 0.014016511917114258, 0.014110943794250489, 0.014064512252807616, 0.014001055717468262, 0.013993184089660644, 0.01406441593170166, 0.014007391929626465, 0.014027104377746583, 0.013943488121032716, 0.013992832183837891, 0.013976448059082032, 0.013958975791931153, 0.013994367599487304, 0.01401030445098877, 0.014200927734375, 0.01410812759399414, 0.014078080177307129, 0.014086463928222655, 0.014014464378356933, 0.014064928054809571, 0.01396553611755371, 0.01417251205444336, 0.013973152160644531, 0.013985535621643067, 0.014012288093566895, 0.014121312141418457, 0.014063424110412597, 0.01405196762084961, 0.013968799591064453, 0.014062015533447266, 0.013954463958740235, 0.01396412754058838, 0.013989472389221191, 0.014045599937438966, 0.014120960235595703, 0.014194463729858399, 0.014010592460632324, 0.014030943870544434, 0.013945856094360352, 0.014033823966979981, 0.013955072402954101, 0.014051487922668457, 0.013993824005126953, 0.014024671554565429, 0.014153759956359863, 0.014098431587219238, 0.014024703979492188, 0.013987839698791504, 0.013989376068115235, 0.014019071578979492, 0.014041088104248046, 0.014004416465759277, 0.01404089641571045, 0.01371622371673584, 0.013985792160034179, 0.014030847549438476, 0.014012415885925293, 0.014188544273376465, 0.0140513916015625, 0.014068960189819336, 0.014033823966979981, 0.014057279586791992, 0.015474656105041503, 0.01405737590789795, 0.013957375526428223, 0.014026623725891113, 0.014012415885925293, 0.014081727981567383, 0.014108991622924805, 0.014178303718566895, 0.014036992073059081, 0.014007743835449218, 0.013982272148132325, 0.014022784233093262, 0.013952896118164063, 0.013996031761169434, 0.014032896041870118, 0.014024703979492188, 0.013983136177062988, 0.014000703811645508, 0.013954719543457032, 0.014095999717712403, 0.014021087646484375, 0.01399126434326172, 0.014197183609008788, 0.013987648010253905, 0.014240415573120117, 0.014166048049926758, 0.014002176284790039, 0.014021823883056641, 0.013994720458984375, 0.01401039981842041, 0.013973535537719727, 0.014030879974365234, 0.014020159721374511, 0.01397110366821289, 0.013953408241271973, 0.014010784149169921, 0.014002176284790039, 0.014000127792358399, 0.013928447723388672, 0.01397270393371582, 0.014134048461914062, 0.014082048416137695, 0.014026368141174316, 0.014167712211608887, 0.014022687911987304, 0.014056127548217773, 0.013931967735290528, 0.0140065279006958, 0.013986111640930176, 0.01399721622467041, 0.014003040313720704, 0.014018560409545898, 0.014004223823547364, 0.014021727561950683, 0.013671296119689941, 0.013979167938232421, 0.013967840194702149, 0.014012479782104492, 0.013950912475585938, 0.014063424110412597, 0.013930080413818359, 0.014020256042480469, 0.014016768455505372, 0.014151424407958984, 0.014005215644836425, 0.014267775535583496, 0.014107232093811034, 0.01406156826019287, 0.014050559997558594, 0.014057408332824707, 0.014056063652038575, 0.014087424278259278, 0.014130111694335937, 0.014189632415771484, 0.014178239822387695, 0.014142784118652343, 0.01424351978302002, 0.014192288398742675, 0.014191103935241698, 0.013997920036315918, 0.014100000381469727, 0.014008031845092774, 0.014084416389465332, 0.014033120155334472, 0.014031071662902832, 0.01399407958984375, 0.01411638355255127, 0.013990240097045898, 0.014049311637878419, 0.01404867172241211, 0.014909536361694335, 0.01564095973968506, 0.015513312339782715, 0.014121024131774903, 0.014127455711364745, 0.014116864204406738, 
0.014089695930480957, 0.01411516761779785, 0.014045439720153808, 0.01414345645904541, 0.014024671554565429, 0.014063327789306641, 0.01516163158416748, 0.01597856044769287, 0.014237152099609376, 0.014315999984741212, 0.014289312362670899, 0.014230688095092774, 0.014524703979492187, 0.014106240272521973, 0.014060064315795899, 0.015104000091552734, 0.014161919593811035, 0.014206175804138183, 0.014107680320739746, 0.014058783531188964, 0.015949855804443358]",tokens/s,70.89257121399731,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,858.046464,517.865472,0.0,115.34336,110.009344,s,1,9.0124755859375,9.0124755859375,0.0,9.0124755859375,9.0124755859375,9.0124755859375,9.0124755859375,[9.0124755859375],,kWh,1.4069797408336627e-05,1.5374224032325115e-06,4.483059142003365e-06,2.0090278953572504e-05,,MB,1306.656768,633.208832,0.0,218.103808,177.936896,s,17,0.22525884723663328,0.01325052042568431,7.9837501845923e-05,0.013222111701965331,0.013349126243591308,0.013403462600708007,0.013430925979614258,"[0.01329206371307373, 0.013269887924194337, 0.01331270408630371, 0.013318623542785644, 0.013217408180236817, 0.013394880294799804, 0.0131560640335083, 0.013222111701965331, 0.013198752403259278, 0.01316915225982666, 0.013185471534729003, 0.013285920143127442, 0.01343779182434082, 0.013264639854431153, 0.01318627166748047, 0.013178848266601562, 0.013168255805969238]",tokens/s,19319.99587757921,kWh,3.797503900520174e-07,4.1879425854729996e-08,2.480799557243161e-07,6.697097716310635e-07,tokens/kWh,382255136.2458362,MB,1340.12928,641.59744,0.0,226.492416,177.939456,s,17,9.829652404785158,0.5782148473403033,0.0031682555884745335,0.577907470703125,0.5824709594726563,0.5840972778320312,0.5847349340820313,"[0.5798839721679687, 0.5838980102539062, 0.5848943481445312, 0.580834716796875, 0.5761536254882812, 0.576032958984375, 0.5747373657226562, 0.5815195922851563, 0.5757628784179688, 0.579451171875, 0.5791591186523437, 0.577907470703125, 0.5787372436523438, 0.5768345336914062, 0.5757421264648438, 0.5745864868164062, 0.5735167846679687]",tokens/s,108.9560399387702,kWh,1.6625946381012254e-05,1.833562023255159e-06,5.9178200387197285e-06,2.437732844298714e-05,tokens/kWh,2584368.5105749895,,s,1071,9.821727472305296,0.009170613886372827,0.00016317588482356887,0.009143775939941406,0.009277567863464356,0.00936191987991333,0.009872832012176513,"[0.00896566390991211, 0.009402655601501464, 0.009194751739501954, 0.009192031860351562, 0.00920406436920166, 0.00918233585357666, 0.009138815879821778, 0.009176544189453125, 0.00924460792541504, 0.009218527793884277, 0.009238271713256837, 0.009257599830627442, 0.009182751655578613, 0.009227871894836426, 0.009462656021118164, 0.009197567939758301, 0.009279264450073241, 0.009298144340515136, 0.009369248390197754, 0.009265119552612305, 0.009361248016357421, 0.009179455757141113, 0.009173055648803712, 0.009250368118286133, 0.009285792350769043, 0.009194944381713867, 0.009181632041931152, 0.0091627197265625, 0.0091627197265625, 0.00919215965270996, 0.009226143836975098, 0.00913430404663086, 0.009145567893981934, 0.009138143539428711, 0.00911017608642578, 0.009082271575927734, 0.009095040321350098, 0.00913167953491211, 0.009120223999023437, 0.009209759712219238, 0.009310239791870118, 0.00914527988433838, 0.009123488426208496, 
0.009162272453308105, 0.00917142391204834, 0.00911359977722168, 0.00911302375793457, 0.009152704238891602, 0.009152895927429199, 0.009168895721435547, 0.009134079933166504, 0.00933683204650879, 0.009209919929504394, 0.009148639678955078, 0.009264287948608398, 0.009196096420288086, 0.009245856285095215, 0.009192095756530762, 0.009184672355651855, 0.00913856029510498, 0.009103775978088379, 0.009207039833068848, 0.009213919639587403, 0.008936703681945801, 0.009301728248596191, 0.009285728454589843, 0.009331839561462402, 0.009207743644714356, 0.009245408058166505, 0.00923027229309082, 0.009254816055297852, 0.009210399627685547, 0.009270496368408203, 0.00921014404296875, 0.009201312065124511, 0.009201343536376954, 0.009261311531066895, 0.009483967781066895, 0.009254015922546387, 0.00929513645172119, 0.009222847938537598, 0.009233471870422363, 0.009323264122009277, 0.009257216453552247, 0.009209247589111328, 0.009277567863464356, 0.009214240074157715, 0.009276479721069335, 0.009249759674072265, 0.009232288360595703, 0.009220159530639649, 0.009254591941833496, 0.009247039794921875, 0.009199295997619629, 0.009219807624816894, 0.00925551986694336, 0.00923033618927002, 0.009227456092834473, 0.009276224136352538, 0.00921129608154297, 0.009198176383972167, 0.009324319839477539, 0.009246015548706054, 0.009423775672912598, 0.009303327560424804, 0.009267264366149903, 0.009198111534118652, 0.00923033618927002, 0.009230079650878907, 0.009304384231567382, 0.009288800239562989, 0.009227231979370117, 0.009209856033325196, 0.009214015960693359, 0.009348992347717286, 0.009273407936096191, 0.009261055946350098, 0.009252863883972168, 0.009233663558959961, 0.009222623825073242, 0.009415167808532715, 0.009439359664916993, 0.00920729637145996, 0.009299263954162598, 0.009533791542053223, 0.009265600204467773, 0.009004575729370117, 0.009207584381103516, 0.009420928001403808, 0.009228863716125489, 0.00943932819366455, 0.009283072471618652, 0.009180735588073731, 0.009187423706054687, 0.009216095924377441, 0.009300127983093261, 0.009184960365295411, 0.009261568069458008, 0.009226816177368164, 0.009170463562011719, 0.0091976318359375, 0.009227871894836426, 0.00924124813079834, 0.009195232391357422, 0.009236672401428222, 0.009197567939758301, 0.009236479759216308, 0.00922771167755127, 0.009228863716125489, 0.009363455772399902, 0.00959488010406494, 0.010530015945434571, 0.009416543960571289, 0.009619935989379883, 0.009771488189697265, 0.009291775703430176, 0.009230143547058105, 0.009360896110534669, 0.009267200469970703, 0.009265888214111328, 0.00924668788909912, 0.009268704414367676, 0.009208352088928222, 0.0092194881439209, 0.009345631599426269, 0.009374752044677734, 0.009194047927856445, 0.009246272087097168, 0.009160703659057617, 0.009270079612731933, 0.009170432090759278, 0.00916864013671875, 0.009149215698242187, 0.009172991752624511, 0.009220191955566406, 0.009446880340576173, 0.009208255767822266, 0.009240575790405273, 0.009331968307495117, 0.009162848472595214, 0.009144991874694823, 0.009172991752624511, 0.0091397123336792, 0.009142751693725587, 0.009267231941223145, 0.009244671821594238, 0.009160223960876466, 0.009171648025512695, 0.009172287940979004, 0.008933343887329102, 0.009192768096923829, 0.009157343864440918, 0.009197343826293945, 0.00921827220916748, 0.009191519737243652, 0.009202624320983887, 0.009300959587097169, 0.009197471618652343, 0.009197728157043457, 0.009303008079528809, 0.009196736335754395, 0.009180959701538085, 0.009192928314208985, 0.009217727661132812, 0.009185471534729005, 
0.00920028781890869, 0.009301247596740723, 0.009207551956176757, 0.009226271629333496, 0.00921014404296875, 0.009253536224365234, 0.009183168411254883, 0.009218144416809081, 0.009142271995544434, 0.009209152221679687, 0.009212608337402345, 0.009214271545410156, 0.009217727661132812, 0.009287199974060058, 0.009340928077697755, 0.00932419204711914, 0.009199680328369141, 0.009363936424255372, 0.009191200256347657, 0.009279999732971191, 0.009261055946350098, 0.009242624282836913, 0.00930185604095459, 0.009308320045471191, 0.009203488349914551, 0.009172512054443359, 0.00922265625, 0.009234623908996582, 0.009152511596679687, 0.009178784370422364, 0.009332544326782226, 0.009355263710021973, 0.009214240074157715, 0.00953769588470459, 0.009177184104919434, 0.009162528038024902, 0.009139936447143554, 0.009148447990417481, 0.009121248245239258, 0.009179327964782715, 0.009111519813537597, 0.009190239906311036, 0.009045184135437012, 0.009167679786682129, 0.009101311683654785, 0.009152511596679687, 0.009090559959411621, 0.008945664405822755, 0.009166144371032714, 0.009191455841064454, 0.0091429443359375, 0.009119744300842286, 0.009184736251831055, 0.009136672019958497, 0.009078304290771484, 0.009106047630310058, 0.009131104469299316, 0.00916147232055664, 0.009168895721435547, 0.00921395206451416, 0.009111871719360351, 0.009225119590759277, 0.009087039947509765, 0.009222880363464355, 0.009152352333068848, 0.009101344108581542, 0.009152511596679687, 0.00907686424255371, 0.009089119911193847, 0.009168512344360351, 0.00923472023010254, 0.009148159980773925, 0.009191328048706055, 0.009134431838989258, 0.009119744300842286, 0.009131551742553711, 0.009096832275390624, 0.009182239532470703, 0.009105216026306153, 0.009127039909362793, 0.009161215782165527, 0.00916096019744873, 0.00919155216217041, 0.009134079933166504, 0.009196703910827636, 0.009115488052368163, 0.00907692813873291, 0.009155391693115234, 0.00911359977722168, 0.009142560005187988, 0.009148127555847168, 0.009281472206115722, 0.009123135566711425, 0.009124863624572753, 0.009133600234985351, 0.00911507225036621, 0.009186079978942871, 0.009082719802856446, 0.00923680019378662, 0.009117535591125488, 0.00908083152770996, 0.009101311683654785, 0.009173184394836427, 0.0091461763381958, 0.009064352035522461, 0.009161055564880371, 0.009127872467041016, 0.009047264099121094, 0.009091679573059081, 0.009103391647338867, 0.008850175857543946, 0.0092009916305542, 0.00912831974029541, 0.0091627836227417, 0.00923481559753418, 0.009162464141845704, 0.009086463928222656, 0.009116479873657227, 0.009100480079650879, 0.009304863929748535, 0.009084320068359375, 0.00912172794342041, 0.009294495582580567, 0.009166015625, 0.009171711921691894, 0.009145695686340332, 0.009130720138549805, 0.009070591926574707, 0.009355039596557617, 0.00910153579711914, 0.00905174446105957, 0.00906281566619873, 0.009103551864624023, 0.009111392021179198, 0.009113568305969239, 0.009144319534301757, 0.00916204833984375, 0.00921177577972412, 0.009163583755493164, 0.0090830078125, 0.009113471984863281, 0.009058303833007812, 0.00911087989807129, 0.009074527740478516, 0.009884480476379395, 0.009169919967651367, 0.009074879646301269, 0.009170944213867188, 0.009120736122131348, 0.009160544395446777, 0.009107199668884278, 0.009054495811462402, 0.009211872100830078, 0.009057920455932617, 0.009094592094421386, 0.009065183639526367, 0.009189087867736817, 0.00912822437286377, 0.009096511840820313, 0.009081727981567382, 0.009072928428649903, 0.009120863914489746, 0.009068896293640136, 
0.009119711875915527, 0.009048416137695312, 0.009115263938903809, 0.009097408294677735, 0.009083071708679199, 0.009183232307434081, 0.009103360176086426, 0.009168831825256348, 0.00909932804107666, 0.009058015823364258, 0.008902400016784669, 0.00916659164428711, 0.009238975524902344, 0.009242015838623047, 0.009202336311340332, 0.009129983901977539, 0.009188703536987305, 0.009126655578613281, 0.009102304458618165, 0.009087360382080079, 0.009165216445922851, 0.009072575569152832, 0.009058272361755371, 0.009146880149841309, 0.009166015625, 0.009146271705627441, 0.009136063575744629, 0.00920035171508789, 0.009090751647949218, 0.009089344024658203, 0.009119744300842286, 0.009100607872009277, 0.009026080131530763, 0.00905235195159912, 0.009077952384948731, 0.00921065616607666, 0.009089152336120605, 0.009057472229003907, 0.009157183647155762, 0.009078944206237793, 0.009183199882507324, 0.009132032394409179, 0.00917478370666504, 0.009033856391906738, 0.009035327911376952, 0.009093152046203614, 0.00906726360321045, 0.009111295700073243, 0.009063936233520508, 0.009097920417785645, 0.009023551940917968, 0.009104479789733886, 0.009021951675415038, 0.009040063858032226, 0.009074687957763672, 0.009054528236389161, 0.00906383991241455, 0.00921615982055664, 0.009262528419494629, 0.009252896308898926, 0.009108127593994141, 0.00924623966217041, 0.009030367851257324, 0.009107328414916993, 0.009066240310668945, 0.009055392265319824, 0.009210751533508301, 0.009074272155761719, 0.00905014419555664, 0.009132672309875488, 0.009137824058532714, 0.009217535972595215, 0.009087679862976074, 0.008898624420166016, 0.009149408340454102, 0.009126879692077637, 0.009142175674438476, 0.009146464347839356, 0.009176639556884766, 0.009179231643676757, 0.009196224212646485, 0.009578335762023925, 0.00954047966003418, 0.009130528450012207, 0.009126399993896485, 0.009148320198059083, 0.009119775772094727, 0.009101056098937988, 0.009163071632385254, 0.009312159538269043, 0.009776543617248536, 0.009398079872131348, 0.009219103813171387, 0.009191455841064454, 0.009185279846191406, 0.009402079582214356, 0.009271295547485351, 0.009150464057922364, 0.009134079933166504, 0.009199007987976075, 0.009161312103271485, 0.009446751594543458, 0.009218720436096192, 0.009113280296325684, 0.00914902400970459, 0.009178848266601562, 0.009095168113708496, 0.009117823600769043, 0.009135616302490235, 0.009090432167053223, 0.009194496154785157, 0.009166624069213867, 0.009261280059814454, 0.009215359687805175, 0.009130080223083497, 0.009441823959350585, 0.009155936241149902, 0.009122464179992676, 0.009103360176086426, 0.009143936157226562, 0.009058688163757324, 0.011053055763244628, 0.009164799690246582, 0.009172991752624511, 0.009171199798583985, 0.009162591934204102, 0.009107359886169434, 0.009168895721435547, 0.009135231971740723, 0.009181440353393555, 0.009104096412658691, 0.009147487640380859, 0.009173664093017578, 0.009113759994506835, 0.009164575576782227, 0.009173215866088867, 0.008887935638427734, 0.00912940788269043, 0.009118816375732423, 0.009130111694335937, 0.009160320281982421, 0.009236448287963868, 0.009146271705627441, 0.009144288063049316, 0.009119999885559083, 0.009074687957763672, 0.009200896263122559, 0.009235424041748047, 0.009207584381103516, 0.00910268783569336, 0.00910636806488037, 0.009153440475463867, 0.00909603214263916, 0.009140192031860351, 0.009138303756713867, 0.009113471984863281, 0.009046015739440917, 0.009098367691040039, 0.009110400199890137, 0.009093119621276855, 0.009302016258239745, 0.009095328330993652, 
0.009119808197021484, 0.009084063529968262, 0.00922390365600586, 0.009273344039916993, 0.009223072052001954, 0.00907084846496582, 0.009224191665649414, 0.00910918426513672, 0.009139616012573243, 0.009122688293457032, 0.009063776016235352, 0.009097663879394532, 0.009111552238464356, 0.00922214412689209, 0.009055871963500977, 0.009108991622924804, 0.009056447982788086, 0.00936415958404541, 0.009170656204223632, 0.00907033634185791, 0.009085472106933595, 0.009125280380249023, 0.009096896171569824, 0.009079168319702149, 0.009236384391784667, 0.009150464057922364, 0.00918387222290039, 0.009162752151489258, 0.009093119621276855, 0.009058303833007812, 0.009063872337341308, 0.009097375869750977, 0.009093055725097656, 0.00907107162475586, 0.009084927558898925, 0.009209216117858887, 0.009101280212402344, 0.008874239921569825, 0.009106847763061523, 0.009183839797973633, 0.00912179183959961, 0.009153632164001465, 0.009075743675231934, 0.009117568016052246, 0.009154720306396484, 0.009076576232910156, 0.00915187168121338, 0.009128576278686523, 0.009050111770629882, 0.009052160263061524, 0.009070303916931153, 0.009080767631530762, 0.009111071586608887, 0.00908297634124756, 0.010473343849182129, 0.010936415672302247, 0.009263872146606445, 0.009189375877380371, 0.009158656120300293, 0.009193056106567383, 0.009114015579223632, 0.009443360328674317, 0.009760224342346191, 0.009161343574523926, 0.009023360252380371, 0.00908249568939209, 0.009089407920837402, 0.009070112228393554, 0.009095647811889648, 0.009116671562194823, 0.009034751892089844, 0.009043968200683594, 0.009136128425598144, 0.009070591926574707, 0.009078944206237793, 0.009072287559509278, 0.009416864395141602, 0.009216032028198241, 0.009264320373535155, 0.009175135612487792, 0.009079520225524901, 0.009156479835510253, 0.009310336112976074, 0.009183327674865722, 0.00917686367034912, 0.009145567893981934, 0.009221023559570312, 0.009117695808410644, 0.00910540771484375, 0.009101311683654785, 0.009141375541687011, 0.009177311897277832, 0.009054495811462402, 0.009082719802856446, 0.009138112068176269, 0.009064160346984863, 0.00913871955871582, 0.009173472404479981, 0.009075936317443847, 0.00907260799407959, 0.008872575759887695, 0.00914457607269287, 0.009190303802490234, 0.009142560005187988, 0.009129599571228027, 0.009102304458618165, 0.009097184181213379, 0.009076736450195312, 0.009103008270263672, 0.009201248168945313, 0.009192192077636718, 0.009138175964355469, 0.0091278076171875, 0.009292192459106445, 0.009123135566711425, 0.0090928316116333, 0.00939840030670166, 0.009867839813232422, 0.009957375526428223, 0.009328864097595214, 0.009150495529174804, 0.009362591743469238, 0.009192255973815918, 0.00922396755218506, 0.00911577606201172, 0.009238400459289552, 0.009193152427673339, 0.009124159812927246, 0.009168895721435547, 0.00913759994506836, 0.009103103637695313, 0.009147487640380859, 0.009410271644592285, 0.009101311683654785, 0.009099488258361816, 0.00918716812133789, 0.009125663757324218, 0.009117471694946288, 0.009168448448181153, 0.009130816459655761, 0.009115648269653321, 0.009087072372436524, 0.00920361614227295, 0.009130080223083497, 0.009113247871398927, 0.009129504203796386, 0.009165535926818848, 0.009213312149047852, 0.009126527786254883, 0.009236479759216308, 0.009125472068786621, 0.009150879859924316, 0.009137568473815917, 0.009144927978515625, 0.00918064022064209, 0.00915715217590332, 0.00922214412689209, 0.009237536430358887, 0.009157376289367676, 0.009189599990844727, 0.009144672393798828, 0.009108832359313965, 
0.009107775688171386, 0.008957599639892578, 0.009148127555847168, 0.00918182373046875, 0.009175200462341309, 0.009175135612487792, 0.0091911678314209, 0.009140416145324707, 0.009148415565490722, 0.009180224418640137, 0.00919046401977539, 0.009137855529785157, 0.009122943878173828, 0.009155455589294434, 0.00914192008972168, 0.009087615966796875, 0.009238240242004394, 0.009187328338623046, 0.009205375671386719, 0.009228096008300782, 0.009148991584777832, 0.009273632049560547, 0.009201408386230469, 0.009174336433410644, 0.009106080055236816, 0.009103360176086426, 0.009144319534301757, 0.00915004825592041, 0.00912166404724121, 0.00909887981414795, 0.009089344024658203, 0.009198176383972167, 0.009080320358276368, 0.00904640007019043, 0.009115200042724609, 0.009087039947509765, 0.009175583839416504, 0.00906441593170166, 0.009227744102478027, 0.009083423614501953, 0.009099455833435058, 0.009105216026306153, 0.009078783988952637, 0.00903987216949463, 0.009057951927185058, 0.009143775939941406, 0.009101920127868653, 0.009220735549926758, 0.009081536293029786, 0.009175647735595703, 0.009401823997497559, 0.009108384132385254, 0.009150303840637208, 0.00906595230102539, 0.009146240234375, 0.009114432334899902, 0.009932191848754882, 0.009378399848937988, 0.009156096458435058, 0.009806048393249512, 0.009089311599731446, 0.00911952018737793, 0.009102815628051758, 0.009083647727966309, 0.00885148811340332, 0.009172127723693848, 0.009102848052978516, 0.009183487892150878, 0.009321887969970704, 0.009179103851318359, 0.009271903991699219, 0.009240511894226075, 0.009117247581481934, 0.009182880401611328, 0.009158783912658692, 0.009128704071044921, 0.01005679988861084, 0.010041472434997558, 0.009115839958190918, 0.009155167579650878, 0.009132320404052735, 0.009082400321960449, 0.009226431846618652, 0.00922214412689209, 0.009072640419006347, 0.009125247955322265, 0.00914198398590088, 0.009092000007629395, 0.009315808296203614, 0.009148544311523437, 0.009144736289978026, 0.009101344108581542, 0.00911740779876709, 0.009165056228637695, 0.009091072082519532, 0.009121248245239258, 0.009094688415527344, 0.009227264404296874, 0.009074272155761719, 0.009148832321166991, 0.009132032394409179, 0.009082079887390137, 0.009177887916564941, 0.009133503913879395, 0.009122367858886718, 0.009129983901977539, 0.009107456207275391, 0.009197407722473145, 0.009080384254455566, 0.0091244478225708, 0.00917411231994629, 0.009151328086853027, 0.009094688415527344, 0.009089216232299805, 0.009167200088500976, 0.009181344032287597, 0.009135968208312988, 0.009150560379028321, 0.009156512260437013, 0.00908083152770996, 0.009095168113708496, 0.009281344413757324, 0.009359552383422851, 0.009138175964355469, 0.009177087783813476, 0.00929587173461914, 0.009134079933166504, 0.008893535614013673, 0.009122655868530274, 0.00911366367340088, 0.009177087783813476, 0.009197567939758301, 0.009280991554260255, 0.009123871803283692, 0.009030143737792968, 0.00903974437713623, 0.009106816291809082, 0.00915347194671631, 0.009135775566101074, 0.009091327667236328, 0.009133312225341797, 0.009091775894165039, 0.009222111701965331, 0.009252863883972168, 0.009148415565490722, 0.00910921573638916, 0.009119327545166016, 0.009105536460876465, 0.009167136192321778, 0.009076736450195312, 0.00915875244140625, 0.00908512020111084, 0.009037823677062988, 0.009045856475830078, 0.009063584327697754, 0.00914896011352539, 0.009069024085998535, 0.009047327995300293, 0.009089759826660156, 0.00906982421875, 0.009089152336120605, 0.009046784400939941, 0.009099136352539063, 
0.009003007888793945, 0.009026880264282227, 0.009028287887573242, 0.00907855987548828, 0.009099488258361816, 0.00910364818572998, 0.009067584037780761, 0.00903878402709961, 0.009037407875061035, 0.009433216094970703, 0.009383935928344727, 0.009359583854675294, 0.01024726390838623, 0.00919212818145752, 0.009125663757324218, 0.00919382381439209, 0.009185152053833008, 0.009144031524658203, 0.009173407554626464, 0.009172032356262207, 0.009424863815307617, 0.009163840293884278, 0.00918835163116455, 0.009166687965393066, 0.00912889575958252, 0.00911366367340088, 0.009148127555847168, 0.008864383697509765, 0.00916044807434082, 0.009097599983215332, 0.00913651180267334, 0.009152383804321288, 0.009115584373474121, 0.009074687957763672, 0.009298144340515136, 0.009393343925476074, 0.009261664390563964, 0.009242143630981445, 0.009224672317504883, 0.009170944213867188, 0.00912384033203125, 0.00915395164489746, 0.00914902400970459, 0.010584063529968261, 0.009122079849243164, 0.00907875156402588, 0.009092864036560058, 0.009070207595825195, 0.009097344398498535, 0.009083135604858399, 0.009072064399719238, 0.009111519813537597, 0.009060671806335449, 0.00905843162536621, 0.009053600311279298, 0.009138239860534667, 0.009112256050109863, 0.009060416221618652, 0.009108927726745606, 0.009095680236816407, 0.009086976051330567, 0.00905174446105957, 0.009161120414733886, 0.009078783988952637, 0.009053759574890137, 0.00903980827331543, 0.009105376243591309, 0.009069087982177735, 0.009101311683654785, 0.009084927558898925, 0.009037983894348145, 0.009037376403808594, 0.009078175544738769, 0.009113887786865234, 0.009110015869140625, 0.009072735786437988, 0.009115584373474121, 0.009117759704589844, 0.009111552238464356, 0.009090463638305664, 0.009152640342712402, 0.009084383964538574, 0.009083680152893066, 0.009089247703552246, 0.009144351959228516, 0.009057855606079102, 0.009054047584533691, 0.009087552070617675, 0.00912992000579834, 0.009066559791564942, 0.00882323169708252, 0.009142175674438476, 0.00905020809173584, 0.009080384254455566, 0.00912019157409668, 0.009092960357666015, 0.009543328285217286, 0.00910972785949707, 0.009111007690429688, 0.009263936042785645, 0.009111359596252441, 0.009062623977661133, 0.009068511962890624, 0.009046015739440917, 0.009066495895385742, 0.009099200248718261, 0.00907475185394287, 0.009118720054626465, 0.009214176177978515, 0.009059103965759277, 0.009054207801818847, 0.009060352325439454, 0.009066495895385742, 0.009082816123962402, 0.009042240142822266, 0.009076671600341797, 0.009037728309631348, 0.009039775848388672, 0.009122048377990723, 0.009352543830871581, 0.009036191940307617, 0.009032064437866211, 0.009091808319091796, 0.009042847633361816, 0.00902143955230713, 0.009027584075927735, 0.009054047584533691, 0.009061887741088867, 0.009136799812316895, 0.009156607627868652, 0.00916703987121582, 0.009067328453063964, 0.009065024375915528, 0.009104960441589356, 0.009122559547424317, 0.009050016403198241, 0.009148127555847168, 0.009092639923095704, 0.009759872436523438, 0.009086112022399902, 0.009122015953063965, 0.009052160263061524, 0.00908131217956543, 0.009324480056762695, 0.009244735717773438, 0.009084927558898925, 0.009107456207275391, 0.009100799560546874, 0.009067008018493652, 0.009101119995117188, 0.00908512020111084, 0.009160703659057617, 0.009082176208496094, 0.008821120262145996, 0.009243488311767578, 0.009081664085388184, 0.009055487632751465, 0.009257120132446289, 0.00911417579650879, 0.00910540771484375, 0.009104960441589356, 0.009132415771484374, 
0.009078847885131835, 0.009068544387817384, 0.009135295867919922, 0.009118656158447265, 0.009072799682617188, 0.00911740779876709, 0.00910153579711914, 0.009057087898254395, 0.009145312309265137, 0.009070591926574707, 0.009059328079223633, 0.00903270435333252, 0.00908249568939209, 0.009142975807189941, 0.009044704437255859, 0.009067487716674805, 0.009073920249938965, 0.009155263900756836, 0.009092384338378907, 0.009087776184082031, 0.009091072082519532, 0.009061568260192871, 0.009102144241333009, 0.009064000129699707, 0.009097439765930176, 0.00908694362640381, 0.009109760284423828, 0.009131775856018067, 0.009111455917358399, 0.009119487762451172, 0.009075200080871582, 0.009127840042114258, 0.009205951690673828, 0.009052160263061524, 0.00901904010772705, 0.009144672393798828, 0.009060576438903808, 0.00910108757019043, 0.009109472274780274, 0.009121824264526367, 0.009086976051330567, 0.009074975967407226, 0.009220928192138671, 0.009097567558288574, 0.009081119537353516, 0.00905446434020996, 0.009183263778686524, 0.009074687957763672, 0.009071616172790528, 0.009100159645080567, 0.009117823600769043, 0.009033727645874023, 0.009080320358276368, 0.009036319732666016]",tokens/s,109.04395413331716,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,963.35872,555.614208,0.0,153.092096,129.53344,s,1,9.14650390625,9.14650390625,0.0,9.14650390625,9.14650390625,9.14650390625,9.14650390625,[9.14650390625],,kWh,1.670583987916719e-05,1.8354166820039103e-06,5.442226576002351e-06,2.3983483137173454e-05,,MB,1333.1456,658.374656,0.0,234.881024,198.963712,s,11,0.19130998420715334,0.01739181674610485,0.00016017321944168414,0.017302751541137695,0.017563295364379884,0.01766543960571289,0.017747154998779296,"[0.0177675838470459, 0.017338943481445313, 0.0175578556060791, 0.017302751541137695, 0.017232511520385744, 0.01730080032348633, 0.017412128448486327, 0.017563295364379884, 0.017281312942504883, 0.017262624740600585, 0.01729017639160156]",tokens/s,14719.56631887436,kWh,5.080937402029028e-07,5.6033641895848394e-08,2.0670113154434301e-07,7.708285136430942e-07,tokens/kWh,332110184.65065765,MB,1365.95456,666.763264,0.0,243.269632,198.966272,s,11,10.677777893066406,0.9707070811878552,0.0038573408967589305,0.9714016723632812,0.9738226928710938,0.9767429504394531,0.9790791564941406,"[0.9796632080078125, 0.9722630615234376, 0.9718429565429687, 0.97007958984375, 0.9666900634765625, 0.9721900634765624, 0.9714016723632812, 0.9667803344726562, 0.9660859985351562, 0.966958251953125, 0.9738226928710938]",tokens/s,64.90114394025728,kWh,2.8120241654493367e-05,3.1012252898691526e-06,9.48651369372814e-06,4.070798063809066e-05,tokens/kWh,1547608.0859940911,,s,693,10.672597422599788,0.015400573481384983,0.000253804492295871,0.01535318374633789,0.015524812889099121,0.01565468807220459,0.016568851776123047,"[0.015170080184936523, 0.015390591621398926, 0.015372287750244141, 0.015376288414001465, 
0.015355968475341797, 0.015385696411132813, 0.015403136253356934, 0.015435903549194337, 0.015433888435363769, 0.01564515209197998, 0.015383744239807129, 0.015459136009216308, 0.015394559860229492, 0.015409472465515137, 0.015382464408874513, 0.015416864395141602, 0.015409695625305175, 0.015452032089233398, 0.015418623924255371, 0.015446911811828614, 0.01541487979888916, 0.015681952476501466, 0.015362208366394042, 0.015633440017700194, 0.01919673538208008, 0.015471903800964356, 0.01555504035949707, 0.015372639656066894, 0.015351584434509277, 0.015337696075439453, 0.015345600128173828, 0.01533743953704834, 0.015437919616699219, 0.015344927787780762, 0.015313280105590821, 0.015349120140075683, 0.015286304473876954, 0.015317855834960937, 0.015337471961975097, 0.01545747184753418, 0.015373120307922363, 0.015431679725646973, 0.015318880081176758, 0.015453696250915527, 0.015422112464904785, 0.015789088249206544, 0.016567264556884766, 0.016517120361328123, 0.015501312255859375, 0.01578700828552246, 0.01569388771057129, 0.015590335845947266, 0.015634655952453613, 0.015517279624938965, 0.016562368392944334, 0.01541487979888916, 0.015400959968566894, 0.015319456100463867, 0.015429471969604492, 0.015368351936340332, 0.015374336242675781, 0.015355903625488282, 0.015330400466918945, 0.015056351661682128, 0.015335807800292969, 0.015310624122619628, 0.01534012794494629, 0.015341376304626465, 0.015526080131530761, 0.015593055725097657, 0.015681952476501466, 0.015751168251037596, 0.015431679725646973, 0.01569587230682373, 0.015393088340759278, 0.01531811237335205, 0.015337151527404785, 0.01546067237854004, 0.015426143646240235, 0.015402976036071778, 0.015375935554504394, 0.015352288246154784, 0.015433728218078613, 0.015513312339782715, 0.015587200164794921, 0.015562560081481934, 0.015491680145263672, 0.015428895950317384, 0.015346303939819336, 0.015322272300720214, 0.015334336280822754, 0.015439616203308106, 0.015351648330688477, 0.01533561611175537, 0.01535318374633789, 0.015317888259887695, 0.015364064216613769, 0.015382207870483399, 0.015409503936767578, 0.01536838436126709, 0.01534774398803711, 0.015327008247375488, 0.015531583786010742, 0.015354240417480469, 0.015382047653198241, 0.015362591743469238, 0.015335424423217774, 0.015345664024353027, 0.015298687934875488, 0.01541107177734375, 0.015430784225463868, 0.015456352233886719, 0.015429408073425292, 0.01537945556640625, 0.015402112007141113, 0.015454303741455079, 0.015438624382019043, 0.015453375816345215, 0.015450079917907715, 0.01596662425994873, 0.015425984382629394, 0.015634336471557618, 0.015443872451782227, 0.015382240295410157, 0.015413727760314942, 0.015429471969604492, 0.015027968406677247, 0.015302656173706054, 0.015308768272399902, 0.015321151733398437, 0.015287327766418456, 0.015294752120971679, 0.015266752243041993, 0.015357824325561523, 0.015394304275512695, 0.015800671577453615, 0.01530463981628418, 0.015310912132263184, 0.015234368324279785, 0.015287039756774902, 0.01528985595703125, 0.015329728126525879, 0.015314944267272949, 0.015628543853759766, 0.015668992042541505, 0.015382080078125, 0.015409600257873536, 0.015357952117919921, 0.015337471961975097, 0.015296607971191407, 0.015329183578491211, 0.015262847900390624, 0.015291423797607421, 0.01529190444946289, 0.015392671585083008, 0.015404831886291505, 0.015372960090637208, 0.015240960121154785, 0.01531868839263916, 0.015345696449279786, 0.015349696159362793, 0.015378144264221192, 0.015313311576843262, 0.015298272132873535, 0.01529094409942627, 0.015288415908813477, 
0.01548083209991455, 0.015386752128601074, 0.015323136329650879, 0.015267583847045898, 0.01559785556793213, 0.0165130558013916, 0.015472576141357422, 0.015362048149108886, 0.015316160202026367, 0.015618880271911622, 0.01581670379638672, 0.01658710479736328, 0.015424703598022461, 0.015419872283935546, 0.015405055999755859, 0.015421440124511718, 0.015558015823364258, 0.015341183662414552, 0.015458463668823242, 0.015557472229003905, 0.015363200187683106, 0.015309696197509765, 0.015679776191711427, 0.015016480445861817, 0.01547264003753662, 0.015497983932495117, 0.01550227165222168, 0.015434528350830078, 0.015408351898193359, 0.015332032203674316, 0.015322400093078612, 0.01531167984008789, 0.01527830410003662, 0.015361184120178223, 0.01539955234527588, 0.015560864448547363, 0.015426912307739259, 0.015366656303405762, 0.015380319595336913, 0.015396960258483887, 0.015374400138854981, 0.015362048149108886, 0.015386176109313966, 0.015359871864318848, 0.015426112174987792, 0.01533296012878418, 0.015323552131652832, 0.015364095687866211, 0.015314944267272949, 0.01533238410949707, 0.01533232021331787, 0.015292415618896485, 0.015335424423217774, 0.015368096351623535, 0.01531488037109375, 0.015315103530883788, 0.015285759925842286, 0.01555014419555664, 0.015411840438842774, 0.015357407569885254, 0.015450207710266114, 0.01537497615814209, 0.015359999656677246, 0.015388671875, 0.01583513641357422, 0.015443967819213868, 0.01534291172027588, 0.015465120315551757, 0.015317024230957032, 0.015340864181518555, 0.015388640403747558, 0.01537731170654297, 0.015363327980041504, 0.015370816230773925, 0.015353568077087402, 0.015463775634765625, 0.015455007553100586, 0.015364255905151367, 0.0154967041015625, 0.015536224365234375, 0.01557091236114502, 0.015358464241027832, 0.015390560150146485, 0.015548800468444824, 0.015363136291503906, 0.015286815643310548, 0.01496940803527832, 0.015300191879272462, 0.015276320457458496, 0.015312288284301758, 0.015268575668334962, 0.015238752365112304, 0.015249823570251465, 0.015292415618896485, 0.015289376258850098, 0.01527238368988037, 0.015404640197753906, 0.01530361557006836, 0.015274080276489257, 0.015335007667541504, 0.015361568450927735, 0.015319808006286621, 0.015280159950256347, 0.015373600006103516, 0.015286720275878907, 0.0152740478515625, 0.015294272422790528, 0.015305120468139649, 0.015374336242675781, 0.015310848236083984, 0.015372287750244141, 0.015314463615417481, 0.015284064292907715, 0.015344223976135253, 0.015318655967712402, 0.015319456100463867, 0.015259072303771973, 0.015285152435302735, 0.01538326358795166, 0.01529747200012207, 0.01538419246673584, 0.015302528381347656, 0.01529088020324707, 0.01529651165008545, 0.015300224304199219, 0.015290847778320312, 0.015325087547302246, 0.015294015884399414, 0.015465951919555665, 0.0153220796585083, 0.015351807594299317, 0.01531276798248291, 0.015456640243530273, 0.01526963233947754, 0.015271936416625977, 0.01579212760925293, 0.015387935638427734, 0.01539145565032959, 0.015357952117919921, 0.015380031585693359, 0.015456704139709473, 0.0153754243850708, 0.015487808227539063, 0.015449600219726562, 0.015450752258300782, 0.015376383781433106, 0.015330559730529785, 0.01545088005065918, 0.015452159881591796, 0.015020511627197265, 0.01530470371246338, 0.015324383735656739, 0.015371040344238281, 0.015323455810546876, 0.015413087844848633, 0.015343647956848144, 0.015297663688659669, 0.015360639572143555, 0.015362112045288086, 0.015586496353149414, 0.015580063819885253, 0.01547663974761963, 0.015393024444580078, 
0.015412927627563477, 0.015414336204528808, 0.01538697624206543, 0.015391008377075195, 0.015504863739013672, 0.01541318416595459, 0.015428095817565919, 0.015316672325134277, 0.015409952163696288, 0.015518112182617188, 0.015471232414245606, 0.015352959632873536, 0.015373503684997559, 0.015337344169616698, 0.015327391624450683, 0.015405695915222168, 0.01539087963104248, 0.01551308822631836, 0.015442463874816894, 0.015459296226501464, 0.015430496215820313, 0.01537177562713623, 0.015410847663879394, 0.015473504066467286, 0.015377984046936036, 0.015430080413818359, 0.015460224151611328, 0.015437919616699219, 0.01558460807800293, 0.01538649559020996, 0.01544480037689209, 0.015369664192199707, 0.015336159706115723, 0.015357791900634766, 0.01531708812713623, 0.015369536399841309, 0.015538880348205567, 0.015585184097290039, 0.015569055557250976, 0.015437664031982422, 0.015426560401916504, 0.01550438404083252, 0.015358176231384277, 0.015511327743530274, 0.015591423988342285, 0.015566656112670899, 0.015566559791564941, 0.01547935962677002, 0.015603615760803222, 0.015097920417785644, 0.015378432273864746, 0.015265791893005372, 0.015418975830078126, 0.015281920433044433, 0.015377023696899413, 0.015427007675170898, 0.016265695571899413, 0.01604416084289551, 0.01691993522644043, 0.01679043197631836, 0.015394623756408692, 0.015452223777770996, 0.015376223564147948, 0.015325183868408204, 0.015325183868408204, 0.015339232444763184, 0.015282464027404786, 0.015327520370483398, 0.015372127532958984, 0.01528223991394043, 0.015340767860412597, 0.015315168380737305, 0.015298080444335938, 0.01528816032409668, 0.015358559608459473, 0.015310527801513672, 0.015306528091430665, 0.015264575958251952, 0.015326784133911133, 0.015342368125915527, 0.01549903964996338, 0.015400192260742188, 0.0153221435546875, 0.015294336318969726, 0.015257439613342284, 0.015281311988830567, 0.015292672157287598, 0.015346272468566895, 0.015281311988830567, 0.015282879829406739, 0.015247520446777343, 0.015519743919372558, 0.015342752456665039, 0.015293279647827148, 0.015386624336242676, 0.015310848236083984, 0.015245311737060547, 0.015271967887878417, 0.015284480094909668, 0.01527177619934082, 0.015385600090026855, 0.0153056001663208, 0.015290240287780761, 0.015289664268493652, 0.015450528144836426, 0.015331135749816895, 0.015268447875976563, 0.015294464111328124, 0.015691776275634766, 0.01544332790374756, 0.015430399894714356, 0.015415103912353515, 0.015044896125793458, 0.01528377628326416, 0.015290464401245117, 0.01529798412322998, 0.015243904113769531, 0.015288607597351074, 0.01539891242980957, 0.015275903701782226, 0.015325311660766602, 0.015245311737060547, 0.015333375930786132, 0.015529408454895019, 0.015362624168395997, 0.01532908821105957, 0.015329471588134766, 0.015341567993164062, 0.015335647583007813, 0.015341343879699708, 0.015396063804626464, 0.01534828758239746, 0.015363424301147462, 0.015378527641296387, 0.015371040344238281, 0.015336992263793946, 0.015413215637207031, 0.015350560188293456, 0.01533513641357422, 0.015321087837219239, 0.01530675220489502, 0.015257599830627442, 0.015325471878051758, 0.015392607688903808, 0.015422847747802735, 0.015325695991516113, 0.015322463989257813, 0.0152740478515625, 0.015295071601867676, 0.015384703636169434, 0.015383999824523925, 0.015339520454406739, 0.015335840225219726, 0.01534988784790039, 0.015357855796813966, 0.015337087631225587, 0.015599424362182618, 0.015376959800720215, 0.015349984169006348, 0.015329055786132813, 0.015284223556518555, 0.015330304145812988, 0.015350784301757812, 
0.015372287750244141, 0.015361472129821778, 0.015337823867797851, 0.015308832168579102, 0.015313088417053223, 0.015322336196899414, 0.015334176063537599, 0.015361023902893066, 0.015295488357543945, 0.015298272132873535, 0.015354144096374511, 0.015398431777954101, 0.015335424423217774, 0.015421440124511718, 0.01537548828125, 0.015354751586914062, 0.01527126407623291, 0.015229567527770996, 0.015279680252075196, 0.015344256401062012, 0.015302495956420898, 0.015297663688659669, 0.015292608261108398, 0.015247615814208984, 0.015251839637756347, 0.015308352470397949, 0.015312607765197755, 0.015270175933837891, 0.015468223571777344, 0.015301024436950684, 0.015327648162841797, 0.015308799743652344, 0.015375967979431152, 0.015384991645812989, 0.015380479812622071, 0.015357952117919921, 0.015365951538085938, 0.01535814380645752, 0.015357695579528808, 0.015307007789611816, 0.01536415958404541, 0.015378368377685548, 0.015311903953552246, 0.01527888011932373, 0.01533561611175537, 0.015327232360839844, 0.015292448043823242, 0.015282367706298828, 0.015281824111938477, 0.015320575714111329, 0.015327872276306153, 0.01530675220489502, 0.015259360313415527, 0.015271488189697266, 0.015356639862060546, 0.015382304191589355, 0.015314559936523437, 0.01536233615875244, 0.015269503593444825, 0.01573699188232422, 0.015346464157104492, 0.015386176109313966, 0.01530611228942871, 0.015292832374572754, 0.015276448249816894, 0.01526959991455078, 0.015265312194824219, 0.015327263832092285, 0.01529417610168457, 0.015288960456848145, 0.015284607887268067, 0.015281408309936524, 0.015311615943908691, 0.015415295600891114, 0.015328351974487304, 0.015035136222839356, 0.015565952301025391, 0.015317983627319336, 0.015400287628173828, 0.015378527641296387, 0.015450559616088867, 0.015497280120849609, 0.015505599975585937, 0.015406975746154785, 0.015467807769775391, 0.015280927658081055, 0.01526534366607666, 0.015294336318969726, 0.01531715202331543, 0.01529798412322998, 0.015327136039733886, 0.015354496002197265, 0.01526313591003418, 0.01525836753845215, 0.015276255607604981, 0.015258912086486817, 0.015289055824279784, 0.01651055908203125, 0.015409855842590333, 0.015302271842956542, 0.01534985637664795, 0.015292415618896485, 0.015332415580749511, 0.015313376426696777, 0.015368672370910644, 0.015335424423217774, 0.015308799743652344, 0.015300607681274414, 0.015256768226623535, 0.015314047813415527, 0.015252960205078126, 0.015221023559570312, 0.015261631965637207, 0.015279232025146484, 0.015402112007141113, 0.01526963233947754, 0.015285887718200684, 0.015248831748962402, 0.01530339241027832, 0.01528444766998291, 0.015273983955383302, 0.015583231925964355, 0.015339039802551269, 0.015298944473266601, 0.015373536109924316, 0.015379327774047852, 0.015363327980041504, 0.015436287879943847, 0.015276512145996094, 0.015238944053649903, 0.015289600372314454, 0.01530070400238037, 0.015297183990478516, 0.015273664474487305, 0.015277567863464356, 0.01523583984375, 0.015254752159118652, 0.015292991638183593, 0.015013888359069824, 0.015367679595947266, 0.015693344116210938, 0.015401951789855957, 0.0153274564743042, 0.015336992263793946, 0.01530844783782959, 0.015321503639221192, 0.015350239753723145, 0.015357760429382325, 0.01530191993713379, 0.01527667236328125, 0.015294464111328124, 0.01531289577484131, 0.015325183868408204, 0.015327232360839844, 0.015321056365966796, 0.015296799659729004, 0.015283967971801758, 0.015263744354248047, 0.015265567779541016, 0.01542576026916504, 0.015316800117492676, 0.015276063919067382, 0.015259807586669921, 
0.015291999816894532, 0.015345919609069824, 0.015263968467712403, 0.015310784339904786, 0.015319040298461914, 0.015334527969360351, 0.015292896270751952, 0.015309375762939453, 0.015892319679260254, 0.016809087753295898, 0.01634921646118164, 0.015401503562927247, 0.015360320091247558, 0.015355584144592285, 0.015377056121826172, 0.015359647750854492, 0.017171680450439455, 0.015366944313049316, 0.015378432273864746, 0.01581875228881836, 0.015353856086730957, 0.015292415618896485, 0.015293791770935058, 0.015372832298278809, 0.01527359962463379, 0.015433759689331054, 0.01532975959777832, 0.015357184410095214, 0.015315391540527344, 0.01579212760925293, 0.01678371238708496, 0.015396063804626464, 0.015493056297302247, 0.015319392204284668, 0.015343903541564941, 0.015355232238769532, 0.01536464023590088, 0.01535155200958252]",tokens/s,64.93264690491704,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,856.338432,517.865472,0.0,115.34336,110.009344,s,1,9.2312412109375,9.2312412109375,0.0,9.2312412109375,9.2312412109375,9.2312412109375,9.2312412109375,[9.2312412109375],,kWh,1.4064317370828878e-05,1.5439288619701453e-06,4.451948006002626e-06,2.006019423880165e-05,,MB,1288.192,633.208832,0.0,218.103808,177.936896,s,19,0.23129113388061526,0.01217321757266396,0.00011122361903895703,0.012147295951843261,0.012317132568359375,0.012371308708190918,0.01242001522064209,"[0.012100255966186524, 0.012432191848754883, 0.012151935577392577, 0.012096320152282715, 0.012297183990478515, 0.012088255882263184, 0.012081503868103027, 0.012147295951843261, 0.01206339168548584, 0.012364543914794923, 0.012305279731750489, 0.012172575950622558, 0.012096575736999512, 0.012206975936889648, 0.012024991989135743, 0.01230463981628418, 0.01209171199798584, 0.012166912078857423, 0.012098591804504394]",tokens/s,21029.772816586366,kWh,3.5123614180705653e-07,3.8735155387618717e-08,2.3125436336943038e-07,6.212256605641057e-07,tokens/kWh,412088579.4825965,MB,1301.46304,641.59744,0.0,226.492416,177.939456,s,19,9.816568420410157,0.5166614958110609,0.0028018415807339915,0.516629150390625,0.5187433349609375,0.5205914611816406,0.5248543762207031,"[0.516720947265625, 0.5167571411132813, 0.5184293212890625, 0.5164871215820312, 0.5154718017578125, 0.5156202392578125, 0.5146138916015625, 0.51624755859375, 0.5175775756835937, 0.5167236328125, 0.5259201049804687, 0.517488037109375, 0.5199993896484375, 0.5147380981445312, 0.5169304809570312, 0.5128339233398438, 0.5121649169921875, 0.515215087890625, 0.516629150390625]",tokens/s,121.93670422662696,kWh,1.4991059501109534e-05,1.6531043836972231e-06,5.397889028830817e-06,2.2042052913637573e-05,tokens/kWh,2858172.9772103694,,s,1197,9.807489690780649,0.008193391554536874,0.00017742084596856621,0.008167519569396972,0.008266815948486328,0.008360198211669922,0.008937992820739744,"[0.007964928150177002, 0.008308064460754395, 0.008363743782043458, 0.008273887634277343, 0.008310784339904785, 0.00822758388519287, 0.00820633602142334, 0.00825990390777588, 0.008314559936523438, 0.008240896224975586, 0.00826598358154297, 0.008180031776428223, 0.008244256019592285, 0.008278783798217773, 0.008213824272155761, 0.008213184356689452, 0.008202272415161133, 0.008221856117248535, 0.008248031616210938, 0.00825654411315918, 0.00817801570892334, 0.008085151672363282, 0.008295392036437988, 0.008310784339904785, 0.00819542407989502, 0.008179936408996582, 0.00820406436920166, 0.008169599533081055, 0.00816767978668213, 0.00821673583984375, 0.008153504371643066, 0.0081528959274292, 0.008110048294067383, 0.008180800437927247, 0.008156000137329101, 0.00814851188659668, 0.008169055938720703, 0.008121248245239257, 0.008112128257751466, 0.008142848014831543, 
0.008146944046020508, 0.008157535552978516, 0.008123616218566894, 0.008151488304138184, 0.008284159660339355, 0.008154335975646973, 0.00821123218536377, 0.008115903854370117, 0.008200511932373047, 0.008202239990234375, 0.008189567565917969, 0.008224384307861328, 0.0081692476272583, 0.008178144454956056, 0.008206687927246093, 0.008167840003967286, 0.008181023597717284, 0.008163776397705078, 0.008097696304321288, 0.008257663726806641, 0.008113375663757324, 0.008167424201965333, 0.008124608039855957, 0.00791599988937378, 0.008240127563476562, 0.008200480461120605, 0.008297375679016113, 0.008202207565307618, 0.008241344451904296, 0.008246527671813966, 0.008200032234191895, 0.008225215911865234, 0.008253567695617676, 0.008261759757995606, 0.008222592353820802, 0.008180864334106445, 0.008227711677551269, 0.008179712295532226, 0.008241151809692383, 0.008196096420288086, 0.00825113582611084, 0.00840550422668457, 0.008206080436706543, 0.008479935646057129, 0.008362144470214843, 0.008211104393005372, 0.008179455757141114, 0.008187968254089356, 0.00819219207763672, 0.008200287818908691, 0.008172896385192871, 0.00816368007659912, 0.008124671936035156, 0.008110112190246582, 0.008119520187377929, 0.008125503540039063, 0.008206239700317383, 0.008134400367736817, 0.008142848014831543, 0.00821452808380127, 0.00811030387878418, 0.00812332820892334, 0.008100895881652833, 0.008117440223693848, 0.008132287979125977, 0.00812758445739746, 0.008120160102844238, 0.008126144409179687, 0.008118623733520508, 0.008099807739257812, 0.008123456001281738, 0.008212800025939942, 0.008485183715820313, 0.008182080268859863, 0.008221952438354493, 0.008174112319946289, 0.008184032440185547, 0.008138751983642578, 0.008148192405700683, 0.008131391525268554, 0.008126432418823243, 0.008511487960815429, 0.008134655952453614, 0.008175423622131348, 0.00824284839630127, 0.00819974422454834, 0.00794371223449707, 0.008242655754089355, 0.008131808280944824, 0.00821008014678955, 0.008196096420288086, 0.008224896430969238, 0.00840294361114502, 0.009002335548400879, 0.008178367614746093, 0.008429439544677734, 0.00823423957824707, 0.008524928092956543, 0.00860745620727539, 0.00824300765991211, 0.00819219207763672, 0.00830025577545166, 0.008226655960083007, 0.00819859218597412, 0.008204287528991699, 0.008138303756713866, 0.008190400123596191, 0.0082107515335083, 0.008265215873718262, 0.00817580795288086, 0.008130463600158691, 0.008201760292053223, 0.008237631797790527, 0.00818777561187744, 0.008236895561218262, 0.008160927772521972, 0.00814252758026123, 0.008170432090759277, 0.008224960327148437, 0.008175423622131348, 0.008226431846618653, 0.008116607666015624, 0.008184927940368653, 0.008195232391357422, 0.008195743560791016, 0.008231007575988769, 0.008209728240966796, 0.00818073558807373, 0.008318655967712403, 0.00821452808380127, 0.00815833568572998, 0.008133440017700196, 0.00820025634765625, 0.008156319618225098, 0.008198656082153321, 0.008166943550109863, 0.00821724796295166, 0.008185279846191407, 0.008341279983520508, 0.008203231811523437, 0.008171487808227539, 0.00815708827972412, 0.008106304168701171, 0.008163104057312012, 0.00818380832672119, 0.008162816047668458, 0.008126976013183594, 0.008087552070617676, 0.008204383850097656, 0.008049471855163574, 0.00820201587677002, 0.008241375923156739, 0.008202239990234375, 0.008163328170776368, 0.008224767684936523, 0.00819814395904541, 0.008191264152526856, 0.008147680282592773, 0.008183615684509277, 0.008359711647033691, 0.008220191955566405, 0.008254528045654297, 0.008200032234191895, 
0.008489024162292481, 0.008301535606384277, 0.008293087959289552, 0.008213888168334961, 0.008215392112731933, 0.008242815971374511, 0.008151424407958985, 0.008237055778503418, 0.008208415985107421, 0.008200063705444335, 0.008777312278747559, 0.008157407760620117, 0.00820678424835205, 0.008202239990234375, 0.008173407554626465, 0.008130335807800293, 0.00810371208190918, 0.008225215911865234, 0.008136704444885253, 0.008135935783386231, 0.00813043212890625, 0.008069888114929199, 0.008124832153320313, 0.008123488426208495, 0.008144767761230468, 0.008108192443847657, 0.008137248039245605, 0.008183872222900391, 0.008263680458068847, 0.008316415786743164, 0.008102399826049805, 0.008113696098327636, 0.008114912033081055, 0.008120063781738282, 0.008132384300231934, 0.008156607627868653, 0.008125215530395509, 0.008155232429504394, 0.00816528034210205, 0.008162655830383302, 0.008276543617248534, 0.008107775688171387, 0.008111776351928711, 0.00813535976409912, 0.008118399620056152, 0.008144800186157226, 0.008089119911193848, 0.008118720054626464, 0.008310784339904785, 0.007857024192810058, 0.008142463684082031, 0.008172063827514649, 0.008173279762268067, 0.008099840164184571, 0.008193280220031738, 0.008145567893981933, 0.008154303550720214, 0.008158207893371582, 0.008185888290405273, 0.008216447830200195, 0.008159232139587403, 0.008177663803100586, 0.008131679534912109, 0.008088255882263183, 0.008140512466430664, 0.008172032356262206, 0.008142304420471192, 0.008761887550354003, 0.00818124771118164, 0.008134400367736817, 0.008297375679016113, 0.008185407638549805, 0.008140352249145508, 0.008122591972351075, 0.00813212776184082, 0.008137696266174317, 0.008137951850891114, 0.00814566421508789, 0.008195391654968262, 0.008139488220214843, 0.008124223709106445, 0.0081778564453125, 0.008153247833251952, 0.008208224296569824, 0.0081779203414917, 0.008138496398925782, 0.008181759834289551, 0.008138751983642578, 0.008164735794067382, 0.008094400405883788, 0.008128095626831054, 0.008200544357299805, 0.008160896301269531, 0.008174304008483887, 0.008103775978088378, 0.008247200012207032, 0.008212672233581544, 0.008176383972167968, 0.00821491241455078, 0.00821116828918457, 0.008140671730041503, 0.008209664344787598, 0.008181535720825195, 0.008201184272766114, 0.008225919723510743, 0.008145855903625489, 0.00820627212524414, 0.008267775535583496, 0.00822265625, 0.00823078441619873, 0.008150495529174805, 0.00820297622680664, 0.007864319801330566, 0.008192095756530762, 0.008216704368591309, 0.008675104141235351, 0.008226816177368163, 0.008223967552185059, 0.008223520278930665, 0.00821987247467041, 0.008173600196838379, 0.008525759696960448, 0.00832800006866455, 0.00820143985748291, 0.008215104103088379, 0.008276191711425782, 0.008202112197875977, 0.008156800270080566, 0.008159744262695312, 0.008154208183288575, 0.008272671699523925, 0.008164959907531738, 0.008116095542907716, 0.008106623649597168, 0.008097887992858887, 0.008075072288513183, 0.008101152420043946, 0.00810694408416748, 0.008218879699707032, 0.008115296363830566, 0.00814303970336914, 0.008128895759582519, 0.008175616264343261, 0.008122528076171875, 0.008183648109436035, 0.008179167747497558, 0.00815772819519043, 0.008431839942932128, 0.008146495819091796, 0.008261856079101562, 0.008128512382507324, 0.008101856231689453, 0.008153087615966797, 0.008109408378601074, 0.008095775604248047, 0.008191871643066406, 0.008099807739257812, 0.008074175834655762, 0.008134528160095215, 0.008123776435852052, 0.00821679973602295, 0.008114591598510742, 0.00812451171875, 
0.008125823974609375, 0.008136544227600098, 0.008204416275024414, 0.008133184432983399, 0.008306048393249511, 0.008128512382507324, 0.008356351852416993, 0.008134976387023925, 0.008106975555419922, 0.008097920417785644, 0.008073951721191406, 0.008118271827697754, 0.00782972812652588, 0.008163392066955566, 0.008195455551147462, 0.008165087699890137, 0.008154175758361816, 0.008348671913146973, 0.008125311851501464, 0.008134655952453614, 0.008158207893371582, 0.008147583961486817, 0.008120512008666992, 0.008126848220825196, 0.008178848266601563, 0.008267583847045899, 0.00826249599456787, 0.008320735931396485, 0.008241439819335937, 0.00817471981048584, 0.008196991920471191, 0.008203392028808593, 0.008170368194580079, 0.008142720222473145, 0.008189503669738769, 0.008172096252441407, 0.008122367858886719, 0.00816316795349121, 0.008122400283813476, 0.008147071838378907, 0.008129631996154785, 0.008192928314208984, 0.008104191780090333, 0.008111104011535645, 0.008097887992858887, 0.008144960403442383, 0.008122624397277832, 0.008114399909973145, 0.008064703941345215, 0.008198528289794922, 0.008147007942199707, 0.008144864082336426, 0.008123583793640136, 0.008090208053588867, 0.008116479873657226, 0.0081112642288208, 0.008129504203796387, 0.008181856155395508, 0.008083231925964355, 0.00832921600341797, 0.008162752151489257, 0.008146688461303711, 0.008129343986511231, 0.00808454418182373, 0.008239839553833008, 0.008150527954101563, 0.008173343658447265, 0.008168383598327637, 0.008133631706237793, 0.008157376289367677, 0.008321727752685547, 0.008325247764587403, 0.008165375709533691, 0.008140128135681152, 0.008165472030639649, 0.008171520233154296, 0.008586432456970215, 0.008208288192749023, 0.008407967567443848, 0.008249343872070313, 0.008310336112976074, 0.008189663887023926, 0.008239839553833008, 0.008209471702575683, 0.008221632003784179, 0.00817750358581543, 0.008155296325683594, 0.008177663803100586, 0.008177663803100586, 0.008169471740722656, 0.008177663803100586, 0.008170623779296875, 0.008171903610229491, 0.008171648025512694, 0.00814732837677002, 0.00814463996887207, 0.008113471984863282, 0.008178624153137207, 0.008166496276855468, 0.008121248245239257, 0.008156224250793457, 0.008125375747680665, 0.008425503730773927, 0.008427647590637207, 0.008210271835327148, 0.008177023887634277, 0.008143487930297852, 0.008144895553588867, 0.008134655952453614, 0.008099840164184571, 0.008157183647155761, 0.008103360176086425, 0.008079936027526855, 0.00808140754699707, 0.008089695930480957, 0.008102944374084473, 0.008112064361572266, 0.00810694408416748, 0.008140800476074218, 0.008234944343566895, 0.008407103538513183, 0.008253727912902832, 0.008239232063293456, 0.008205920219421388, 0.008180768013000488, 0.008262207984924316, 0.008124832153320313, 0.008112128257751466, 0.00816256046295166, 0.008137151718139649, 0.008152864456176757, 0.00808739185333252, 0.008096447944641113, 0.008130208015441894, 0.008151391983032226, 0.008204256057739258, 0.008159296035766601, 0.008240575790405274, 0.007837664127349854, 0.008187456130981445, 0.008118271827697754, 0.00811894416809082, 0.008258848190307618, 0.008165696144104003, 0.008083871841430664, 0.008135711669921875, 0.008096735954284668, 0.008144479751586914, 0.008144831657409668, 0.00908086395263672, 0.008413311958312988, 0.00821241569519043, 0.008179327964782716, 0.008256256103515625, 0.00818569564819336, 0.008159392356872559, 0.008249343872070313, 0.00818995189666748, 0.008181759834289551, 0.00825654411315918, 0.008147711753845216, 0.008171744346618652, 
0.00820633602142334, 0.00818284797668457, 0.008143487930297852, 0.008126784324645996, 0.00821452808380127, 0.008159008026123047, 0.008209856033325195, 0.00818665599822998, 0.008132800102233887, 0.008144479751586914, 0.008187456130981445, 0.008176287651062012, 0.008142848014831543, 0.008119647979736327, 0.008157216072082519, 0.008182175636291504, 0.008220159530639648, 0.008264415740966797, 0.00818511962890625, 0.008280768394470214, 0.008177696228027343, 0.008179455757141114, 0.00822707176208496, 0.008163328170776368, 0.00812764835357666, 0.008184160232543945, 0.008239104270935058, 0.008226943969726562, 0.008364640235900878, 0.008179488182067872, 0.008443552017211914, 0.008225312232971191, 0.008338720321655273, 0.008202783584594727, 0.008224767684936523, 0.008131967544555665, 0.008160127639770507, 0.008191743850708007, 0.00851916790008545, 0.007955615997314452, 0.008229727745056153, 0.008196096420288086, 0.008210432052612305, 0.00819814395904541, 0.008198399543762207, 0.00821945571899414, 0.008215583801269532, 0.00815056037902832, 0.008277664184570313, 0.00826038360595703, 0.008263872146606446, 0.008238847732543946, 0.008173567771911621, 0.008189311981201172, 0.008179455757141114, 0.008221728324890137, 0.008171263694763184, 0.008134752273559571, 0.008213567733764648, 0.008172479629516602, 0.008378111839294433, 0.00821609592437744, 0.008114591598510742, 0.00816160011291504, 0.008191519737243653, 0.008182144165039063, 0.008193792343139649, 0.008184160232543945, 0.008144448280334473, 0.008169152259826661, 0.008168416023254394, 0.00815078353881836, 0.008187264442443847, 0.00819267177581787, 0.00813270378112793, 0.008302495956420899, 0.00818819236755371, 0.008217408180236816, 0.008133760452270508, 0.008136544227600098, 0.008256575584411622, 0.008192895889282226, 0.008482815742492676, 0.00819814395904541, 0.00819324779510498, 0.008174367904663086, 0.008177760124206544, 0.008152095794677735, 0.008141695976257324, 0.008157183647155761, 0.008148991584777832, 0.008169024467468262, 0.00815993595123291, 0.00821555233001709, 0.008159999847412109, 0.008116224288940429, 0.00811616039276123, 0.008147007942199707, 0.008136256217956543, 0.008134464263916015, 0.008110367774963378, 0.008698399543762208, 0.008474176406860351, 0.00820473575592041, 0.008269824028015137, 0.008189696311950684, 0.00817353630065918, 0.008194527626037597, 0.00815283203125, 0.008135871887207031, 0.008250240325927735, 0.008155136108398438, 0.008152799606323242, 0.008188032150268554, 0.008175071716308593, 0.008186688423156738, 0.008185728073120117, 0.008237055778503418, 0.008208191871643067, 0.00816579246520996, 0.008146143913269044, 0.008186079978942872, 0.00817187213897705, 0.008130559921264649, 0.008151040077209473, 0.008138751983642578, 0.008118304252624511, 0.008204256057739258, 0.008101887702941894, 0.008218784332275391, 0.008183648109436035, 0.008127903938293457, 0.00810863971710205, 0.008093695640563964, 0.008088576316833495, 0.008068063735961913, 0.008150879859924316, 0.008116415977478027, 0.008089599609375, 0.00808902359008789, 0.008073792457580567, 0.00821452808380127, 0.008124256134033203, 0.008071552276611329, 0.008095295906066894, 0.008084768295288087, 0.00813766384124756, 0.008069120407104492, 0.008114175796508789, 0.0081746244430542, 0.00812335968017578, 0.008191647529602051, 0.010514687538146973, 0.009141599655151367, 0.010855104446411133, 0.009269280433654785, 0.00821001625061035, 0.008305184364318848, 0.008513440132141113, 0.008340831756591796, 0.008216416358947754, 0.008340288162231445, 0.009181183815002441, 
0.009244959831237793, 0.009362624168395997, 0.008247167587280273, 0.008253439903259278, 0.008265695571899413, 0.008263839721679687, 0.008244416236877442, 0.008235391616821288, 0.008208928108215332, 0.008170271873474121, 0.0081725435256958, 0.008107232093811036, 0.008151359558105468, 0.008231488227844239, 0.00819600009918213, 0.008316927909851075, 0.008144895553588867, 0.008220671653747558, 0.008232959747314453, 0.008218624114990235, 0.008220671653747558, 0.008187904357910156, 0.008171199798583984, 0.008198464393615722, 0.008216768264770509, 0.008150815963745117, 0.0081428804397583, 0.008182784080505372, 0.008203264236450195, 0.0082608642578125, 0.00824169635772705, 0.008218815803527832, 0.008158592224121094, 0.008155808448791504, 0.008194047927856446, 0.008173567771911621, 0.008226592063903809, 0.008339679718017579, 0.008226943969726562, 0.00822054386138916, 0.008316736221313476, 0.008378560066223145, 0.008369343757629395, 0.008266559600830078, 0.008263551712036132, 0.008226719856262207, 0.008255711555480957, 0.008143136024475098, 0.008174431800842286, 0.008177887916564942, 0.008155808448791504, 0.008150400161743164, 0.008163552284240722, 0.008153504371643066, 0.008216480255126953, 0.008237248420715331, 0.008178688049316407, 0.008236031532287597, 0.008141983985900879, 0.008134592056274413, 0.008177727699279784, 0.008171296119689942, 0.00814355182647705, 0.00809603214263916, 0.008122367858886719, 0.007876895904541015, 0.008214112281799316, 0.00829430389404297, 0.008229056358337402, 0.008187423706054688, 0.008145088195800782, 0.00819264030456543, 0.008173088073730468, 0.008181535720825195, 0.008207103729248048, 0.00811513614654541, 0.008233792304992675, 0.008167424201965333, 0.008210432052612305, 0.008167424201965333, 0.008167424201965333, 0.008169376373291015, 0.008157247543334962, 0.008162431716918946, 0.00817251205444336, 0.008128383636474609, 0.008191647529602051, 0.008176223754882812, 0.00816646385192871, 0.008192768096923828, 0.008161279678344726, 0.008129728317260743, 0.008233792304992675, 0.008179519653320312, 0.008181952476501464, 0.008120223999023438, 0.008195712089538574, 0.008175904273986817, 0.008163583755493165, 0.008157119750976563, 0.00870201587677002, 0.009855263710021973, 0.008334272384643555, 0.008205023765563965, 0.008510848045349122, 0.008395615577697755, 0.00828544044494629, 0.008162015914916993, 0.00822383975982666, 0.008999648094177247, 0.008228384017944335, 0.00815561580657959, 0.008371392250061034, 0.008194592475891114, 0.008458527565002441, 0.00827132797241211, 0.008186176300048828, 0.008172831535339355, 0.008178624153137207, 0.00819814395904541, 0.00819200038909912, 0.008269344329833984, 0.008120384216308594, 0.008096192359924316, 0.008184896469116212, 0.008158368110656738, 0.008224512100219726, 0.008208383560180664, 0.007877024173736572, 0.008187487602233886, 0.008257599830627441, 0.0083056640625, 0.008277664184570313, 0.008140735626220703, 0.008778240203857422, 0.008170720100402831, 0.008150752067565919, 0.008143775939941407, 0.008138175964355468, 0.008229120254516602, 0.008202560424804687, 0.008202495574951172, 0.008146688461303711, 0.008136128425598145, 0.008210463523864746, 0.008124416351318359, 0.0082008638381958, 0.008340607643127442, 0.00810694408416748, 0.008165184020996094, 0.008113183975219727, 0.008100831985473633, 0.008120320320129394, 0.008118304252624511, 0.008107999801635742, 0.008102144241333008, 0.008500991821289062, 0.008101216316223145, 0.008106656074523926, 0.008128512382507324, 0.008097536087036132, 0.008128895759582519, 0.00813804817199707, 
0.008157695770263672, 0.008142911911010742, 0.00818502426147461, 0.008092576026916504, 0.008122271537780761, 0.008318592071533203, 0.008135040283203126, 0.008082943916320801, 0.008105567932128906, 0.008156064033508301, 0.008124416351318359, 0.00810208034515381, 0.008116191864013671, 0.008091296195983887, 0.008183327674865723, 0.008161312103271484, 0.00814684772491455, 0.008127264022827149, 0.00816153621673584, 0.0081079683303833, 0.008096927642822266, 0.008065631866455078, 0.00826527976989746, 0.008182239532470704, 0.008121503829956054, 0.008098624229431152, 0.008126367568969726, 0.008136799812316894, 0.007851744174957275, 0.008151328086853028, 0.00811030387878418, 0.0081364803314209, 0.00811580753326416, 0.008135071754455566, 0.008157183647155761, 0.008140992164611817, 0.00808448028564453, 0.008104767799377442, 0.008151040077209473, 0.008187104225158691, 0.008181535720825195, 0.008266336441040039, 0.008266400337219239, 0.008146688461303711, 0.008146944046020508, 0.008116191864013671, 0.008123968124389648, 0.008110207557678223, 0.008098143577575684, 0.0081080322265625, 0.008145983695983888, 0.008090368270874023, 0.00809388828277588, 0.008101375579833984, 0.008096256256103516, 0.008101887702941894, 0.008111968040466308, 0.0081245756149292, 0.008151007652282714, 0.008137951850891114, 0.008184576034545898, 0.008177727699279784, 0.008148544311523438, 0.008118720054626464, 0.008103967666625977, 0.00811030387878418, 0.008142080307006836, 0.008200096130371095, 0.008134943962097168, 0.008113727569580079, 0.008121088027954101, 0.008118271827697754, 0.008147199630737305, 0.008134143829345703, 0.008170751571655274, 0.00813491153717041, 0.008135007858276368, 0.008135071754455566, 0.008687616348266602, 0.010086400032043457, 0.008591360092163085, 0.00851353645324707, 0.008222847938537598, 0.008286368370056153, 0.00821168041229248, 0.008196576118469238, 0.008214559555053711, 0.008220383644104004, 0.008193856239318847, 0.008306431770324706, 0.008157855987548828, 0.007888959884643556, 0.008187840461730957, 0.008142304420471192, 0.008177375793457032, 0.008141183853149414, 0.008117888450622559, 0.008104479789733887, 0.008073504447937013, 0.008089599609375, 0.00810524845123291, 0.008106656074523926, 0.008101951599121094, 0.008214431762695313, 0.008114463806152343, 0.008107392311096192, 0.008186047554016113, 0.008261088371276856, 0.008242176055908204, 0.00819968032836914, 0.008180000305175782, 0.008134655952453614, 0.00820633602142334, 0.008194047927856446, 0.008210432052612305, 0.008148672103881836, 0.008182080268859863, 0.00823628807067871, 0.00813759994506836, 0.00812224006652832, 0.008110079765319824, 0.008092896461486816, 0.008117119789123535, 0.008160160064697266, 0.008182720184326172, 0.008168512344360352, 0.00810051155090332, 0.008099231719970703, 0.00808620834350586, 0.00812057590484619, 0.008091103553771973, 0.008196800231933594, 0.008089119911193848, 0.008103520393371581, 0.00816982364654541, 0.008081791877746582, 0.008087039947509766, 0.008087936401367188, 0.00808681583404541, 0.008101792335510253, 0.008092896461486816, 0.008179424285888671, 0.008160896301269531, 0.008139328002929687, 0.008126272201538085, 0.008095487594604493, 0.008126720428466797, 0.008115327835083008, 0.008123231887817382, 0.008075039863586425, 0.008145055770874023, 0.008122464179992676, 0.008112128257751466, 0.008101887702941894, 0.007831456184387207, 0.008129247665405273, 0.008136799812316894, 0.008130816459655762, 0.00815283203125, 0.008118399620056152, 0.008188063621520997, 0.008129728317260743, 0.00814089584350586, 
0.008157631874084473, 0.008150752067565919, 0.008183615684509277, 0.008130335807800293, 0.008141504287719727, 0.008134655952453614, 0.008095775604248047, 0.008120287895202636, 0.008132479667663573, 0.00811631965637207, 0.008125663757324219, 0.00810582447052002, 0.00813974380493164, 0.008168959617614746, 0.00809830379486084, 0.008074655532836914, 0.00830303955078125, 0.008138784408569336, 0.008113984107971192, 0.008102144241333008, 0.008147007942199707, 0.00812172794342041, 0.008124095916748047, 0.008107232093811036, 0.008113887786865235, 0.008130592346191406, 0.008119999885559082, 0.008069343566894531, 0.008163392066955566, 0.008032608032226562, 0.008091296195983887, 0.008111136436462402, 0.008109024047851562, 0.008136832237243652, 0.008093440055847168, 0.008087103843688964, 0.00809017562866211, 0.008070719718933106, 0.00812828826904297, 0.008132512092590333, 0.008115232467651367, 0.008103327751159668, 0.008106176376342773, 0.008165632247924805, 0.008097536087036132, 0.008126591682434082, 0.00818620777130127, 0.008113823890686035, 0.008193663597106934, 0.00811251163482666, 0.008134655952453614, 0.008170623779296875, 0.008106528282165527, 0.00809171199798584, 0.007835391998291016, 0.00812940788269043, 0.008191712379455566, 0.008157024383544922, 0.008108480453491211, 0.008226816177368163, 0.008120320320129394, 0.008134304046630859, 0.008152704238891601, 0.008118016242980957, 0.008109024047851562, 0.008157183647155761, 0.008935423851013183, 0.008128512382507324, 0.008122367858886719, 0.00840499210357666, 0.008167519569396972, 0.008161215782165528, 0.008112095832824706, 0.00813372802734375, 0.008608672142028808, 0.008157376289367677, 0.008197952270507812, 0.008179583549499513, 0.008192319869995118, 0.008116031646728515, 0.008083456039428711, 0.008113727569580079, 0.00812076759338379, 0.008199551582336425, 0.008147583961486817, 0.008210432052612305, 0.00816767978668213, 0.008166560173034668, 0.008173855781555175, 0.008159232139587403, 0.00814742374420166, 0.00820364761352539, 0.00817404842376709, 0.008088640213012695, 0.00809670352935791, 0.0082227201461792, 0.008111488342285157, 0.008137344360351562, 0.008109760284423829, 0.00809507179260254, 0.008158176422119141, 0.008260671615600585, 0.008124383926391601, 0.008201055526733399, 0.008099200248718262, 0.008108415603637696, 0.008163711547851563, 0.008217599868774414, 0.008130784034729005, 0.00813913631439209, 0.008122783660888672, 0.008287615776062011, 0.008133472442626953, 0.008144672393798829, 0.008138848304748534, 0.008175519943237305, 0.008140480041503906, 0.007821119785308838, 0.00823040008544922, 0.008139264106750489, 0.008174783706665039, 0.008147392272949219, 0.008181792259216308, 0.008223072052001953, 0.008349696159362792, 0.008172831535339355, 0.00820911979675293, 0.00821571159362793, 0.008164192199707031, 0.008177151679992676, 0.00824723243713379, 0.008187520027160644, 0.008237728118896484, 0.00819638442993164, 0.008189663887023926, 0.008167967796325684, 0.008408831596374513, 0.008169471740722656, 0.008168704032897949, 0.008135423660278321, 0.008123744010925292, 0.008136832237243652, 0.00814134407043457, 0.00819814395904541, 0.008138912200927734, 0.008136384010314942, 0.008176032066345216, 0.00816921615600586, 0.008158944129943847, 0.008133919715881347, 0.008129535675048828, 0.008124704360961915, 0.008177375793457032, 0.008120223999023438, 0.008144991874694824, 0.008208383560180664, 0.008172608375549317, 0.008171680450439454, 0.008224767684936523, 0.008182880401611328, 0.008174304008483887, 0.008184800148010254, 0.008173567771911621, 
0.008127872467041015, 0.008153727531433105, 0.008130304336547852, 0.008507967948913574, 0.008652480125427246, 0.008232959747314453, 0.008166751861572265, 0.008182432174682617, 0.00818995189666748, 0.008267200469970704, 0.008276576042175294, 0.008202207565307618, 0.008203904151916503, 0.008299936294555664, 0.008177791595458984, 0.008203104019165039, 0.008165375709533691]",tokens/s,122.04958024327257,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,5468.495872,3850.24,0.0,3470.78656,3271.993344,s,1,17.07706640625,17.07706640625,0.0,17.07706640625,17.07706640625,17.07706640625,17.07706640625,[17.07706640625],,kWh,0.00024485292043332834,2.70015502783384e-05,7.872172964399834e-05,0.0003505762003556651,,MB,2224.61952,3938.320384,0.0,3512.7296,3298.469376,s,10,1.2747375106811523,0.12747375106811523,0.0006476287113536839,0.12722237014770507,0.1286111114501953,0.12870070495605468,0.12877237976074218,"[0.12708051300048828, 0.12859120178222655, 0.12879029846191406, 0.1270567398071289, 0.12761638641357423, 0.12737996673583984, 0.12682272338867187, 0.126980224609375, 0.12705522918701173, 0.12736422729492186]",tokens/s,2008.2565850220187,kWh,3.741285398397449e-06,4.125981629144357e-07,1.860752913102531e-06,6.014636474414416e-06,tokens/kWh,42562838.35091199,MB,2228.871168,3967.680512,0.0,3542.089728,3284.027904,s,10,78.08365673828125,7.808365673828125,0.013971841807828267,7.807783203125,7.829096142578125,7.830091284179687,7.830887397460938,"[7.81824560546875, 7.828875, 7.80168408203125, 7.7902841796875, 7.80705029296875, 7.80357275390625, 7.83108642578125, 7.80851611328125, 7.785537109375, 7.80880517578125]",tokens/s,8.068269677886851,kWh,0.00022801514152868653,2.5151163603592476e-05,8.621915088409802e-05,0.00033938545601637705,tokens/kWh,185629.63993648547,,s,630,78.08204837799072,0.12393975933014402,0.0010987563344655577,0.12367025756835938,0.12490686721801758,0.1259447868347168,0.12853412002563477,"[0.12596182250976562, 0.12414627075195313, 0.123863037109375, 0.1240033950805664, 0.1241258544921875, 0.12362371063232422, 0.12386815643310548, 0.12435887908935547, 0.1230928955078125, 0.12373696136474609, 0.12306841278076172, 0.12362751770019531, 0.12429721832275391, 0.12346768188476563, 0.12326668548583984, 0.12400016021728516, 0.12411248016357422, 0.12451119995117188, 0.12400220489501954, 0.12435779571533204, 0.12433708953857422, 0.12389311981201172, 0.12592396545410156, 0.12459212493896485, 0.12318224334716797, 0.12352393341064453, 0.12655010986328125, 0.125446044921875, 0.12357427215576172, 0.12374201965332031, 0.12374559783935547, 0.12378524780273438, 0.12346864318847656, 0.12354742431640625, 0.12325247955322266, 0.1237318115234375, 0.12321241760253906, 0.12305942535400391, 0.12326377868652344, 0.12480124664306641, 0.12336492919921875, 0.12568809509277343, 0.12331001281738281, 0.12316671752929688, 0.1241313247680664, 0.12558745574951172, 0.12377503967285156, 
0.1250564193725586, 0.12501251220703125, 0.12434342193603516, 0.12424691009521484, 0.12572057342529297, 0.12406374359130859, 0.12446924591064454, 0.1237475814819336, 0.1250414047241211, 0.12384815979003906, 0.12411074829101562, 0.12354009246826173, 0.12386713409423829, 0.12351078033447266, 0.12445670318603516, 0.12390425872802735, 0.12677120208740233, 0.12495040130615234, 0.1236337890625, 0.12388556671142578, 0.12382412719726563, 0.12436614227294922, 0.12363811492919922, 0.12536624145507813, 0.12383679962158203, 0.12411698913574219, 0.12385279846191406, 0.12355075073242187, 0.12413206481933593, 0.12345779418945313, 0.12318105316162109, 0.12415385437011718, 0.12673638153076172, 0.12453199768066406, 0.12369542694091797, 0.12406185913085938, 0.1238202896118164, 0.12417024230957031, 0.12366159820556641, 0.1248038101196289, 0.1236538543701172, 0.12401897430419923, 0.12370726776123046, 0.12363340759277344, 0.12357606506347656, 0.12424870300292969, 0.12335619354248047, 0.12444156646728516, 0.12342066955566407, 0.12514713287353516, 0.12393062591552734, 0.12364185333251954, 0.1237606430053711, 0.12425625610351562, 0.12379135894775391, 0.12453619384765625, 0.1232759017944336, 0.12352716827392578, 0.124368896484375, 0.12810003662109376, 0.12373136138916016, 0.12416912078857421, 0.12383363342285156, 0.12454121398925781, 0.12374060821533203, 0.123936767578125, 0.12387532806396484, 0.12377276611328125, 0.12365020751953125, 0.12436204528808593, 0.12439008331298829, 0.1251920928955078, 0.12414518737792969, 0.12556073760986328, 0.12886082458496093, 0.12396502685546874, 0.12385337829589843, 0.12428067016601563, 0.12426041412353515, 0.12337619018554688, 0.12413337707519531, 0.12332994842529296, 0.12425234985351563, 0.1250533447265625, 0.12370489501953125, 0.12353337860107422, 0.12425663757324219, 0.1234329605102539, 0.12312297821044922, 0.12282147216796875, 0.12304537963867188, 0.12343555450439453, 0.12330499267578125, 0.12426937866210938, 0.127629150390625, 0.12471926116943359, 0.12484524536132813, 0.12338422393798829, 0.12377289581298828, 0.12373651123046875, 0.12393881225585937, 0.12359474945068359, 0.12347090911865234, 0.12389676666259766, 0.12411625671386718, 0.12372246551513671, 0.12327017974853516, 0.12325929260253907, 0.1239878387451172, 0.1234986572265625, 0.12377718353271484, 0.12354528045654296, 0.1256715545654297, 0.12424044799804687, 0.12393071746826172, 0.12357212829589843, 0.12355788421630859, 0.1235968017578125, 0.123655517578125, 0.12374317169189453, 0.1234736328125, 0.12402278137207032, 0.12327731323242187, 0.12298550415039063, 0.12335612487792968, 0.12338531494140625, 0.12351132965087891, 0.12326092529296875, 0.12328726196289062, 0.12569789123535155, 0.12388191986083984, 0.12383641815185546, 0.12433817291259766, 0.12379564666748047, 0.1235986557006836, 0.12350873565673828, 0.12380902099609375, 0.12340505981445313, 0.12354914855957032, 0.12475785827636719, 0.12389798736572266, 0.12368550109863281, 0.12362783813476562, 0.1229639663696289, 0.12400025939941406, 0.12315647888183594, 0.12282816314697266, 0.12325132751464844, 0.12336946868896484, 0.12342864227294922, 0.12425206756591797, 0.12357049560546875, 0.12379743957519532, 0.12532742309570313, 0.12298572540283204, 0.12312006378173829, 0.12348358154296875, 0.1226740493774414, 0.12309503936767578, 0.12323020935058594, 0.1234881591796875, 0.12383650970458984, 0.12345753479003906, 0.1245074234008789, 0.12387619018554688, 0.12366835021972657, 0.12481260681152344, 0.12328006744384766, 0.12332441711425782, 0.12333606719970704, 0.12271475219726563, 
0.12358233642578124, 0.1230931167602539, 0.12371558380126953, 0.12366028594970703, 0.12554351806640626, 0.12598774719238282, 0.1233848648071289, 0.12321891021728516, 0.12314176177978516, 0.12383494567871094, 0.12348521423339844, 0.12463593292236329, 0.12376620483398437, 0.12447187042236328, 0.12455430603027344, 0.12318611145019531, 0.12333280181884766, 0.12323411560058593, 0.12360399627685546, 0.12385584259033203, 0.12381798553466797, 0.12647779083251953, 0.12346217346191406, 0.12368831634521485, 0.12334931182861328, 0.1230421142578125, 0.123109375, 0.12388684844970703, 0.12339891052246094, 0.12307014465332031, 0.12302777862548828, 0.12288409423828126, 0.12328755187988281, 0.12387123107910156, 0.12397731018066406, 0.12407654571533203, 0.12393043518066406, 0.12435260772705078, 0.12349858856201172, 0.1236858901977539, 0.12351795196533204, 0.12243516540527344, 0.12271564483642577, 0.12414249420166015, 0.12445836639404297, 0.12348067474365235, 0.12456467437744141, 0.12433494567871094, 0.1233285140991211, 0.12382822418212891, 0.1237174072265625, 0.1250326690673828, 0.12377919769287109, 0.1241957778930664, 0.12327008056640625, 0.12416928100585937, 0.123560546875, 0.1235479965209961, 0.12318303680419922, 0.12513241577148437, 0.1235849609375, 0.12332032012939453, 0.12610355377197266, 0.1228853759765625, 0.12339884948730469, 0.12461795043945313, 0.12309542083740234, 0.1248749771118164, 0.12412368011474609, 0.12427830505371094, 0.12345977783203126, 0.12491139221191407, 0.12395337677001952, 0.12324044799804687, 0.12286080169677735, 0.1243544921875, 0.12381062316894531, 0.12385689544677735, 0.12309417724609376, 0.12282761383056641, 0.12309257507324219, 0.12434268951416015, 0.12375382232666016, 0.12369068908691407, 0.13144496154785157, 0.12378348541259766, 0.12377449798583984, 0.12403395080566407, 0.12325033569335937, 0.12408048248291016, 0.12356813049316406, 0.1235408935546875, 0.12353353881835938, 0.12400883483886718, 0.12359254455566407, 0.12345359802246093, 0.12337561798095703, 0.12399177551269532, 0.12360118103027344, 0.1257512969970703, 0.12441180419921875, 0.1235804443359375, 0.12365420532226562, 0.1232015380859375, 0.1236684799194336, 0.12315213012695313, 0.12335334777832031, 0.12446217346191406, 0.12359334564208985, 0.12347564697265626, 0.12331053161621094, 0.1231402587890625, 0.12363136291503907, 0.12412953948974609, 0.12262809753417969, 0.1246904296875, 0.12445260620117188, 0.12342444610595703, 0.12339008331298829, 0.12295168304443359, 0.12316031646728516, 0.12315309143066407, 0.1230192642211914, 0.1231619873046875, 0.12321552276611328, 0.12344009399414063, 0.1283987579345703, 0.12351334381103515, 0.12369436645507813, 0.12365907287597656, 0.12313600158691407, 0.12349747467041015, 0.12320051574707032, 0.12377500915527344, 0.12356604766845704, 0.12294553375244141, 0.12411901092529297, 0.12312540435791015, 0.12274140930175781, 0.1231431655883789, 0.12289302062988282, 0.12290636444091797, 0.13067085266113282, 0.12287721252441407, 0.12349104309082032, 0.12300399780273437, 0.12338883209228516, 0.12354287719726563, 0.12426512145996094, 0.12326620483398437, 0.128461669921875, 0.12430054473876953, 0.12460928344726563, 0.12293859100341797, 0.12324944305419921, 0.12392447662353516, 0.12384870147705078, 0.12351590728759766, 0.1262639694213867, 0.124681884765625, 0.12337356567382812, 0.12598886108398438, 0.12328959655761719, 0.12674969482421874, 0.12411186981201172, 0.12350259399414062, 0.1288581085205078, 0.12456339263916015, 0.12378118133544921, 0.12342054748535156, 0.12381327819824219, 
0.12370403289794922, 0.124474365234375, 0.12334611511230469, 0.12777657318115235, 0.12330508422851562, 0.12314060974121094, 0.12320806121826172, 0.1236495361328125, 0.12585215759277343, 0.12431330871582032, 0.12416438293457031, 0.12839730834960938, 0.12331209564208985, 0.1233674545288086, 0.12326297760009766, 0.12391219329833984, 0.12407526397705078, 0.12373209381103516, 0.12281510162353515, 0.1231646728515625, 0.12813909912109375, 0.12305593872070313, 0.1228472671508789, 0.12314246368408203, 0.12474716949462891, 0.12468694305419922, 0.12489043426513671, 0.12448553466796874, 0.1288850555419922, 0.1236496353149414, 0.12357302093505859, 0.12404541015625, 0.12331171417236328, 0.12319785308837891, 0.12336946868896484, 0.12568370819091798, 0.128563232421875, 0.12343907165527344, 0.12246630096435547, 0.12297119903564453, 0.12333971405029297, 0.12391417694091797, 0.12317040252685547, 0.12388201904296875, 0.12369401550292969, 0.1242427520751953, 0.12323654174804688, 0.12388966369628907, 0.12378291320800781, 0.12419200134277343, 0.12676780700683593, 0.12323872375488282, 0.12361305236816406, 0.12399779510498046, 0.12308541107177734, 0.12425142669677734, 0.12434893035888672, 0.12340860748291016, 0.12508489227294922, 0.1239491195678711, 0.12345747375488281, 0.12390684509277344, 0.12374752044677734, 0.1240274887084961, 0.12358882904052734, 0.1234466552734375, 0.12330457305908203, 0.12432921600341797, 0.12314630126953124, 0.12357907104492187, 0.12333609771728515, 0.1264258575439453, 0.12446070098876953, 0.12302150726318359, 0.1234208984375, 0.12340713500976562, 0.12406681823730469, 0.12355548858642579, 0.12378556823730469, 0.12331021118164062, 0.12378125, 0.124395263671875, 0.12336537933349609, 0.12380364990234374, 0.12350669097900391, 0.12409196472167969, 0.12413292694091797, 0.12490636444091797, 0.1274571533203125, 0.12418675231933594, 0.12371513366699219, 0.12341907501220703, 0.12343705749511719, 0.12365526580810547, 0.12416502380371094, 0.12362342071533203, 0.12326044464111328, 0.12382051086425781, 0.12324658966064453, 0.12350035095214844, 0.1231976318359375, 0.12362739562988281, 0.12335020446777344, 0.12350899505615234, 0.124275390625, 0.12361122894287109, 0.12332144165039062, 0.12318931579589844, 0.12365634918212891, 0.12347270202636719, 0.12310095977783203, 0.12356813049316406, 0.12438937377929687, 0.12303564453125, 0.13251280212402344, 0.12298649597167968, 0.12292915344238281, 0.12268544006347656, 0.1232691192626953, 0.12304930877685546, 0.1232093734741211, 0.12311974334716796, 0.12311366271972657, 0.1226943359375, 0.12283289337158203, 0.12248268890380859, 0.12295507049560547, 0.12332275390625, 0.12304621124267578, 0.12496291351318359, 0.12312156677246094, 0.12358844757080079, 0.12334102630615235, 0.1232956771850586, 0.1233469467163086, 0.12350054168701172, 0.12327935791015625, 0.12360256195068359, 0.12419625854492188, 0.12353020477294922, 0.12349440002441406, 0.12319753265380859, 0.12324009704589843, 0.12407014465332031, 0.12304383850097657, 0.12353475189208984, 0.1231319351196289, 0.12292940521240234, 0.1234373779296875, 0.12328726196289062, 0.12316643524169922, 0.1259812469482422, 0.12392867279052734, 0.12388086700439453, 0.123736572265625, 0.12371942138671875, 0.12412735748291015, 0.12352252960205078, 0.12378179168701171, 0.12376863861083984, 0.1243872299194336, 0.12375206756591797, 0.12351299285888671, 0.12358617401123047, 0.1237259521484375, 0.12382080078125, 0.12389155578613281, 0.1258183059692383, 0.1254898910522461, 0.12413951873779297, 0.12418863677978516, 0.12328553771972656, 
0.12362953948974609, 0.12402486419677734, 0.123430908203125, 0.12285132598876954, 0.12367203521728516, 0.12371932983398437, 0.12357657623291016, 0.1236165771484375, 0.12312397003173828, 0.12252381134033204, 0.1232694091796875, 0.12373993682861328, 0.12368508911132813, 0.12353343963623047, 0.12318502044677734, 0.12379923248291015, 0.12315606689453125, 0.12457004547119141, 0.12338579559326172, 0.12331193542480469, 0.12423014068603516, 0.12645938873291016, 0.1233675537109375, 0.12280780792236329, 0.12325497436523437, 0.12314281463623047, 0.123482177734375, 0.12379872131347656, 0.12335801696777343, 0.12350227355957032, 0.12340870666503906, 0.12329574584960938, 0.12344255828857421, 0.12311542510986329, 0.12368073272705078, 0.12298111724853515, 0.1234554901123047, 0.1261563491821289, 0.12361062622070312, 0.12364588928222656, 0.12405862426757812, 0.12364720153808594, 0.12324508666992187, 0.12501814270019532, 0.12405753326416015, 0.12442447662353516, 0.12534957122802734, 0.12401625823974609, 0.12371218872070312, 0.12331622314453125, 0.12409235382080078, 0.12387538909912109, 0.12350669097900391, 0.12358246612548827, 0.12386099243164063, 0.12372787475585938, 0.12357017517089844, 0.12647014617919922, 0.1241190414428711, 0.12846284484863282, 0.12413337707519531, 0.12412723541259765, 0.12471234893798828, 0.12429488372802734, 0.12442918395996094, 0.12443590545654297, 0.12417081451416015, 0.12444892883300782, 0.12510128021240235]",tokens/s,8.068435870818936,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, 
out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,945.692672,2820.538368,0.0,2418.016256,2409.87136,s,1,11.5556484375,11.5556484375,0.0,11.5556484375,11.5556484375,11.5556484375,11.5556484375,[11.5556484375],,kWh,8.662707092083792e-05,9.548025514360794e-06,2.7874188966003216e-05,0.00012404928540120193,,MB,1494.601728,3007.184896,0.0,2589.98272,2552.616448,s,10,3.076612457275391,0.30766124572753906,0.0007127783532832162,0.30770518493652343,0.30848175048828125,0.3086775817871094,0.3088342468261719,"[0.3068743896484375, 0.3068748779296875, 0.30785513305664064, 0.30755523681640623, 0.30806768798828127, 0.3072139892578125, 0.3088734130859375, 0.30821990966796875, 0.30663958740234376, 0.308438232421875]",tokens/s,832.0840000326541,kWh,9.14059669414057e-06,1.0080443790828132e-06,6.096862516374976e-06,1.624550358959836e-05,tokens/kWh,15758206.483910488,MB,1495.32672,3036.545024,0.0,2619.342848,2559.606272,s,10,42.8304765625,4.28304765625,0.018074607873886598,4.27612353515625,4.308916210937499,4.3155528320312495,4.320862128906249,"[4.322189453125, 4.30744140625, 4.29376171875, 4.27714892578125, 4.27509814453125, 4.27111376953125, 4.281681640625, 4.27269921875, 4.26433056640625, 4.26501171875]",tokens/s,14.709152233706249,kWh,0.00012490427632502637,1.3777420492577546e-05,6.0148329368625114e-05,0.00019883002618622904,tokens/kWh,316853.5517919847,,s,630,42.82889181518554,0.06798236796061197,0.000884476628352267,0.06788801574707032,0.06874802322387695,0.06919595909118652,0.07139528945922852,"[0.0696657943725586, 0.06823487854003907, 0.06771750640869141, 0.0689623031616211, 0.06920105743408203, 0.06902169799804687, 0.0683221435546875, 0.06883433532714844, 0.06821737670898438, 0.06827804565429688, 0.06820496368408203, 0.06828166198730469, 0.06769145965576172, 0.06874726104736328, 0.06866534423828125, 0.06847078704833984, 0.06877798461914063, 0.06857318115234375, 0.0681553955078125, 0.0681816635131836, 0.06822537231445312, 0.06834166717529297, 0.06847232055664063, 0.06792253112792969, 0.06769657897949219, 0.0680830078125, 0.06798131561279297, 0.06833638763427734, 0.06796492767333985, 0.06859161376953125, 0.06849906921386718, 0.06921254730224609, 0.06799565124511718, 0.068392578125, 0.07015257263183594, 0.06972415924072266, 0.06885785675048828, 0.06808313751220703, 0.06790630340576172, 0.06823916625976563, 0.06824678039550781, 0.06839091491699219, 0.06867225646972656, 0.0687267837524414, 0.06833561706542969, 0.06881279754638672, 0.06823117065429687, 0.06871449279785156, 0.06881190490722656, 0.06817817687988281, 0.06809049224853515, 0.06922137451171875, 0.07840393829345703, 0.06862095642089844, 0.06850899505615235, 0.06833392333984376, 0.06819055938720703, 0.06794409942626953, 0.06785673522949219, 0.0682239990234375, 0.0679141082763672, 0.06804544067382813, 0.06866534423828125, 
0.06889676666259766, 0.068421630859375, 0.06835529327392578, 0.06822582244873047, 0.06847020721435547, 0.06851846313476563, 0.06882303619384765, 0.06843328094482422, 0.0680630111694336, 0.0682127685546875, 0.06809683227539062, 0.06838854217529297, 0.06857472229003907, 0.06864361572265625, 0.06806495666503906, 0.06829430389404297, 0.06844483184814452, 0.06858879852294922, 0.06791999816894531, 0.06754780578613281, 0.06821587371826172, 0.07165609741210938, 0.06835433959960938, 0.06822502136230468, 0.06810173034667968, 0.06823056030273437, 0.06765366363525391, 0.06837347412109375, 0.06821196746826172, 0.06973312377929687, 0.06824960327148437, 0.06825727844238282, 0.06794700622558594, 0.06890262603759766, 0.06778089904785156, 0.06785433959960938, 0.0685068817138672, 0.06853024291992188, 0.06808585357666015, 0.06834236907958985, 0.06853836822509765, 0.06810963439941406, 0.06795334625244141, 0.06871449279785156, 0.06898416137695312, 0.06772342681884766, 0.06769110107421875, 0.06790464019775391, 0.06772611236572265, 0.06803852844238281, 0.06741788482666015, 0.06726604461669922, 0.07037564849853516, 0.06853443145751953, 0.0684200668334961, 0.06801622772216796, 0.0680278091430664, 0.06794601440429687, 0.0695440673828125, 0.06829142761230468, 0.06874428558349609, 0.06884793853759766, 0.06826649475097656, 0.0697580795288086, 0.06921920013427735, 0.06905241394042969, 0.06904444885253906, 0.06819612884521484, 0.06840697479248047, 0.06823763275146484, 0.06853632354736328, 0.06838272094726562, 0.06889676666259766, 0.06876774597167969, 0.0682015380859375, 0.068583740234375, 0.06841139221191406, 0.07049868774414063, 0.06917068481445313, 0.06885359954833985, 0.0696493148803711, 0.06916476440429688, 0.06862448120117187, 0.06799993896484376, 0.06833487701416016, 0.06792265319824219, 0.06812671661376953, 0.06803660583496093, 0.06816563415527344, 0.06860969543457031, 0.06831958770751953, 0.06687744140625, 0.06741356658935548, 0.06800224304199219, 0.06833152008056641, 0.06753689575195312, 0.06734585571289063, 0.06755490875244141, 0.06748806762695313, 0.06765020751953126, 0.0675031967163086, 0.0672685775756836, 0.06748595428466797, 0.06716700744628906, 0.06759410858154297, 0.06822300720214844, 0.06750985717773438, 0.06788137817382812, 0.06888243103027344, 0.0687083511352539, 0.06854208374023438, 0.06798169708251953, 0.0678536605834961, 0.06843682861328125, 0.0676220474243164, 0.06729590606689453, 0.06727680206298828, 0.07142400360107422, 0.06736077117919922, 0.06727782440185547, 0.06690006256103516, 0.06668994903564453, 0.06698598480224609, 0.06755718231201172, 0.0673589096069336, 0.06744678497314453, 0.06916118621826171, 0.06795037078857422, 0.06795462036132813, 0.06812025451660156, 0.06759779357910156, 0.06697615814208985, 0.06732412719726563, 0.06778498840332031, 0.06763212585449219, 0.06815872192382813, 0.06741532897949219, 0.06795132446289062, 0.06768614196777344, 0.06734188842773438, 0.06705606079101563, 0.06738262176513672, 0.0673511962890625, 0.06735215759277344, 0.06810374450683594, 0.0675145263671875, 0.06739043426513672, 0.06771478271484375, 0.06753485107421875, 0.06765286254882813, 0.06844287872314453, 0.06831513977050781, 0.06764134216308594, 0.06798540496826172, 0.06823458862304688, 0.0679837417602539, 0.06829698944091797, 0.06837395477294922, 0.0674144287109375, 0.06794870758056641, 0.07183769226074219, 0.0676754913330078, 0.06743106842041016, 0.067297119140625, 0.06814940643310546, 0.06899507141113281, 0.068114013671875, 0.06924534606933594, 0.06834585571289062, 0.06785638427734375, 
0.0676016616821289, 0.06775862121582031, 0.06725846099853515, 0.06709616088867187, 0.06762140655517578, 0.0674897918701172, 0.06795980834960938, 0.06740275573730468, 0.06768611145019532, 0.06789097595214844, 0.06847539520263672, 0.06857663726806641, 0.067744384765625, 0.06780210876464844, 0.06779510498046876, 0.0679241943359375, 0.06775638580322266, 0.06780854034423828, 0.06766019439697266, 0.06879881286621094, 0.06801219177246094, 0.06840099334716797, 0.06811033630371094, 0.06767318725585937, 0.0679677734375, 0.06826166534423828, 0.0677801284790039, 0.06731766510009765, 0.0673821792602539, 0.06737920379638672, 0.06762249755859374, 0.06725408172607422, 0.06865161895751953, 0.0679188461303711, 0.0678920669555664, 0.06788224029541015, 0.06804777526855468, 0.06764876556396485, 0.06761138916015624, 0.06795673370361328, 0.0678071060180664, 0.06847090911865235, 0.06855999755859375, 0.06848601531982422, 0.06798336029052734, 0.068136962890625, 0.06959308624267578, 0.0689623031616211, 0.06802022552490235, 0.06854246520996093, 0.06831027221679688, 0.06838278198242187, 0.0680250244140625, 0.06751792144775391, 0.0669742431640625, 0.06813286590576172, 0.06788076782226563, 0.06751865386962891, 0.0677949447631836, 0.06770893096923829, 0.0672911376953125, 0.06696959686279297, 0.0670696029663086, 0.06734819030761718, 0.06787955474853516, 0.06879420471191407, 0.06768608093261719, 0.06779542541503907, 0.06742947387695312, 0.06724495697021485, 0.0674672622680664, 0.067106689453125, 0.06703084564208985, 0.06749971008300781, 0.06717298889160156, 0.06702489471435547, 0.06814447784423829, 0.06815724945068359, 0.06796278381347656, 0.06807647705078125, 0.06779872131347656, 0.06759986877441407, 0.06880665588378906, 0.06830284881591797, 0.06760371398925781, 0.06767282867431641, 0.06745475006103516, 0.06814505767822265, 0.06797071838378907, 0.06951593780517579, 0.06935139465332031, 0.06819967651367187, 0.0682237777709961, 0.06760652923583985, 0.06754713439941407, 0.06721331024169921, 0.066825439453125, 0.06742095947265625, 0.06754508972167969, 0.06726451110839844, 0.06723971557617188, 0.06779312133789063, 0.06706172943115235, 0.06701222229003906, 0.06777718353271485, 0.06798655700683594, 0.06770921325683593, 0.06767855834960937, 0.06800137329101562, 0.06760697937011718, 0.06741094207763672, 0.067642333984375, 0.06736486053466798, 0.06730281829833984, 0.06716851043701172, 0.06706985473632812, 0.06739318084716797, 0.0676646728515625, 0.06707389068603516, 0.06727286529541016, 0.06733382415771484, 0.06760249328613281, 0.06828844451904297, 0.06875577545166016, 0.06802358245849609, 0.06792880249023438, 0.06716751861572266, 0.06749433898925782, 0.06929436492919921, 0.06946415710449219, 0.06753628540039062, 0.0685032958984375, 0.06746396636962891, 0.06773292541503906, 0.06736271667480469, 0.06773426818847657, 0.06880857849121094, 0.06801817321777344, 0.06824249267578125, 0.0679986572265625, 0.06766796875, 0.06813629150390625, 0.06818380737304687, 0.0673453140258789, 0.06697551727294922, 0.06868268585205078, 0.0680318374633789, 0.06771123504638672, 0.06809241485595703, 0.06783785247802734, 0.0675225601196289, 0.06844175720214844, 0.06857469177246094, 0.06755971527099609, 0.06979046630859374, 0.06797510528564453, 0.06795254516601562, 0.06780636596679687, 0.06783270263671876, 0.06769865417480468, 0.06823251342773437, 0.06808470153808593, 0.06796669006347657, 0.06822502136230468, 0.06784210968017577, 0.0677188491821289, 0.06770867156982421, 0.06825830078125, 0.067561279296875, 0.06806073760986328, 0.06837721252441406, 
0.06839910125732422, 0.06803043365478516, 0.06785641479492187, 0.06756873321533204, 0.06792079925537109, 0.06766182708740234, 0.06767955017089844, 0.06830150604248048, 0.06859571075439454, 0.06840684509277344, 0.06806915283203124, 0.0675535659790039, 0.06791001892089844, 0.06996377563476562, 0.06855474853515625, 0.06741980743408203, 0.06793046569824218, 0.06829023742675781, 0.068299072265625, 0.06830079650878906, 0.06763072204589844, 0.06771750640869141, 0.06762905883789062, 0.06771910095214843, 0.06763321685791016, 0.06790758514404296, 0.06732972717285156, 0.06746083068847657, 0.06735107421875, 0.06740383911132812, 0.06760243225097656, 0.06745702362060547, 0.06766796875, 0.06788505554199219, 0.06734786987304688, 0.06731590270996093, 0.06819267272949218, 0.06855551910400391, 0.06819020843505859, 0.06866534423828125, 0.06820044708251953, 0.06795442962646485, 0.06894207763671875, 0.06791686248779297, 0.067621826171875, 0.06750969696044921, 0.0672359390258789, 0.06742473602294922, 0.06799504089355468, 0.0681233901977539, 0.06852796936035156, 0.06759423828125, 0.06782498931884766, 0.06765033721923829, 0.06776156616210938, 0.06753478240966797, 0.0678364486694336, 0.07072940826416016, 0.06812246704101563, 0.06785072326660156, 0.06829625701904297, 0.06802067565917969, 0.06754096221923828, 0.06752159881591797, 0.068171875, 0.06764224243164063, 0.06722700500488281, 0.06696614074707032, 0.0677427215576172, 0.067532958984375, 0.06847068786621094, 0.06795359802246094, 0.06753689575195312, 0.06781536102294922, 0.06786463928222657, 0.06743161773681641, 0.06732851409912109, 0.06731961822509766, 0.06723225402832031, 0.06761993408203125, 0.06783683013916016, 0.06713139343261719, 0.06701465606689454, 0.06694086456298828, 0.06722566223144531, 0.06717440032958985, 0.06760562896728516, 0.06753475189208985, 0.06811465454101563, 0.06774038696289063, 0.06744477081298828, 0.06833904266357421, 0.06808777618408203, 0.06695597076416016, 0.06711500549316406, 0.06711090850830079, 0.07072930908203125, 0.06717011260986328, 0.06767881774902344, 0.06858681488037109, 0.06871858978271485, 0.06810361480712891, 0.06739990234375, 0.06713945770263671, 0.06717282867431641, 0.06753279876708984, 0.06760012817382813, 0.06738150024414062, 0.06683462524414062, 0.06696454620361328, 0.06718745422363281, 0.0685137939453125, 0.06736691284179687, 0.06809302520751953, 0.06736579132080078, 0.06768434906005859, 0.06704512023925781, 0.06684902191162109, 0.06683647918701172, 0.06690406036376953, 0.06667862701416015, 0.07220764923095703, 0.06768646240234374, 0.0682074203491211, 0.06738534545898438, 0.06772736358642578, 0.06792310333251952, 0.06789356994628906, 0.06775775909423828, 0.06787363433837891, 0.06780048370361329, 0.06775049591064453, 0.06779084777832031, 0.06746931457519531, 0.06788435363769531, 0.06755174255371094, 0.067342529296875, 0.06742015838623047, 0.06865715026855469, 0.06736006164550781, 0.06795549011230469, 0.06754499053955078, 0.0670512924194336, 0.06712137603759766, 0.06728498840332031, 0.06712934112548828, 0.06728224182128906, 0.06765843200683594, 0.06803865814208984, 0.06763455963134765, 0.06708697509765625, 0.06918972778320312, 0.06857068634033203, 0.06790589141845703, 0.06726451110839844, 0.06807689666748047, 0.06790825653076171, 0.06794796752929687, 0.06789955139160156, 0.0679501724243164, 0.06742918395996093, 0.06793011474609376, 0.06725222778320313, 0.06977289581298828, 0.068370849609375, 0.06816563415527344, 0.0684031982421875, 0.0687548828125, 0.0677320327758789, 0.06789507293701172, 0.06725142669677735, 
0.06715923309326172, 0.06691001892089844, 0.06661734771728516, 0.06707536315917968, 0.06670569610595703, 0.06700006103515625, 0.06743106842041016, 0.0678543701171875, 0.06733401489257812, 0.06797456359863281, 0.06738915252685547, 0.06732851409912109, 0.0673097915649414, 0.06774710083007812, 0.06663270568847657, 0.06688768005371094, 0.07215615844726563, 0.06837760162353515, 0.06795613098144532, 0.06677696228027344, 0.06652528381347657, 0.06678297424316407, 0.06738623809814454, 0.06722764587402344, 0.07132498931884766, 0.07444960021972656, 0.06736595153808594, 0.0670638427734375, 0.06802114868164062, 0.06754863739013672, 0.06749443054199218, 0.067302978515625, 0.06712774658203124, 0.06735443115234375, 0.0670537567138672, 0.06704537963867188, 0.06725577545166016, 0.06749183654785157, 0.06726614379882813, 0.06734124755859375, 0.06848886108398437, 0.06773795318603516, 0.0677042236328125, 0.06753890991210937, 0.06716889953613281, 0.06700851440429688, 0.06709657287597656, 0.06730879974365235, 0.06732828521728515, 0.06746364593505859, 0.0670999984741211, 0.06693341064453125, 0.06716825866699219, 0.06767932891845703, 0.06773145294189453]",tokens/s,14.709696499236184,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,954.937344,2820.538368,0.0,2418.016256,2409.87136,s,1,11.5264013671875,11.5264013671875,0.0,11.5264013671875,11.5264013671875,11.5264013671875,11.5264013671875,[11.5264013671875],,kWh,8.59012549499937e-05,9.468353865499404e-06,2.6923632650003443e-05,0.00012229324146549654,,MB,1531.691008,3007.184896,0.0,2589.98272,2552.699392,s,10,3.1151042785644534,0.3115104278564453,0.0006302461893467508,0.3116200714111328,0.3121025390625,0.31226239013671875,0.31239027099609373,"[0.312026123046875, 0.31075717163085936, 0.31139678955078126, 0.31027349853515623, 0.31186532592773436, 0.3113985595703125, 0.3124222412109375, 0.3118415832519531, 0.3120670166015625, 0.31105596923828127]",tokens/s,821.8023446649228,kWh,9.267778554817535e-06,1.0218940522463853e-06,6.1731212579375526e-06,1.6462793865001472e-05,tokens/kWh,15550215.965725882,MB,1551.622144,3036.545024,0.0,2619.342848,2559.606272,s,10,44.61129541015625,4.461129541015625,0.01912781385284917,4.46366455078125,4.477728955078125,4.478954809570313,4.479935493164063,"[4.47095361328125, 4.4629833984375, 4.4538955078125, 4.4801806640625, 4.46349658203125, 4.46383251953125, 4.47745654296875, 4.468140625, 4.46226953125, 4.40808642578125]",tokens/s,14.121983999966377,kWh,0.0001299898632580996,1.4338557991189313e-05,6.222276679206256e-05,0.00020655118804135147,tokens/kWh,305009.1388841948,,s,630,44.609716148376485,0.07080907325139121,0.0009485237101360463,0.07067668914794922,0.07149573287963867,0.0722039825439453,0.0740512858581543,"[0.07157260894775391, 0.0708453140258789, 0.07059455871582031, 0.07031094360351563, 0.07031001281738282, 0.0731759033203125, 0.071272705078125, 0.07090163421630859, 0.07080723571777343, 0.07145465850830078, 
0.07043004608154296, 0.07032514953613281, 0.07055974578857421, 0.07100825500488281, 0.07076659393310547, 0.07031603240966797, 0.07070310211181641, 0.07032195281982422, 0.07043238067626953, 0.07075081634521484, 0.07054534149169922, 0.07054956817626953, 0.07080754852294922, 0.07451551818847656, 0.0713430404663086, 0.0714260482788086, 0.07180035400390625, 0.07117667388916016, 0.0710307846069336, 0.07073923492431641, 0.07095369720458984, 0.07062242889404297, 0.07049910736083985, 0.0706495361328125, 0.07046979522705078, 0.07032579040527344, 0.07035478210449218, 0.07084931182861329, 0.07053107452392578, 0.07071949005126953, 0.07114281463623047, 0.07076505279541015, 0.07201334381103516, 0.07090438079833984, 0.0707391357421875, 0.07066912078857422, 0.07062937927246093, 0.07071539306640626, 0.07059458923339844, 0.07067030334472656, 0.07073769378662109, 0.07099987030029296, 0.0705700454711914, 0.07099427032470704, 0.07262617492675781, 0.07067443084716797, 0.07053721618652344, 0.07370751953125, 0.0714260482788086, 0.07106150054931641, 0.07078092956542968, 0.07067411041259766, 0.07039379119873047, 0.07227881622314453, 0.07139328002929687, 0.07068780517578124, 0.07074201965332032, 0.07148639678955078, 0.07085260772705078, 0.07070105743408203, 0.07068441772460937, 0.07048831939697266, 0.07041372680664063, 0.07054972839355468, 0.07030413055419922, 0.07041027069091797, 0.07058633422851562, 0.0705249252319336, 0.07088742065429687, 0.07051385498046875, 0.07053942108154297, 0.0705478744506836, 0.07096959686279297, 0.07104278564453125, 0.07056143951416016, 0.07063001251220703, 0.07056787109375, 0.07038572692871094, 0.07099129486083984, 0.07074230194091796, 0.07062556457519531, 0.07091302490234375, 0.07091506958007812, 0.07116566467285156, 0.07098716735839844, 0.07116889953613281, 0.07118144226074219, 0.07194854736328125, 0.07063123321533203, 0.07029638671875, 0.07055888366699219, 0.07023228454589844, 0.07104370880126953, 0.0709775390625, 0.07068406677246093, 0.07124848175048829, 0.07058211517333984, 0.07135254669189453, 0.0707685775756836, 0.07136991882324219, 0.07117852783203125, 0.07225398254394531, 0.07076044464111328, 0.07027507019042968, 0.07055699157714844, 0.07101091003417968, 0.07065814208984375, 0.07053337860107421, 0.07091747283935547, 0.07067689514160157, 0.07047987365722656, 0.07168553924560547, 0.07095356750488281, 0.07093452453613282, 0.07035494232177734, 0.07045906829833984, 0.07184796905517578, 0.07166345977783203, 0.07145670318603516, 0.07157190704345703, 0.07086870574951172, 0.07054000091552734, 0.07068070220947266, 0.07038114929199218, 0.0705244140625, 0.0704576644897461, 0.07055830383300782, 0.07255609893798828, 0.07091814422607422, 0.07073123168945313, 0.07054208374023438, 0.07037564849853516, 0.07041766357421875, 0.07066659545898438, 0.07061135864257813, 0.07067648315429688, 0.07033036804199219, 0.07062528228759765, 0.07041961669921876, 0.07020979309082032, 0.07038953399658203, 0.0704228515625, 0.07026329803466796, 0.07037712097167968, 0.0701069107055664, 0.07025737762451172, 0.07408211517333985, 0.07129856109619141, 0.07063606262207031, 0.07053107452392578, 0.07038976287841797, 0.07004153442382813, 0.07061286163330079, 0.07024864196777343, 0.07015424346923828, 0.0706355209350586, 0.07030374145507813, 0.07027513885498046, 0.07008419036865235, 0.07012387084960937, 0.07007846069335938, 0.07126630401611328, 0.07011737823486328, 0.07053836822509765, 0.07060364532470703, 0.07026687622070313, 0.070061279296875, 0.07017263793945312, 0.07012438201904297, 0.07038358306884765, 0.07091993713378907, 
0.07048534393310547, 0.0702957763671875, 0.07131410980224609, 0.07160643005371094, 0.07170646667480468, 0.07042160034179687, 0.0720040283203125, 0.07049478149414062, 0.07159196472167968, 0.07637763214111327, 0.0713138885498047, 0.07094271850585937, 0.07149568176269532, 0.0712273941040039, 0.07155097961425781, 0.07126387023925781, 0.0733880615234375, 0.07086262512207031, 0.07076278686523438, 0.07045967864990234, 0.07046348571777344, 0.07082803344726563, 0.07093862152099609, 0.0709222412109375, 0.07097958374023437, 0.07082598114013672, 0.07065395355224609, 0.0703795166015625, 0.0711529312133789, 0.0706171875, 0.07050508880615235, 0.07073145294189453, 0.07071469116210938, 0.07042054748535156, 0.07066233825683593, 0.07070294189453125, 0.07022828674316406, 0.07075202941894532, 0.07128352355957031, 0.07084579467773437, 0.07127721405029297, 0.07115744018554687, 0.0719195556640625, 0.07361164855957031, 0.0711776351928711, 0.0707221450805664, 0.0711119384765625, 0.07057689666748047, 0.07086077117919921, 0.07047376251220704, 0.07037519836425782, 0.07082006072998047, 0.07067017364501953, 0.07090191650390625, 0.07146268463134765, 0.07116143798828126, 0.07124163055419921, 0.07093321228027344, 0.07118643188476563, 0.07102873229980469, 0.07088457489013672, 0.0708511962890625, 0.07313737487792969, 0.07087315368652344, 0.0704211196899414, 0.07059257507324218, 0.07097977447509765, 0.07121004486083984, 0.07112528228759765, 0.0707460479736328, 0.07068252563476562, 0.0714961929321289, 0.07069280242919922, 0.07070662689208984, 0.07064236450195313, 0.07067401885986328, 0.07060012817382813, 0.07131565093994141, 0.07183010864257812, 0.07131488037109375, 0.07240985870361329, 0.07145881652832031, 0.07087471771240235, 0.07079955291748047, 0.07076617431640625, 0.07033510589599609, 0.07024569702148438, 0.0704334716796875, 0.07087104034423829, 0.07232032012939453, 0.07121376037597656, 0.07111494445800781, 0.07110208129882813, 0.07049030303955078, 0.070901123046875, 0.07070697784423828, 0.07067485046386719, 0.07045574188232422, 0.07070697784423828, 0.0705970230102539, 0.07045101165771485, 0.07080550384521485, 0.07061913299560547, 0.07072140502929687, 0.07112921905517579, 0.07059574127197266, 0.07232189178466797, 0.07512064361572265, 0.0726302719116211, 0.070897216796875, 0.07103324890136718, 0.07036112213134765, 0.07025606536865234, 0.07067279815673828, 0.07037286376953125, 0.07013238525390625, 0.07019725036621094, 0.07005753326416016, 0.07015449523925782, 0.06996956634521484, 0.07029814147949219, 0.07055506896972656, 0.07038614654541016, 0.07078511810302734, 0.07090300750732421, 0.07044172668457031, 0.0706494369506836, 0.07118892669677734, 0.07018637084960938, 0.07030777740478515, 0.07029625701904296, 0.07039750671386719, 0.07042630767822265, 0.07025536346435547, 0.07223228454589843, 0.0714533462524414, 0.07152188873291015, 0.07310934448242187, 0.07055980682373048, 0.07025452423095703, 0.07033094024658203, 0.0702525405883789, 0.0700964126586914, 0.07039842987060548, 0.07182733154296875, 0.07131763458251954, 0.07211993408203125, 0.07082415771484375, 0.07103913879394531, 0.07054745483398438, 0.07051987457275391, 0.07095999908447266, 0.07054547119140625, 0.07051673889160157, 0.07051264190673828, 0.07067759704589843, 0.07023709106445312, 0.07222886657714844, 0.07096028900146484, 0.07046761322021484, 0.07174428558349609, 0.07115705871582031, 0.07105638122558594, 0.07028809356689453, 0.07030067443847657, 0.07040614318847656, 0.07103282928466798, 0.07040764617919922, 0.07027561950683593, 0.07007622528076173, 
0.07013190460205078, 0.07004112243652344, 0.07295433807373047, 0.07018921661376953, 0.07028924560546874, 0.0709775390625, 0.0707747802734375, 0.07144818878173828, 0.07064409637451172, 0.07058425903320313, 0.0708608627319336, 0.06997401428222656, 0.0700572509765625, 0.07020003509521484, 0.0701493148803711, 0.06998099517822266, 0.07096902465820312, 0.07040646362304688, 0.0706170883178711, 0.07119257354736327, 0.07080271911621094, 0.07068131256103516, 0.07079730987548828, 0.0708438720703125, 0.07387190246582032, 0.07112322998046874, 0.0708641586303711, 0.07189516448974609, 0.07116239929199218, 0.07102668762207032, 0.07129702758789062, 0.07098982238769531, 0.07071295928955078, 0.07122777557373047, 0.07060275268554687, 0.08153228759765625, 0.07071798706054687, 0.07062857818603516, 0.07044953918457031, 0.0709043197631836, 0.07170047760009765, 0.07153049468994141, 0.07217356872558593, 0.0713809585571289, 0.0712166748046875, 0.07104505920410156, 0.07058819580078125, 0.07045609283447266, 0.07060479736328125, 0.07032012939453125, 0.07080960083007813, 0.0739758071899414, 0.07184384155273438, 0.0712069091796875, 0.07080953979492187, 0.07068678283691407, 0.07067443084716797, 0.07077500915527343, 0.0704837417602539, 0.07114937591552735, 0.07056403350830077, 0.07050473785400391, 0.07024972534179688, 0.07044758605957031, 0.07039794921875, 0.07037337493896484, 0.07068672180175781, 0.07056588745117187, 0.07133961486816406, 0.07058882904052734, 0.07465164947509766, 0.07066214752197265, 0.07050035095214843, 0.07073760223388671, 0.07059471893310547, 0.07064745330810547, 0.07057603454589843, 0.0703821792602539, 0.07046348571777344, 0.07027632141113281, 0.07038646697998047, 0.07037462615966797, 0.0700498275756836, 0.07040019226074219, 0.07025312042236329, 0.07048397064208985, 0.07105554962158203, 0.07065580749511718, 0.07120416259765625, 0.07064851379394531, 0.07125257873535157, 0.07059270477294922, 0.07131270599365234, 0.07087801361083984, 0.07096662139892577, 0.07096387481689453, 0.07225920104980468, 0.07113286590576172, 0.07116255950927734, 0.07356211090087891, 0.07143360137939453, 0.07075904083251953, 0.07054541015625, 0.07057539367675782, 0.07043759918212891, 0.07051033782958985, 0.0712685775756836, 0.07156329345703125, 0.071329345703125, 0.07101827239990234, 0.07089161682128907, 0.0710959701538086, 0.0707265625, 0.07097686767578125, 0.07068534088134766, 0.07073149108886718, 0.07077276611328125, 0.07077689361572266, 0.07038595581054688, 0.0705983657836914, 0.07119481658935548, 0.07062470245361328, 0.071150146484375, 0.07084031677246094, 0.07074201965332032, 0.07118643188476563, 0.0709713897705078, 0.07123763275146484, 0.07107584381103516, 0.07089561462402344, 0.07055974578857421, 0.07028284454345703, 0.07034512329101562, 0.07018905639648437, 0.07126774597167969, 0.07123824310302734, 0.0712273941040039, 0.07169023895263672, 0.07133602905273438, 0.07074806213378906, 0.07052729797363282, 0.07132947540283203, 0.07066809844970703, 0.07062937927246093, 0.0704493408203125, 0.07010918426513672, 0.07043071746826172, 0.07018879699707031, 0.07125222778320313, 0.07064070129394531, 0.070425537109375, 0.07098473358154297, 0.07038050842285157, 0.07279821014404297, 0.07120822143554688, 0.07089225769042969, 0.07146025848388672, 0.07183424377441407, 0.07103404998779297, 0.07121302032470703, 0.07069190216064453, 0.07074140930175782, 0.0705820770263672, 0.0707567367553711, 0.07087139129638671, 0.07103084564208985, 0.07081549072265625, 0.07087449645996094, 0.07057817840576172, 0.07041436767578126, 0.07058697509765625, 
0.07073177337646484, 0.07274086761474609, 0.07161142730712891, 0.07178134155273437, 0.07135177612304687, 0.0705848617553711, 0.07079936218261719, 0.07061094665527344, 0.07092166137695312, 0.07044480133056641, 0.0710316162109375, 0.07093043518066407, 0.0725401611328125, 0.07140966033935547, 0.07147315216064454, 0.07091712188720703, 0.07084748840332031, 0.07054105377197266, 0.0709505615234375, 0.07059030151367188, 0.0710540771484375, 0.07058636474609375, 0.07121510314941407, 0.07062032318115234, 0.07054217529296875, 0.07097548675537109, 0.07069491577148437, 0.0707583999633789, 0.07071334075927735, 0.07099801635742188, 0.07071539306640626, 0.07140557098388672, 0.07828479766845703, 0.06939151763916016, 0.06933900451660156, 0.06905149078369141, 0.07000182342529297, 0.06942819213867188, 0.06931839752197265, 0.06952754974365234, 0.06945791625976562, 0.06919292449951171, 0.06895081329345704, 0.06980198669433593, 0.06890496063232422, 0.07020543670654297, 0.06998012542724609, 0.06955635070800781, 0.06932621002197266, 0.06949737548828125, 0.0695889892578125, 0.06934259033203125, 0.06952102661132813, 0.0699168930053711, 0.06937474822998047, 0.06918115234375, 0.06934761810302735, 0.07129110717773438, 0.07080732727050781, 0.06993836975097656, 0.0739428482055664, 0.07035596466064453, 0.06961949157714843, 0.0693229751586914, 0.07206297302246094, 0.06972140502929687, 0.06932313537597656, 0.0698616943359375, 0.07074610900878907, 0.07011328125, 0.0701168975830078, 0.07011341094970704, 0.07077855682373047, 0.06979398345947266, 0.06930889892578125, 0.06965366363525391, 0.06987862396240234, 0.0694780502319336, 0.06938358306884766, 0.06996665954589844, 0.06990850830078126, 0.07009030151367188, 0.07042691040039062, 0.07020550537109375, 0.07144876861572266, 0.06926544189453125, 0.06934233856201172, 0.0699721908569336, 0.07044979095458985, 0.06915071868896484, 0.06984508514404297, 0.06942915344238282, 0.06911795043945312, 0.06912521362304687, 0.06979373168945313, 0.06946428680419922, 0.07029827117919922, 0.07000045013427734, 0.06998454284667968, 0.07001907348632813, 0.0700513916015625, 0.06999407958984374, 0.07030646514892579, 0.07032643127441406, 0.07026815795898438, 0.07017142486572266, 0.06937395477294922, 0.06967910766601562]",tokens/s,14.122483942837828,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,5458.665472,3827.171328,0.0,3470.78656,3271.993344,s,1,17.578322265625,17.578322265625,0.0,17.578322265625,17.578322265625,17.578322265625,17.578322265625,[17.578322265625],,kWh,0.00025056304061250216,2.7625611464353347e-05,7.951700805799988e-05,0.0003577056601348554,,MB,2214.526976,3938.320384,0.0,3512.7296,3298.469376,s,10,1.2436092529296876,0.12436092529296876,0.0004623130413365534,0.12423958587646484,0.124963818359375,0.12509677352905274,0.12520313766479493,"[0.12445168304443359, 0.12493427276611328, 0.12423379516601563, 0.12383763122558594, 0.12414361572265625, 0.12522972869873047, 
0.124748291015625, 0.12366226959228516, 0.12424537658691406, 0.12412258911132812]",tokens/s,2058.5244070588624,kWh,3.638853457253127e-06,4.0130096113827275e-07,1.806897878987668e-06,5.847052297379068e-06,tokens/kWh,43782745.04484107,MB,2214.526976,3967.680512,0.0,3542.089728,3284.027904,s,10,76.08017919921875,7.608017919921875,0.012148829474893288,7.607392822265625,7.6197572265625,7.625586132812501,7.6302492578125,"[7.61819970703125, 7.60814404296875, 7.6314150390625, 7.59806982421875, 7.603111328125, 7.6066416015625, 7.6184619140625, 7.6110927734375, 7.585412109375, 7.599630859375]",tokens/s,8.280737593300376,kWh,0.00022207051780191308,2.4495434589566528e-05,8.41763375742129e-05,0.0003307422899656925,tokens/kWh,190480.63072470992,,s,630,76.07854073333738,0.12075958846561492,0.0009932065926804643,0.12054016113281249,0.12163236465454101,0.12247278709411621,0.12436685081481934,"[0.12064153289794922, 0.12018482971191406, 0.11992793273925781, 0.11988467407226562, 0.12033580780029297, 0.12011577606201172, 0.120327392578125, 0.12063414764404297, 0.12326729583740234, 0.1208927001953125, 0.12049254608154297, 0.12035878753662109, 0.12022370910644531, 0.12012969970703125, 0.12144226837158204, 0.121183837890625, 0.12034867095947266, 0.12134349060058594, 0.12075094604492187, 0.12078908538818359, 0.1204695053100586, 0.12123526763916016, 0.12055683135986328, 0.12101840209960937, 0.12062809753417969, 0.12084591674804687, 0.12160860443115235, 0.12174336242675782, 0.12089548492431641, 0.12119007873535156, 0.12038694763183594, 0.12119276428222656, 0.1203689956665039, 0.12075087738037109, 0.12765798187255858, 0.12064559936523438, 0.12092991638183594, 0.12095436859130859, 0.1204008026123047, 0.1209788818359375, 0.12039430236816406, 0.12033583831787109, 0.12047379302978516, 0.12084259033203125, 0.12048550415039062, 0.12063318634033203, 0.1238861083984375, 0.1230285415649414, 0.1211954574584961, 0.12017779541015625, 0.12030390167236328, 0.12102454376220703, 0.12044486236572266, 0.12012921905517578, 0.12092066955566406, 0.1200643539428711, 0.12102652740478516, 0.1211371841430664, 0.12135993957519531, 0.12057234954833984, 0.12115507507324219, 0.1203921890258789, 0.12032614135742188, 0.122927490234375, 0.12047526550292968, 0.1209411849975586, 0.12062137603759765, 0.12159795379638672, 0.12110438537597656, 0.12065996551513672, 0.12051193237304687, 0.11990243530273438, 0.11980630493164063, 0.11936153411865234, 0.11976675415039062, 0.12025385284423828, 0.12033644866943359, 0.120019775390625, 0.11986124420166015, 0.12005375671386719, 0.1215836181640625, 0.12018073272705078, 0.1203978271484375, 0.11977942657470703, 0.12003673553466797, 0.12021199798583984, 0.120159423828125, 0.12102047729492188, 0.12104576110839843, 0.12018656158447266, 0.1305521240234375, 0.12062310028076172, 0.12035794830322266, 0.12068505859375, 0.12012793731689453, 0.12002674865722657, 0.12028864288330078, 0.12035584259033204, 0.11996160125732422, 0.12000592041015624, 0.12029615783691407, 0.1203620834350586, 0.12049910736083984, 0.12160176086425781, 0.12027677154541015, 0.1206830062866211, 0.12024543762207031, 0.12011808013916016, 0.12169420623779297, 0.12026802825927735, 0.12031158447265625, 0.1213224639892578, 0.12082713317871094, 0.1208901138305664, 0.12000665283203125, 0.12035884857177734, 0.1259950714111328, 0.12007424163818359, 0.12003942108154297, 0.12064972686767578, 0.1209150390625, 0.12083497619628907, 0.12080550384521484, 0.12055744171142578, 0.1217109146118164, 0.12082559967041015, 0.1212911376953125, 0.12215084838867188, 
0.12227414703369141, 0.1218130874633789, 0.12171820831298828, 0.12218962860107421, 0.1210025634765625, 0.12132179260253906, 0.12072755432128907, 0.1211678695678711, 0.12035702514648437, 0.12162844848632813, 0.12044499206542969, 0.12034435272216797, 0.12037757110595704, 0.12030361938476562, 0.1204142074584961, 0.12105260467529297, 0.12237677001953125, 0.12152627563476562, 0.1212252197265625, 0.12107571411132813, 0.12060406494140626, 0.12037177276611329, 0.12073280334472657, 0.1229279022216797, 0.12100198364257812, 0.12090787506103516, 0.12103266906738282, 0.12102047729492188, 0.12056272125244141, 0.12066300964355468, 0.12108799743652343, 0.12144230651855469, 0.12123085021972656, 0.12278377532958984, 0.12242566680908203, 0.12158335876464844, 0.1209000015258789, 0.12158489227294922, 0.12069913482666016, 0.1206297607421875, 0.12187238311767579, 0.12403507232666015, 0.12079904174804687, 0.12294895935058593, 0.1219142074584961, 0.12018892669677735, 0.12056902313232422, 0.11995219421386719, 0.12081942749023437, 0.1208220443725586, 0.12101388549804687, 0.12086310577392578, 0.1205157470703125, 0.12011325073242188, 0.12017734527587891, 0.12004768371582031, 0.12080947113037109, 0.12047721862792969, 0.12085260772705078, 0.12075452423095703, 0.12073369598388672, 0.12087657928466797, 0.12045680236816406, 0.12036595153808594, 0.12185916900634766, 0.12112809753417969, 0.1204049301147461, 0.12130790710449219, 0.12152159881591797, 0.12419136047363281, 0.12021456146240235, 0.12015455627441406, 0.12076204681396484, 0.12034909057617188, 0.12086669158935547, 0.1208919677734375, 0.11996947479248046, 0.12037366485595703, 0.12022332763671875, 0.11998764801025391, 0.12032498931884765, 0.1230929946899414, 0.12094182586669922, 0.1203753890991211, 0.12070569610595704, 0.1201844482421875, 0.12042073822021485, 0.12023580932617188, 0.12042368316650391, 0.11993545532226563, 0.120799072265625, 0.12161196899414063, 0.12064822387695312, 0.12065968322753906, 0.11989174652099609, 0.119925537109375, 0.12009693145751953, 0.12027808380126953, 0.12228403472900391, 0.12103759765625, 0.12096636962890625, 0.12128966522216797, 0.12004557037353515, 0.12020735931396484, 0.11981613159179688, 0.12026016235351562, 0.12020787048339844, 0.12021984100341797, 0.12007766723632812, 0.12162079620361328, 0.1198675537109375, 0.12000800323486328, 0.11994924926757812, 0.12024089813232422, 0.1200923843383789, 0.12071878051757813, 0.12074070739746094, 0.12104908752441407, 0.12020326232910156, 0.12074188995361328, 0.1200650863647461, 0.11983353424072266, 0.12036688232421874, 0.11954796600341797, 0.12458972930908203, 0.12094550323486328, 0.12043395233154297, 0.12027772521972656, 0.1201739501953125, 0.12078697967529296, 0.1199656982421875, 0.1201219482421875, 0.12024422454833984, 0.12125593566894531, 0.1197485122680664, 0.11970089721679687, 0.1214013442993164, 0.12095148468017577, 0.12020735931396484, 0.12025392150878907, 0.12006454467773438, 0.1208832015991211, 0.12074188995361328, 0.12061491394042968, 0.12025411224365234, 0.11984111785888672, 0.1197875213623047, 0.1202627182006836, 0.12019296264648438, 0.12002275085449218, 0.12250105285644532, 0.1222757797241211, 0.12010934448242187, 0.12010857391357421, 0.12046601867675781, 0.12002902221679687, 0.11985043334960938, 0.12009673309326171, 0.12043670654296874, 0.12022806549072265, 0.12019321441650391, 0.12260562896728516, 0.1198609619140625, 0.12486422729492187, 0.12108480072021484, 0.1202050552368164, 0.12097142028808594, 0.12079638671875, 0.12076531219482421, 0.12013568115234376, 0.12098963165283202, 
0.1203016357421875, 0.12101372528076172, 0.12036150360107421, 0.11963311767578125, 0.12019798278808594, 0.12101401519775391, 0.12010499572753906, 0.12035686492919923, 0.12024419403076173, 0.12331343841552735, 0.12139740753173828, 0.1208502426147461, 0.12074489593505859, 0.120700927734375, 0.12003327941894532, 0.1203768310546875, 0.12023372650146484, 0.12008956909179687, 0.1201891860961914, 0.12014937591552734, 0.11996403503417968, 0.12011302185058594, 0.11978765106201172, 0.12005977630615235, 0.11995142364501953, 0.12085664367675782, 0.1206987533569336, 0.12020697784423828, 0.12087551879882813, 0.12026675415039062, 0.12085609436035157, 0.12051708984375, 0.12006156921386718, 0.12018112182617187, 0.12021759796142578, 0.12139433288574218, 0.12085948944091797, 0.12054710388183594, 0.12108617401123047, 0.12037529754638672, 0.12015408325195312, 0.11970966339111327, 0.12063449859619141, 0.12089234924316407, 0.12104499053955078, 0.1204202880859375, 0.1206231689453125, 0.12070706939697265, 0.12095283508300782, 0.12054118347167969, 0.12083990478515624, 0.12032848358154297, 0.12041216278076172, 0.12084796905517578, 0.12068605041503906, 0.12077766418457031, 0.12051455688476563, 0.12017209625244141, 0.12076640319824218, 0.12113257598876953, 0.12090863800048827, 0.12281033325195312, 0.1231214370727539, 0.12089929962158204, 0.12110710144042969, 0.1209527359008789, 0.12109423828125, 0.12098252868652344, 0.12072022247314453, 0.12202614593505859, 0.1218314208984375, 0.12059645080566406, 0.12058386993408203, 0.12097980499267579, 0.12242329406738281, 0.12121820831298828, 0.12027970886230469, 0.12037149047851563, 0.12186962890625, 0.12166963195800781, 0.12209337615966796, 0.12074208068847657, 0.12243824005126953, 0.12101529693603516, 0.12050889587402344, 0.12093270111083984, 0.12405145263671875, 0.12086067199707032, 0.12072889709472656, 0.12224797058105469, 0.12326287841796875, 0.12167753601074219, 0.12122115325927735, 0.12079299163818359, 0.12077273559570313, 0.12024578857421875, 0.1206258544921875, 0.12111644744873047, 0.12032444763183593, 0.12048169708251953, 0.11998521423339843, 0.12070800018310547, 0.12071254730224609, 0.12041078186035156, 0.12070272064208984, 0.12084044647216798, 0.1213685760498047, 0.12128870391845703, 0.12095104217529297, 0.12113619232177734, 0.12120339202880859, 0.12046131134033203, 0.11999641418457031, 0.12143529510498047, 0.12344588470458985, 0.12077865600585938, 0.12054969787597657, 0.12278521728515625, 0.12025504302978515, 0.12090367889404297, 0.12009881591796875, 0.12034793853759766, 0.12011199951171875, 0.12163423919677735, 0.12011119842529297, 0.1198895034790039, 0.11989798736572266, 0.1204477767944336, 0.1201050262451172, 0.11968716430664063, 0.12065766143798828, 0.12023363494873048, 0.12001881408691406, 0.12003414154052734, 0.12026252746582031, 0.12088114929199219, 0.12160745239257813, 0.12114608001708985, 0.12011519622802734, 0.12094054412841797, 0.12128870391845703, 0.12104908752441407, 0.1216099853515625, 0.12326016235351563, 0.12096975708007812, 0.12045142364501953, 0.12064726257324218, 0.12056221008300781, 0.12036914825439453, 0.12051046752929688, 0.12053708648681641, 0.1203460464477539, 0.12021369934082031, 0.1203183364868164, 0.12223849487304687, 0.12146736145019531, 0.12133955383300782, 0.12074018859863281, 0.12088658905029297, 0.12094889831542968, 0.1211572494506836, 0.12098857879638672, 0.12128377532958984, 0.12074066925048828, 0.1216880645751953, 0.12084429168701172, 0.12184134674072265, 0.12093497467041016, 0.12115122985839843, 0.12152217864990235, 
0.12060438537597656, 0.12270825958251953, 0.12156313323974609, 0.12124364471435548, 0.12099971008300782, 0.12086003112792969, 0.12153942108154298, 0.12080947113037109, 0.12089132690429688, 0.12072688293457032, 0.12077334594726563, 0.12063497924804688, 0.120934814453125, 0.12038044738769531, 0.1202984619140625, 0.1208705596923828, 0.11995958709716797, 0.11969773101806641, 0.12020851135253906, 0.12016278076171875, 0.12035724639892578, 0.11990223693847656, 0.12056934356689453, 0.11999897766113281, 0.11959862518310548, 0.12001241302490234, 0.11980271911621093, 0.11996979522705079, 0.12012748718261719, 0.12443852996826171, 0.11986534118652344, 0.12045295715332031, 0.12095708465576172, 0.11971311950683594, 0.11972822570800781, 0.11995238494873046, 0.12048604583740234, 0.12036930847167969, 0.11983942413330079, 0.12038329315185547, 0.12061666870117188, 0.12036246490478515, 0.1198912353515625, 0.1202930908203125, 0.12127187347412109, 0.12118624114990234, 0.12058191680908203, 0.12052313232421875, 0.12006537628173829, 0.11966905975341798, 0.11974671936035156, 0.12086265563964843, 0.12038396453857422, 0.12062528228759765, 0.12192972564697266, 0.12365984344482422, 0.12013203430175781, 0.12037036895751953, 0.11981273651123046, 0.12045331573486329, 0.11987558746337891, 0.11991817474365235, 0.12001459503173828, 0.12017664337158203, 0.12377155303955079, 0.1202135009765625, 0.1213966064453125, 0.1208326416015625, 0.12034636688232422, 0.12004991912841798, 0.12006748962402344, 0.1207548828125, 0.12023136138916016, 0.12071327972412109, 0.12017852783203126, 0.11986780548095703, 0.12078050994873046, 0.1193927001953125, 0.11925721740722656, 0.12003110504150391, 0.11985305786132812, 0.12012556457519531, 0.11982425689697265, 0.12397183990478515, 0.12110521697998047, 0.12005471801757812, 0.11999027252197265, 0.12053298950195312, 0.12025241851806641, 0.12010639953613281, 0.12024892425537109, 0.11927276611328125, 0.11965411376953125, 0.11943804931640625, 0.11952767944335937, 0.11974803161621093, 0.11977375793457032, 0.12042972564697266, 0.12041203308105469, 0.1210266876220703, 0.12081766510009766, 0.12049612426757812, 0.12026630401611328, 0.12093689727783204, 0.1205186538696289, 0.1203916778564453, 0.12044902038574219, 0.12028928375244141, 0.12009276580810548, 0.12050028991699219, 0.12163215637207031, 0.12031785583496094, 0.12010345458984376, 0.12096102142333984, 0.12027657318115234, 0.11970121765136718, 0.12048226928710938, 0.11984099578857423, 0.12022950744628906, 0.12045894622802734, 0.12027168273925781, 0.12057087707519532, 0.1210008316040039, 0.11991244506835938, 0.12003683471679688, 0.12010870361328126, 0.11965734100341797, 0.11971174621582031, 0.12115353393554687, 0.12037324523925781, 0.12122643280029297, 0.1204211196899414, 0.12017692565917969, 0.12058601379394532, 0.12046137237548828, 0.12002438354492187, 0.12147980499267579, 0.1263964157104492, 0.12183888244628906, 0.1212685775756836, 0.12038162994384766, 0.12034207916259766, 0.12018768310546875, 0.12003721618652344, 0.12114656066894532, 0.12018156433105469, 0.12032947540283204, 0.12034649658203125, 0.12303446197509765, 0.12096892547607421, 0.1200008316040039, 0.12051660919189452, 0.12053929901123046, 0.12054102325439453, 0.12176179504394531, 0.120774658203125, 0.12047682952880859, 0.12030390167236328, 0.12106003570556641, 0.11987702178955079, 0.11978704071044922]",tokens/s,8.280915931447877,, 
4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1171.259392,5054.005248,0.0,4651.483136,4638.22848,s,1,13.9498076171875,13.9498076171875,0.0,13.9498076171875,13.9498076171875,13.9498076171875,13.9498076171875,[13.9498076171875],,kWh,0.00020217938477909834,2.229470251688932e-05,6.833866578198622e-05,0.0002928127530779739,,MB,1381.941248,6180.175872,0.0,5765.070848,5418.661888,s,10,9.529435363769533,0.9529435363769532,0.005886735208323169,0.9533370361328125,0.9597713012695313,0.96010693359375,0.960375439453125,"[0.940464111328125, 0.948941650390625, 0.951172119140625, 0.9521414794921875, 0.9475943603515625, 0.9545325927734375, 0.9580509033203125, 0.9596967163085938, 0.9563988647460937, 0.9604425659179687]",tokens/s,268.64131003322615,kWh,2.781714142878572e-05,3.0668915072989607e-06,1.8329130824907734e-05,4.9213163760992415e-05,tokens/kWh,5201860.242988727,MB,1407.008768,6182.273024,0.0,5765.070848,5418.664448,s,10,50.89211669921875,5.089211669921875,0.0037710526553629614,5.0882761230468745,5.09395107421875,5.09460322265625,5.09512494140625,"[5.09525537109375, 5.09380615234375, 5.0858896484375, 5.0934169921875, 5.08929833984375, 5.0868583984375, 5.08725390625, 5.08311669921875, 5.08653662109375, 5.0906845703125]",tokens/s,12.379127473188223,kWh,0.0001484292569412194,1.6373704707455988e-05,9.579415491809029e-05,0.0002605971165667657,tokens/kWh,241752.48302818128,,s,630,50.889211494445746,0.08077652618166001,0.0013257700660994675,0.08059187316894531,0.08099440383911133,0.08139314613342286,0.09075969192504883,"[0.09081446075439453, 0.08182848358154297, 0.08112537384033203, 0.0806645736694336, 0.08029388427734375, 0.0801753921508789, 0.08039395141601563, 0.08036262512207032, 0.08031728363037109, 0.08016694641113281, 0.08029350280761718, 0.08029631805419922, 0.08019149017333985, 0.08072505950927734, 0.08107923126220704, 0.0805881576538086, 0.08061811065673828, 0.08036873626708985, 0.08039107513427735, 0.08046163177490234, 0.0804755859375, 0.08082412719726563, 0.08074720001220703, 0.08090057373046874, 0.08077085113525391, 0.08041471862792969, 0.08055779266357421, 0.08065849304199219, 0.08044723510742187, 0.08059133148193359, 0.08053145599365234, 0.08059718322753906, 0.08085689544677735, 0.08075011444091797, 0.08064048004150391, 0.08176025390625, 0.08087142181396484, 0.08045772552490234, 0.08064553833007812, 0.08072048187255859, 0.080866943359375, 0.08064243316650391, 0.08074034881591798, 0.08073420715332032, 0.08225299072265625, 0.08065106964111328, 0.08066368103027344, 0.080735107421875, 0.08078540802001953, 0.08076083374023438, 0.08061337280273438, 0.08084873962402343, 0.08081423950195313, 0.08057373046875, 0.08076918029785156, 0.08097440338134766, 0.08092630767822266, 0.08096604919433593, 0.08086854553222657, 0.08086131286621094, 0.08083074951171874, 0.08080799865722656, 0.08091887664794922, 0.09062560272216796, 0.08169312286376954, 0.08122898864746093, 
0.08052934265136719, 0.0801063003540039, 0.08051923370361327, 0.08035446166992187, 0.08032489776611328, 0.08032313537597656, 0.080289794921875, 0.08029388427734375, 0.08029798126220702, 0.08015257263183594, 0.08026930999755859, 0.08055500793457031, 0.08052223968505859, 0.08046332550048828, 0.08055452728271484, 0.08066864013671875, 0.08068019104003907, 0.08035443115234375, 0.08049971008300781, 0.08068572998046875, 0.08072396850585938, 0.0804716796875, 0.08047449493408203, 0.0803594207763672, 0.08062770843505859, 0.08041062164306641, 0.08038809967041016, 0.08055983734130859, 0.08056246185302735, 0.08048748779296876, 0.08061228942871093, 0.08103116607666015, 0.081870849609375, 0.08072758483886719, 0.08064604949951172, 0.0805340805053711, 0.08064940643310547, 0.08071250915527343, 0.08124813079833984, 0.08085721588134766, 0.08083042907714844, 0.08085011291503906, 0.08096649932861329, 0.08102220916748047, 0.0809090576171875, 0.0808037109375, 0.0807071075439453, 0.08067145538330078, 0.08061734771728515, 0.08076707458496094, 0.08079119873046875, 0.080785888671875, 0.08089167785644531, 0.08099533081054687, 0.08101376342773438, 0.08100003051757812, 0.08103568267822266, 0.08073779296875, 0.0807647705078125, 0.08138601684570312, 0.09106253051757812, 0.0817457275390625, 0.08106854248046876, 0.08040265655517578, 0.08007635498046875, 0.08009913635253907, 0.0802097625732422, 0.08038265228271485, 0.08043119812011719, 0.08040447998046875, 0.08032841491699219, 0.08040476989746094, 0.0805027847290039, 0.08015049743652344, 0.08015465545654298, 0.08036557006835937, 0.08018943786621094, 0.0801809310913086, 0.08107039642333984, 0.0804653091430664, 0.0805745620727539, 0.0803438720703125, 0.08040198516845704, 0.08028787231445313, 0.08027954864501953, 0.08024166107177734, 0.08023142242431641, 0.08028160095214844, 0.08038400268554688, 0.08027545928955078, 0.08048406219482422, 0.08046578979492187, 0.08046185302734375, 0.08059737396240234, 0.0807034912109375, 0.0805165786743164, 0.08047261047363281, 0.08034713745117188, 0.08034620666503907, 0.08041359710693359, 0.08063385772705078, 0.08033296203613281, 0.08043030548095703, 0.08087206268310547, 0.0807171859741211, 0.08089788818359375, 0.08098483276367187, 0.08080582427978515, 0.08078755187988282, 0.0806638412475586, 0.08073289489746094, 0.08063180541992188, 0.08074034881591798, 0.08063180541992188, 0.08056626892089844, 0.08072406768798829, 0.08084060668945313, 0.08074060821533204, 0.08086297607421875, 0.08114697265625, 0.08123280334472656, 0.0809801254272461, 0.08085011291503906, 0.09045014190673828, 0.08162509155273437, 0.08095852661132813, 0.08048326110839844, 0.08018927764892578, 0.0802080307006836, 0.08019334411621094, 0.08037599945068359, 0.08075263977050781, 0.08166809844970703, 0.08063581085205078, 0.08028556823730469, 0.08017123413085937, 0.08016281890869141, 0.08015660858154297, 0.08025299072265625, 0.08029798126220702, 0.08044038391113281, 0.08044435119628907, 0.08064134216308594, 0.08079961395263671, 0.08059763336181641, 0.08074259185791016, 0.08044905853271485, 0.08036914825439453, 0.0803927001953125, 0.08044796752929688, 0.08058879852294921, 0.08074444580078124, 0.08072950744628907, 0.0805873565673828, 0.08057574462890625, 0.08076313781738281, 0.08087318420410156, 0.08070838165283203, 0.08081817626953125, 0.08070963287353515, 0.08158822631835938, 0.08059903717041016, 0.0805212173461914, 0.08056800079345704, 0.08053997039794922, 0.08056531524658203, 0.08075542449951172, 0.08081619262695312, 0.08066063690185547, 0.08070249938964844, 0.08096383666992188, 
0.08127766418457032, 0.08092671966552735, 0.08065433502197265, 0.08076493072509766, 0.08123596954345703, 0.08088703918457031, 0.0808043212890625, 0.08068656158447265, 0.08083539581298828, 0.08088985443115235, 0.08090937805175781, 0.08093996429443359, 0.08097964477539063, 0.08099807739257812, 0.08079414367675782, 0.09091686248779297, 0.08167398071289063, 0.08099046325683594, 0.0805940170288086, 0.08026992034912109, 0.08012217712402343, 0.08031436920166016, 0.0803430404663086, 0.08037926483154297, 0.08034162902832032, 0.08061337280273438, 0.08043084716796875, 0.08023248291015625, 0.08018966674804688, 0.08017250823974609, 0.08031801605224609, 0.080301025390625, 0.08070569610595703, 0.08029702758789062, 0.08045807647705078, 0.0808001937866211, 0.0803799057006836, 0.08048953247070313, 0.08027388763427734, 0.08045206451416016, 0.08042291259765624, 0.08046697235107422, 0.08048067474365235, 0.0806539535522461, 0.08046073913574218, 0.08053699493408203, 0.08057218933105469, 0.08047666931152343, 0.0806648941040039, 0.08049433898925781, 0.08049468994140625, 0.08043331146240235, 0.08047395324707031, 0.08058393859863282, 0.0806932144165039, 0.08065529632568359, 0.0807383041381836, 0.08073420715332032, 0.08054124450683593, 0.08065049743652344, 0.08079945373535156, 0.0807510757446289, 0.08056352233886718, 0.08071590423583984, 0.08056662750244141, 0.08072144317626953, 0.08051577758789062, 0.08052531433105468, 0.08078950500488281, 0.08084070587158203, 0.08120464324951172, 0.08066313934326172, 0.0811325454711914, 0.08198614501953125, 0.08127938842773437, 0.08097929382324219, 0.08092876434326172, 0.08075536346435547, 0.09029743957519532, 0.08181839752197266, 0.0809019546508789, 0.08074066925048828, 0.08041062164306641, 0.08022220611572266, 0.08007884979248046, 0.08016281890869141, 0.08009452819824218, 0.080085693359375, 0.07997347259521484, 0.08087161254882813, 0.08049072265625, 0.08030429077148438, 0.08024508666992188, 0.08018851470947265, 0.08056310272216796, 0.08027244567871093, 0.08069010925292969, 0.08022630310058594, 0.08073216247558594, 0.08038365173339844, 0.08042675018310547, 0.0802514877319336, 0.08023145294189453, 0.08042729949951172, 0.08063609313964844, 0.080578369140625, 0.08065702056884766, 0.08047532653808594, 0.08055830383300781, 0.08065625762939453, 0.08079644775390625, 0.08152393341064453, 0.08064899444580079, 0.08059241485595703, 0.08038220977783203, 0.0804722900390625, 0.08049664306640625, 0.08054988861083984, 0.08068879699707031, 0.08059859466552734, 0.08163817596435546, 0.08070470428466797, 0.08052409362792969, 0.0807936019897461, 0.08055193328857421, 0.08064205169677735, 0.08057881927490235, 0.08045452880859374, 0.0804500503540039, 0.08064380645751953, 0.08067270660400391, 0.08069366455078125, 0.08085298919677734, 0.08077548980712891, 0.08079769897460938, 0.08095948791503907, 0.08076287841796875, 0.08085094451904297, 0.08057241821289063, 0.08054783630371094, 0.08072191619873047, 0.09179958343505859, 0.08155964660644531, 0.08099430084228515, 0.08072918701171874, 0.08026409912109375, 0.08012537384033203, 0.08014701080322266, 0.08028511810302734, 0.08011424255371094, 0.08018895721435547, 0.08011369323730469, 0.08011411285400391, 0.08028733062744141, 0.08025743865966797, 0.08041168212890625, 0.08038204956054687, 0.08048726654052735, 0.08034905242919922, 0.08027750396728515, 0.08047017669677735, 0.08053689575195312, 0.0804431381225586, 0.08057279968261719, 0.08024121856689453, 0.08024269104003906, 0.08025497436523438, 0.08022627258300781, 0.08038182067871094, 0.08042511749267578, 
0.080636962890625, 0.0804068832397461, 0.08041321563720703, 0.08155532836914063, 0.08097200012207031, 0.08065151977539063, 0.08043119812011719, 0.08031843566894531, 0.08041744232177735, 0.08049462127685547, 0.08046797180175781, 0.08057651519775391, 0.08087347412109375, 0.08098201751708985, 0.08069324493408203, 0.08063545227050781, 0.08070313262939453, 0.08068508911132813, 0.0806039047241211, 0.08055398559570312, 0.08061746978759765, 0.08056422424316406, 0.08053555297851563, 0.08067440032958985, 0.08070521545410156, 0.08086601257324219, 0.081080322265625, 0.08080486297607421, 0.08079222106933594, 0.08068540954589844, 0.0807014389038086, 0.08176025390625, 0.08082637023925782, 0.08060518646240235, 0.0911851806640625, 0.08159964752197266, 0.08089888000488281, 0.08069725036621093, 0.08029558563232422, 0.08019967651367188, 0.08018355560302734, 0.08051526641845703, 0.08020377349853515, 0.08010867309570313, 0.08002969360351563, 0.08018828582763672, 0.08004812622070312, 0.08007062530517578, 0.08006774139404296, 0.08027225494384765, 0.0803675537109375, 0.08141417694091797, 0.08051100921630859, 0.08065750122070313, 0.08061001586914063, 0.08043949127197265, 0.08052057647705078, 0.08039692687988281, 0.08034486389160156, 0.08025110626220704, 0.08017715454101562, 0.0801908187866211, 0.08020035552978516, 0.08064579010009766, 0.08046358489990234, 0.08056438446044922, 0.08034867095947265, 0.08033993530273438, 0.08027481842041016, 0.08052960205078125, 0.08028643035888672, 0.0802567367553711, 0.08031468963623047, 0.0803815689086914, 0.08113772583007813, 0.08068224334716798, 0.0806366729736328, 0.08087939453125, 0.08075689697265626, 0.08055609893798828, 0.08063385772705078, 0.08047615814208985, 0.08053119659423828, 0.08051328277587891, 0.08056626892089844, 0.08052297973632813, 0.08062287902832031, 0.08074547576904297, 0.08070963287353515, 0.08063549041748047, 0.08098009490966797, 0.08070172882080077, 0.08084889221191406, 0.08073420715332032, 0.08071091461181641, 0.0805789794921875, 0.0805667495727539, 0.09086361694335937, 0.08162044525146485, 0.08088220977783203, 0.08053555297851563, 0.08015462493896484, 0.08045362854003907, 0.08120089721679688, 0.08131199645996094, 0.0804814682006836, 0.080216796875, 0.08017091369628906, 0.08012751770019531, 0.08019420623779297, 0.08019999694824219, 0.08013152313232422, 0.08013849639892578, 0.08039218902587891, 0.08035533142089844, 0.08022345733642579, 0.08039004516601563, 0.08051795196533203, 0.08025299072265625, 0.0801976318359375, 0.08020595550537109, 0.08028761291503907, 0.0803147201538086, 0.08036112213134766, 0.08033280181884765, 0.08021916961669921, 0.08060323333740234, 0.08061328125, 0.080659423828125, 0.08057020568847656, 0.08074665832519531, 0.08139897918701172, 0.0805425262451172, 0.08055728149414063, 0.08050540924072265, 0.08038809967041016, 0.0807958755493164, 0.08049664306640625, 0.08045734405517578, 0.08047385406494141, 0.08067529296875, 0.08075827026367187, 0.08076969909667969, 0.08075059509277344, 0.08057036590576172, 0.080500732421875, 0.08064614105224609, 0.08068505859375, 0.08059257507324219, 0.08077247619628906, 0.08075289916992187, 0.0809144287109375, 0.08081887817382813, 0.08103936004638672, 0.08079974365234376, 0.08089385223388672, 0.0807547836303711, 0.08068096160888671, 0.08066252899169922, 0.08067871856689453, 0.09082342529296875, 0.08183971405029297, 0.08109897613525391, 0.08060947418212891, 0.080500732421875, 0.08073171234130859, 0.08037728118896484, 0.08023526763916015, 0.08019292449951172, 0.08043132781982422, 0.08023923492431641, 
0.08025433349609375, 0.08023843383789063, 0.0801943359375, 0.0808980484008789, 0.08024269104003906, 0.08043660736083984, 0.08055052947998047, 0.08051734161376953, 0.08045340728759766, 0.08037939453125, 0.08047666931152343, 0.08058223724365235, 0.08046428680419922, 0.0803430404663086, 0.08050892639160157, 0.08040345764160156, 0.08052019500732421, 0.08053350067138672, 0.08051696014404297, 0.08066441345214843, 0.08051129913330078, 0.08058633422851562, 0.08069939422607422, 0.08061698913574218, 0.08060336303710937, 0.08052559661865234, 0.08059721374511719, 0.08055369567871094, 0.08067100524902343, 0.08065039825439453, 0.08057855987548829, 0.08067686462402343, 0.0809039077758789, 0.08070582580566406, 0.08079974365234376, 0.08072396850585938, 0.0807383041381836, 0.08071798706054688, 0.08061299133300781, 0.08064019012451172, 0.08066460418701171, 0.08090374755859375, 0.08071417236328125, 0.08101423645019531, 0.08134915161132812, 0.08126367950439453, 0.08102582550048829, 0.08079974365234376, 0.08076509094238281, 0.0808304672241211, 0.08080793762207031, 0.08083865356445312]",tokens/s,12.379834182904563,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1068.486656,1620.967424,0.0,1218.445312,1206.173696,s,1,9.1551044921875,9.1551044921875,0.0,9.1551044921875,9.1551044921875,9.1551044921875,9.1551044921875,[9.1551044921875],,kWh,5.861655079591704e-05,6.458685418926997e-06,1.958251566600011e-05,8.465775188084415e-05,,MB,1394.704384,1918.763008,0.0,1503.657984,1463.228416,s,10,1.8224012451171876,0.18224012451171875,0.0005835376396232388,0.1820662384033203,0.18296395416259767,0.1832844596862793,0.18354086410522463,"[0.18177462768554686, 0.18246070861816407, 0.18187779235839843, 0.18153298950195312, 0.18223397827148438, 0.18192950439453126, 0.18189097595214843, 0.18360496520996095, 0.18289273071289064, 0.18220297241210937]",tokens/s,1404.7400411183223,kWh,5.503416832947056e-06,6.068971886836563e-07,3.6522611522588476e-06,9.76257517388956e-06,tokens/kWh,26222589.372185662,MB,1415.163904,1918.763008,0.0,1503.657984,1463.230976,s,10,19.255182739257812,1.9255182739257815,0.010140896335088447,1.9248670654296876,1.9386391235351563,1.9416206481933593,1.944005867919922,"[1.927885498046875, 1.9253594970703125, 1.9446021728515626, 1.921302734375, 1.910549560546875, 1.9379765625, 1.9127025146484375, 1.918294677734375, 1.9321348876953126, 1.9243746337890626]",tokens/s,32.718463830288385,kWh,5.5908020384134235e-05,6.166451050370873e-06,2.9011681645541536e-05,9.108615308004663e-05,tokens/kWh,691652.8788369788,,s,630,19.25189714813231,0.030558566901797337,0.0005616265794975625,0.03045156764984131,0.030960354995727538,0.03131689529418945,0.032720637817382825,"[0.0317388801574707, 0.031152128219604492, 0.03059507179260254, 0.03039641571044922, 0.03043654441833496, 0.03030713653564453, 0.030394496917724608, 0.030343040466308594, 0.030129247665405274, 0.03036662483215332, 0.030492095947265624, 
0.03033760070800781, 0.030188959121704103, 0.030278047561645507, 0.030283231735229493, 0.03032137680053711, 0.030433055877685546, 0.03027497673034668, 0.030297983169555665, 0.03029088020324707, 0.030251007080078125, 0.030250303268432616, 0.03033568000793457, 0.030426784515380858, 0.03036115264892578, 0.03039030456542969, 0.030280448913574218, 0.03056025505065918, 0.030277631759643556, 0.030902111053466796, 0.030836511611938476, 0.031385984420776364, 0.031039487838745116, 0.030782880783081053, 0.030635744094848632, 0.030710687637329103, 0.03142243194580078, 0.031116607666015626, 0.031117952346801758, 0.03087513542175293, 0.030474559783935547, 0.03046441650390625, 0.030588224411010743, 0.030456544876098633, 0.030673791885375976, 0.03173465538024902, 0.03284384155273438, 0.030453567504882813, 0.03043961524963379, 0.030357664108276367, 0.03046806335449219, 0.030345087051391603, 0.030455808639526367, 0.030297088623046874, 0.030426111221313477, 0.030404287338256834, 0.030388544082641602, 0.03057254409790039, 0.03060710334777832, 0.030705919265747072, 0.030529184341430662, 0.03046806335449219, 0.030488128662109374, 0.03192201614379883, 0.031127552032470703, 0.030775392532348633, 0.030672800064086913, 0.030345216751098632, 0.030531200408935547, 0.030502496719360353, 0.030476064682006837, 0.030587903976440428, 0.030515199661254884, 0.031784767150878905, 0.03040480041503906, 0.03034726333618164, 0.030455360412597655, 0.03028188705444336, 0.030387744903564454, 0.03025177574157715, 0.03034726333618164, 0.03035897636413574, 0.030392192840576173, 0.03051590347290039, 0.030532960891723634, 0.030850784301757812, 0.03060527992248535, 0.030582815170288085, 0.030706623077392577, 0.03101263999938965, 0.030459680557250977, 0.030525888442993164, 0.030563871383666993, 0.030609888076782228, 0.03046531105041504, 0.03067158317565918, 0.03041279983520508, 0.03029193687438965, 0.03037392044067383, 0.03026937675476074, 0.030369855880737304, 0.03044668769836426, 0.030286752700805664, 0.030423040390014647, 0.030545183181762695, 0.030556800842285157, 0.03050873565673828, 0.03048080062866211, 0.030346303939819335, 0.030374208450317384, 0.030323328018188475, 0.03070057678222656, 0.030495168685913086, 0.03033113670349121, 0.030384096145629882, 0.030314815521240233, 0.030431167602539062, 0.030587072372436522, 0.030559167861938477, 0.03092710494995117, 0.030560960769653322, 0.030740480422973632, 0.030539871215820313, 0.03075177574157715, 0.030441919326782228, 0.030768768310546875, 0.03142531204223633, 0.03091836738586426, 0.03101299285888672, 0.03060089683532715, 0.030572864532470705, 0.03057663917541504, 0.030689279556274415, 0.030635936737060547, 0.03059312057495117, 0.03061350440979004, 0.030439424514770507, 0.03057583999633789, 0.030503711700439452, 0.03062396812438965, 0.03059280014038086, 0.030519296646118164, 0.030625024795532228, 0.03047212791442871, 0.030691839218139647, 0.030380352020263672, 0.030542911529541014, 0.03047520065307617, 0.030678976058959962, 0.03067424011230469, 0.030675712585449218, 0.030668800354003906, 0.030697216033935548, 0.03089664077758789, 0.030563135147094727, 0.030904672622680665, 0.03482614517211914, 0.030866111755371094, 0.030785535812377928, 0.03056025505065918, 0.030646368026733397, 0.030626880645751954, 0.03063868713378906, 0.030558176040649414, 0.030707904815673828, 0.03090640068054199, 0.030774688720703124, 0.031336448669433595, 0.03121776008605957, 0.03075334358215332, 0.030652095794677734, 0.030775615692138672, 0.031031295776367186, 0.03072204780578613, 0.030685184478759765, 
0.03041391944885254, 0.030501632690429686, 0.030570112228393554, 0.03059721565246582, 0.030605663299560548, 0.030677087783813478, 0.030650592803955077, 0.030595872879028322, 0.03054489517211914, 0.030586944580078126, 0.03129542350769043, 0.03279257583618164, 0.0317295036315918, 0.03388636779785156, 0.031786016464233395, 0.031027168273925782, 0.03086617660522461, 0.03050819206237793, 0.030694240570068358, 0.030327871322631837, 0.0309237117767334, 0.03058652877807617, 0.030454111099243165, 0.030521343231201172, 0.030296255111694335, 0.030371007919311525, 0.030327392578125, 0.030328351974487303, 0.030382591247558592, 0.030353408813476562, 0.030381536483764647, 0.030343135833740233, 0.03049087905883789, 0.03046134376525879, 0.030394336700439454, 0.030312768936157225, 0.03046668815612793, 0.030304384231567384, 0.0303570556640625, 0.030818143844604493, 0.030456287384033203, 0.030250368118286134, 0.03025574493408203, 0.03033238410949707, 0.030238752365112306, 0.03041689682006836, 0.03030281639099121, 0.03030006408691406, 0.03037183952331543, 0.030899648666381837, 0.030620384216308593, 0.03067478370666504, 0.030345056533813478, 0.030273183822631836, 0.03054969596862793, 0.030267328262329102, 0.030767391204833985, 0.03027756881713867, 0.03038889694213867, 0.030341119766235353, 0.03052310371398926, 0.03050934410095215, 0.030228479385375977, 0.030318431854248047, 0.030357215881347658, 0.03076723289489746, 0.030707199096679686, 0.03070035171508789, 0.030864383697509764, 0.030569215774536133, 0.03152716827392578, 0.030285823822021486, 0.030236671447753907, 0.03016499137878418, 0.03038607978820801, 0.030206047058105468, 0.030287872314453124, 0.03154300880432129, 0.031086879730224608, 0.030633312225341797, 0.030693279266357423, 0.030310592651367187, 0.03066921615600586, 0.030542112350463866, 0.030351232528686524, 0.030337024688720703, 0.030242816925048828, 0.030257152557373046, 0.030272672653198242, 0.030188159942626955, 0.030189184188842772, 0.030348991394042967, 0.03037071990966797, 0.030212064743041993, 0.03012816047668457, 0.030367008209228517, 0.030159040451049803, 0.030405311584472655, 0.030453439712524413, 0.030315935134887697, 0.030419647216796877, 0.030242752075195313, 0.030304607391357423, 0.030092351913452147, 0.030233312606811523, 0.03015395164489746, 0.03022108840942383, 0.030021343231201172, 0.030124319076538085, 0.030105375289916993, 0.03126812744140625, 0.030376768112182616, 0.030027904510498048, 0.030100608825683595, 0.030245759963989257, 0.030236671447753907, 0.030365695953369142, 0.030263296127319338, 0.03028895950317383, 0.030120319366455078, 0.030199455261230468, 0.030036895751953126, 0.0302357120513916, 0.0303175048828125, 0.030232479095458984, 0.030818368911743162, 0.030242752075195313, 0.030165088653564452, 0.030651647567749022, 0.030405120849609377, 0.03038377571105957, 0.030160768508911133, 0.03011043167114258, 0.030482431411743165, 0.029902719497680665, 0.030124160766601564, 0.03041641616821289, 0.030094079971313477, 0.030012735366821287, 0.030030431747436522, 0.03101081657409668, 0.030555967330932618, 0.030448928833007812, 0.03029452705383301, 0.030632287979125976, 0.031334463119506835, 0.03030348777770996, 0.030204511642456053, 0.030199935913085937, 0.030209503173828124, 0.030286399841308594, 0.030244096755981446, 0.03022719955444336, 0.03023187255859375, 0.03020460891723633, 0.03056835174560547, 0.030504959106445313, 0.03039446449279785, 0.0308604793548584, 0.03068601608276367, 0.031071456909179687, 0.03111097526550293, 0.031200223922729493, 0.031137632369995116, 
0.031008928298950196, 0.030652416229248046, 0.030503999710083007, 0.030571456909179687, 0.030424320220947265, 0.03892505645751953, 0.03302300643920898, 0.030553056716918946, 0.03044105529785156, 0.030343584060668945, 0.030414848327636718, 0.030361248016357423, 0.03053603172302246, 0.0305664005279541, 0.030588224411010743, 0.030544576644897462, 0.030371679306030273, 0.03049718475341797, 0.030328575134277343, 0.030474016189575195, 0.03101308822631836, 0.03061555290222168, 0.03072108840942383, 0.030929088592529297, 0.030552799224853516, 0.03162015914916992, 0.030477279663085936, 0.03033603286743164, 0.030325279235839844, 0.030277408599853516, 0.03039446449279785, 0.030441791534423827, 0.030807903289794922, 0.03061187171936035, 0.030478336334228515, 0.03036275291442871, 0.030382976531982422, 0.031836448669433595, 0.03050262451171875, 0.031035167694091797, 0.03074003219604492, 0.030809696197509766, 0.03073849678039551, 0.030477088928222658, 0.03038559913635254, 0.030330591201782227, 0.03018838310241699, 0.030500864028930662, 0.03023052787780762, 0.030340959548950195, 0.030578752517700196, 0.031075456619262695, 0.03032316780090332, 0.03047065544128418, 0.030355072021484374, 0.030265888214111327, 0.03031331253051758, 0.030297088623046874, 0.030515071868896484, 0.0305296630859375, 0.030394367218017578, 0.030318016052246093, 0.03019219207763672, 0.030390272140502928, 0.03022751998901367, 0.030260160446166993, 0.03029996871948242, 0.030289983749389647, 0.030206079483032226, 0.030357503890991212, 0.030273536682128906, 0.030177215576171874, 0.03027324867248535, 0.03018332862854004, 0.030232959747314454, 0.03029417610168457, 0.031088544845581056, 0.030781375885009767, 0.03049888038635254, 0.030365695953369142, 0.030258399963378906, 0.030294815063476564, 0.03025926399230957, 0.03070969581604004, 0.0301711368560791, 0.030471712112426757, 0.030125728607177736, 0.03012281608581543, 0.030027776718139648, 0.030031871795654298, 0.030271488189697264, 0.03021004867553711, 0.03015452766418457, 0.030216415405273436, 0.030316543579101563, 0.03014201545715332, 0.030083776473999024, 0.030196544647216796, 0.0300512638092041, 0.03038412857055664, 0.03018489646911621, 0.030179840087890625, 0.031060863494873046, 0.030738367080688476, 0.030545280456542968, 0.030466751098632814, 0.030220287322998047, 0.030287872314453124, 0.030257152557373046, 0.030367647171020508, 0.03076515197753906, 0.03050422477722168, 0.030636768341064453, 0.030639839172363282, 0.030149919509887695, 0.030213119506835938, 0.030522911071777344, 0.030147039413452148, 0.030103551864624024, 0.030259199142456054, 0.030121984481811522, 0.030031871795654298, 0.030111007690429688, 0.030055103302001954, 0.030226720809936523, 0.030113504409790038, 0.03014678382873535, 0.030109472274780273, 0.03039030456542969, 0.030183712005615235, 0.030124767303466797, 0.030245471954345703, 0.030327104568481447, 0.03023052787780762, 0.03007600021362305, 0.030139392852783203, 0.03031564712524414, 0.03100761604309082, 0.03095961570739746, 0.03096700859069824, 0.030661407470703124, 0.030451391220092775, 0.03046841621398926, 0.03075071907043457, 0.031059648513793944, 0.03225836944580078, 0.030637632369995116, 0.03202076721191406, 0.030697504043579103, 0.03068649673461914, 0.030452831268310547, 0.030757856369018555, 0.030603071212768555, 0.03041993522644043, 0.030300159454345704, 0.030271488189697264, 0.030117504119873045, 0.030097728729248048, 0.0301013126373291, 0.030168895721435548, 0.03018124771118164, 0.030072608947753907, 0.030112384796142578, 0.03022038459777832, 
0.03029612731933594, 0.0319036808013916, 0.03133689689636231, 0.03099171257019043, 0.030583328247070312, 0.030857215881347655, 0.030681087493896485, 0.03031235122680664, 0.03025676727294922, 0.03051772880554199, 0.03029737663269043, 0.03030729675292969, 0.03028486442565918, 0.0301964168548584, 0.030124000549316406, 0.03045702362060547, 0.030507871627807617, 0.030406911849975585, 0.03038140869140625, 0.03048873519897461, 0.030418815612792968, 0.030568832397460936, 0.030516544342041017, 0.03053385543823242, 0.03052387237548828, 0.030394367218017578, 0.030351232528686524, 0.03055014419555664, 0.030649728775024414, 0.030501344680786132, 0.030515647888183593, 0.030621408462524414, 0.030414848327636718, 0.030547967910766603, 0.03042291259765625, 0.03135868835449219, 0.030704032897949218, 0.030858783721923827, 0.030728672027587892, 0.030863359451293947, 0.030671871185302735, 0.030561439514160155, 0.030490463256835937, 0.030445568084716795, 0.030498815536499024, 0.030385536193847658, 0.03061596870422363, 0.03060963249206543, 0.030570783615112306, 0.03254451370239258, 0.0314134407043457, 0.030880544662475588, 0.0304783992767334, 0.030450944900512696, 0.030567136764526368, 0.03061756706237793, 0.030737983703613282, 0.030677440643310547, 0.030736448287963868, 0.03084681510925293, 0.031198944091796875, 0.031035776138305663, 0.030833887100219726, 0.030615808486938477, 0.032037567138671875, 0.031600032806396484, 0.030689792633056642, 0.030470239639282228, 0.03079987144470215, 0.030349279403686525, 0.030486560821533202, 0.030360671997070314, 0.030456064224243164, 0.03032943916320801, 0.030432416915893556, 0.03031747245788574, 0.030404640197753907, 0.030297279357910156, 0.03041360092163086, 0.030381248474121093, 0.030384960174560546, 0.030271488189697264, 0.030368864059448244, 0.03034636878967285, 0.030227264404296874, 0.030331872940063478, 0.030448991775512694, 0.030412767410278322, 0.030367647171020508, 0.030300832748413085, 0.030255231857299805, 0.030242816925048828, 0.030708927154541016, 0.03052217674255371, 0.030639455795288085, 0.0306441593170166, 0.030339839935302735, 0.030838752746582033, 0.03101852798461914, 0.03150019264221191, 0.03076483154296875, 0.03053411293029785, 0.03341484832763672, 0.03089641571044922, 0.030470048904418946, 0.030446016311645507, 0.030267391204833984, 0.03027347183227539, 0.030298175811767577, 0.030305343627929686, 0.03045471954345703, 0.030228479385375977, 0.030350912094116212, 0.030294111251831055, 0.030217952728271484, 0.030451744079589844, 0.03035759925842285, 0.03043132781982422, 0.03022480010986328, 0.030523040771484374, 0.03032304000854492, 0.030294015884399415, 0.030318464279174805, 0.03079750442504883, 0.030335424423217773, 0.03037308883666992, 0.030481184005737304]",tokens/s,32.72404766930297,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,26487.656448,13910.278144,0.0,13507.756032,13505.835008,s,1,53.18528125,53.18528125,0.0,53.18528125,53.18528125,53.18528125,53.18528125,[53.18528125],,kWh,0.00134744153897506,0.00014862494887913743,0.0004639137044639974,0.001959980192318195,,MB,1359.351808,14803.664896,0.0,14380.171264,14175.648768,s,10,1.554255111694336,0.15542551116943362,0.0005283037350115343,0.15539912414550783,0.15604535369873046,0.1560596565246582,0.15607109878540038,"[0.15457382202148437, 0.15484991455078126, 0.15563655090332032, 0.15513200378417968, 0.15493846130371094, 0.1551616973876953, 0.15594041442871093, 0.15590611267089843, 0.15604217529296874, 0.15607395935058593]",tokens/s,1647.0912533845706,kWh,4.6105033930331275e-06,5.08458769219671e-07,3.0438869837189943e-06,8.162849145971793e-06,tokens/kWh,31361598.802341092,MB,1379.049472,14845.607936,0.0,14422.114304,14358.052352,s,10,59.294401855468756,5.929440185546875,0.016467771962565207,5.933983154296875,5.947221923828125,5.950762817382813,5.953595532226563,"[5.93526904296875, 5.94483447265625, 5.937275390625, 5.932697265625, 5.9543037109375, 5.94643505859375, 5.916953125, 5.91321630859375, 5.90455029296875, 5.9088671875]",tokens/s,10.62494907252184,kWh,0.00017035572190738342,1.8790876177788988e-05,0.0001088161747264788,0.0002979627728116512,tokens/kWh,211435.80926407772,,s,630,59.291210342407275,0.09411303228953528,0.000995155021381196,0.0939884490966797,0.09486999816894531,0.09538416633605958,0.09784864166259766,"[0.0952647705078125, 0.09456025695800781, 0.09372589111328125, 0.09455699157714843, 0.09430998229980468, 0.09372918701171876, 0.09385501098632812, 0.09400303649902343, 0.09399737548828126, 0.09395852661132813, 0.09499212646484376, 0.09456070709228516, 0.09427967834472656, 0.09374272155761719, 0.09452377319335938, 0.09378406524658203, 0.09412403106689453, 0.09371670532226563, 0.09388585662841797, 0.09404249572753906, 0.09389568328857421, 0.09442332458496094, 0.094423583984375, 0.09506342315673828, 0.09459385681152344, 0.09477529907226563, 0.09384944152832031, 0.09451238250732422, 0.09475164794921875, 0.09404415893554688, 0.09462374114990234, 0.09474457550048829, 0.09405235290527343, 0.09407283020019531, 0.09373651123046875, 0.09399136352539063, 0.09390898895263672, 0.09368946838378907, 0.09372284698486329, 0.09448873901367187, 0.09497599792480468, 0.09439641571044922, 0.09455001831054688, 0.09423155212402344, 0.09397760009765625, 0.09402361297607421, 0.09429203033447266, 0.09440412902832031, 0.09477394866943359, 0.09422825622558594, 0.09404825592041016, 0.09436172485351563, 0.09433452606201172, 0.09483634948730468, 0.09400729370117188, 0.09473299407958985, 0.09361203002929687, 0.0939859848022461, 0.09325647735595703, 0.09374739074707031, 0.09349308776855468, 0.0939335708618164, 0.0937553939819336, 0.09499852752685548, 0.09499644470214844, 0.09378406524658203, 0.09444969940185546, 0.09408486175537109, 0.09429590606689453, 0.09422480010986328, 0.09478553771972656, 0.09443036651611328, 0.09559967803955079, 0.09433270263671875, 0.09458073425292969, 0.09483679962158204, 0.09423455810546875, 0.09507373046875, 0.09462226867675781, 0.09430220794677735, 0.09413836669921875, 0.09517874908447266, 0.09532608032226562, 0.09601996612548828, 0.0943153305053711, 0.09849120330810547, 0.09498316955566406, 0.09473843383789063, 0.09459097290039062, 0.09423417663574218, 0.09415519714355469, 0.0940926742553711, 0.09409302520751953, 0.09436662292480469, 0.09385337829589843, 0.09347071838378906, 
0.09367584228515625, 0.09472819519042969, 0.09472547149658203, 0.0939260482788086, 0.09409481811523437, 0.09362016296386719, 0.09395670318603516, 0.0942562255859375, 0.09389552307128907, 0.09356864166259765, 0.0935551986694336, 0.09412601470947266, 0.09384531402587891, 0.09479529571533203, 0.09463983917236328, 0.0940257568359375, 0.094056640625, 0.09389456176757813, 0.09429894256591796, 0.09488902282714844, 0.0940676498413086, 0.09372793579101563, 0.09346540832519531, 0.09374720001220703, 0.09381887817382813, 0.09335363006591797, 0.09404637145996093, 0.09415638732910156, 0.09370588684082032, 0.0941589126586914, 0.0954081268310547, 0.0937647705078125, 0.09378825378417968, 0.09465910339355468, 0.09364482879638672, 0.09350383758544922, 0.0941137924194336, 0.09386188507080079, 0.0937553939819336, 0.09357011413574219, 0.09337680053710938, 0.09361270141601563, 0.09358489227294922, 0.09458486175537109, 0.09395670318603516, 0.09402105712890625, 0.09377977752685547, 0.09404707336425781, 0.09419139099121093, 0.09434019470214844, 0.09395906829833985, 0.09361974334716797, 0.09407126617431641, 0.0937667236328125, 0.09472710418701172, 0.09434111785888671, 0.09361408233642578, 0.09374515533447265, 0.09373919677734376, 0.10025555419921875, 0.09466185760498047, 0.09422108459472656, 0.09353952026367188, 0.09395487976074218, 0.09375452423095704, 0.09454473876953125, 0.09408262634277344, 0.09443373107910157, 0.0938938217163086, 0.09489695739746094, 0.09705267333984376, 0.09438003540039062, 0.09402758026123047, 0.09401773071289063, 0.09363871765136719, 0.093614013671875, 0.09432268524169922, 0.09386393737792968, 0.09387213134765625, 0.09363251495361329, 0.0939315185546875, 0.09478125, 0.09383497619628907, 0.09373334503173827, 0.0939842529296875, 0.09368819427490234, 0.09396236419677734, 0.09487955474853516, 0.09363836669921875, 0.09414089965820313, 0.09391871643066406, 0.09782428741455078, 0.09483363342285156, 0.09453955078125, 0.09388224029541016, 0.09452819061279297, 0.0946851806640625, 0.09406873321533203, 0.09368370819091797, 0.0937655029296875, 0.09433657836914063, 0.09426592254638672, 0.0939612808227539, 0.09414514923095703, 0.09383353424072266, 0.09346441650390624, 0.09408470153808594, 0.09420652770996094, 0.09359523010253906, 0.09399664306640625, 0.09390713500976562, 0.0939628448486328, 0.09356291198730468, 0.09393766021728515, 0.09376134490966796, 0.09388870239257813, 0.09401344299316407, 0.09465388488769531, 0.09390547180175782, 0.09397171020507812, 0.09330738830566407, 0.0938682861328125, 0.09431430053710937, 0.09456604766845703, 0.09392591857910157, 0.09373081970214844, 0.09350348663330078, 0.09361612701416015, 0.09461068725585937, 0.09419391632080078, 0.09444198608398438, 0.09409740447998047, 0.09477340698242187, 0.09351254272460938, 0.09508306884765624, 0.09378451538085937, 0.09402162933349609, 0.09351577758789062, 0.0946055679321289, 0.09445049285888672, 0.09442809295654297, 0.09429811096191407, 0.09487359619140626, 0.09495359802246094, 0.09395597076416015, 0.09380048370361328, 0.09528521728515625, 0.09417635345458984, 0.09417017364501953, 0.09440831756591797, 0.09452086639404297, 0.09429011535644531, 0.09428224182128907, 0.09596268463134766, 0.09458425903320312, 0.09385657501220702, 0.09356646728515625, 0.09377011108398438, 0.09436160278320313, 0.09438198089599609, 0.09381897735595703, 0.09422335815429687, 0.09485759735107421, 0.09491129302978515, 0.09475872039794922, 0.09475481414794922, 0.09455827331542968, 0.09426118469238282, 0.09399910736083984, 0.0959283218383789, 0.09516627502441406, 
0.09517485046386719, 0.09979411315917969, 0.0973848648071289, 0.094615966796875, 0.0945254364013672, 0.09389055633544922, 0.09435132598876952, 0.09403600311279296, 0.09490841674804687, 0.09434630584716797, 0.09439266967773438, 0.09404681396484375, 0.09459302520751953, 0.09408688354492188, 0.093489501953125, 0.09321363067626953, 0.09385881805419923, 0.09373286437988282, 0.09418956756591797, 0.09465036773681641, 0.09444367980957032, 0.09386892700195312, 0.09459613037109375, 0.09717343902587891, 0.09445289611816406, 0.09503385925292969, 0.09441903686523437, 0.09415296173095702, 0.0942551040649414, 0.09424291229248047, 0.09485711669921874, 0.09428173065185547, 0.0937504653930664, 0.09393657684326172, 0.09553907012939453, 0.09384550476074219, 0.09357721710205077, 0.09381206512451172, 0.09409305572509766, 0.09360598754882812, 0.09394258880615235, 0.09461756896972656, 0.09395142364501953, 0.0935041275024414, 0.09728816223144532, 0.09466172790527344, 0.09361910247802735, 0.0938364486694336, 0.09409766387939453, 0.0944742431640625, 0.09479318237304687, 0.09441286468505859, 0.09418800354003906, 0.09456025695800781, 0.09534377288818359, 0.09415254211425782, 0.09421311950683593, 0.09410559844970703, 0.09430016326904297, 0.09435135650634766, 0.09483673858642579, 0.0947220458984375, 0.09489202880859375, 0.09403158569335937, 0.09562550354003906, 0.09497180938720703, 0.09620486450195312, 0.09414364624023437, 0.09402204895019531, 0.09390521240234374, 0.09417890930175782, 0.09455059051513671, 0.09430220794677735, 0.09385779571533204, 0.09391513824462891, 0.09424022674560546, 0.09437439727783203, 0.0939639663696289, 0.09440857696533203, 0.09371900939941406, 0.09398271942138672, 0.09388236999511719, 0.09494461059570312, 0.094304931640625, 0.09460342407226563, 0.09425263977050781, 0.09392118072509766, 0.0943919677734375, 0.09412473297119141, 0.09390499114990235, 0.09415666961669922, 0.09388240051269531, 0.0942551040649414, 0.09452953338623046, 0.094115234375, 0.09435606384277344, 0.09405059051513671, 0.09384521484375, 0.09413811492919921, 0.09383712005615234, 0.09378998565673828, 0.09368438720703125, 0.09390255737304687, 0.09447007751464843, 0.09414262390136718, 0.09482227325439453, 0.09788233947753906, 0.09486959838867187, 0.09474076843261718, 0.09435517120361328, 0.09411090850830078, 0.09434028625488282, 0.09464463806152344, 0.0948904037475586, 0.09415487670898437, 0.09445362854003907, 0.0942551040649414, 0.09393996429443359, 0.09416236877441406, 0.09414268493652343, 0.093552734375, 0.09400685119628906, 0.09412012481689454, 0.09454783630371094, 0.09402371215820313, 0.09421654510498047, 0.09649152374267578, 0.09446604919433593, 0.09608806610107422, 0.09491455841064453, 0.09399689483642579, 0.09392515563964844, 0.09381251525878906, 0.09410006713867188, 0.09482806396484375, 0.09413475036621094, 0.09399091339111328, 0.09405644989013671, 0.09411788940429687, 0.09424652862548828, 0.09462822723388672, 0.09451315307617188, 0.09403392028808594, 0.09441104125976563, 0.09435830688476562, 0.09407788848876954, 0.09410944366455078, 0.09348326110839844, 0.09332736206054687, 0.09328025817871094, 0.0929648666381836, 0.0933560333251953, 0.0929906234741211, 0.09332982635498047, 0.09363910675048828, 0.09480214691162109, 0.09358521270751953, 0.09347097778320312, 0.09391280364990234, 0.09392915344238281, 0.09356524658203125, 0.09356082916259766, 0.0933509750366211, 0.09287161254882813, 0.09292755126953126, 0.09295235443115234, 0.09347958374023438, 0.09334989166259766, 0.09324543762207031, 0.09269760131835937, 0.09330707550048828, 
0.09275846099853516, 0.09249830627441406, 0.09324748992919922, 0.09305945587158203, 0.09354841613769531, 0.09336217498779296, 0.09343385314941406, 0.09296466827392579, 0.092830078125, 0.09330054473876953, 0.0930099868774414, 0.09370003509521484, 0.0936568603515625, 0.09347843170166016, 0.09327827453613281, 0.09392326354980468, 0.09329119873046875, 0.09332121276855469, 0.0933433609008789, 0.0930543975830078, 0.09301728057861328, 0.09276710510253906, 0.09267276763916016, 0.09346265411376953, 0.09331097412109375, 0.09333123016357422, 0.09374345397949219, 0.09951612854003906, 0.09619599914550782, 0.09290742492675781, 0.09341219329833984, 0.09355216217041015, 0.09301039886474609, 0.09329007720947266, 0.09405632019042968, 0.0934876480102539, 0.09290544128417969, 0.09411382293701172, 0.10484668731689453, 0.09499919891357422, 0.09401683044433594, 0.09400358581542968, 0.09343161773681641, 0.09362687683105468, 0.09329869079589843, 0.09435340881347656, 0.09392681884765625, 0.09457462310791015, 0.09413894653320312, 0.09423462677001954, 0.0933578872680664, 0.09323709106445313, 0.0929236831665039, 0.09291014099121093, 0.09273139190673828, 0.09350144195556641, 0.09359891510009766, 0.09327903747558594, 0.09372454071044922, 0.09392262268066406, 0.09350431823730469, 0.09627238464355468, 0.09545318603515625, 0.09370751953125, 0.09325199890136719, 0.09276860809326172, 0.09368418884277344, 0.09380976104736329, 0.0930863037109375, 0.09359375762939454, 0.09341311645507812, 0.09307933044433593, 0.092914306640625, 0.09278838348388672, 0.09301177978515625, 0.09678492736816406, 0.09368787384033203, 0.0935072021484375, 0.09778963470458985, 0.0936431655883789, 0.09369725036621093, 0.09357548522949219, 0.09388307189941406, 0.09314070129394532, 0.09286685180664063, 0.09292185974121094, 0.09315151977539063, 0.09427938842773438, 0.0933785629272461, 0.09518431854248047, 0.0948598403930664, 0.09416044616699219, 0.09418182373046875, 0.09385984039306641, 0.09380249786376953, 0.09298738861083984, 0.0931912612915039, 0.09322166442871094, 0.09379033660888672, 0.09303987121582032, 0.09397939300537109, 0.09315235137939454, 0.09299193572998046, 0.09568694305419922, 0.09486876678466796, 0.09477212524414062, 0.09415065765380859, 0.09431449890136719, 0.09395350646972657, 0.09383376312255859, 0.09435750579833985, 0.09392127990722657, 0.09371033477783203, 0.0929487075805664, 0.09329232025146485, 0.09331033325195312, 0.09339724731445312, 0.09325769805908203, 0.09326223754882812, 0.09352601623535156, 0.09364889526367187, 0.09372057342529297, 0.09394544219970703, 0.09370460510253906, 0.0927825927734375, 0.09305235290527344, 0.09267612457275391, 0.09294310760498047, 0.09312847900390625, 0.09423052978515625, 0.09465036773681641, 0.09379779052734374, 0.09342524719238281, 0.09323622131347656, 0.0929031982421875, 0.09268246459960937, 0.09283174133300781, 0.0929587173461914, 0.09309593963623047, 0.09301811218261719, 0.09290902709960938, 0.09737474822998046, 0.09785858917236329, 0.09473942565917969, 0.09355059051513671, 0.09371443176269531, 0.09299763488769532, 0.09269999694824219, 0.09287747192382813, 0.09716531372070313, 0.0937059555053711, 0.09349497222900391, 0.09387055969238281, 0.09404783630371094, 0.09317021179199218, 0.09386710357666016, 0.09302845001220703, 0.09364739227294921, 0.09395228576660156, 0.0935887680053711, 0.09356771087646484, 0.09335193634033204, 0.0935546875, 0.09409126281738281, 0.09535488128662109, 0.09655500793457031, 0.09401280212402344, 0.09334233856201171, 0.09329869079589843, 0.09385164642333985, 0.09367116546630859, 
0.09391129302978515, 0.0932638702392578, 0.09415065765380859, 0.09415446472167968, 0.09369219207763672, 0.09286409759521484, 0.09343017578125, 0.09298274993896484, 0.09335657501220704, 0.09302742767333984, 0.09345731353759766, 0.09320441436767578, 0.09350335693359375, 0.09396038055419922, 0.09723411560058594, 0.09316230773925781, 0.09315023803710938, 0.09289759826660156, 0.09318262481689453, 0.09287843322753907, 0.09335440063476562]",tokens/s,10.62552098973431,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1174.528,13209.829376,0.0,12807.307264,12661.927936,s,1,26.48431640625,26.48431640625,0.0,26.48431640625,26.48431640625,26.48431640625,26.48431640625,[26.48431640625],,kWh,0.0005673952184833373,6.257984614908599e-05,0.00019439404440399488,0.0008243691090364182,,MB,1360.904192,15734.800384,0.0,15319.69536,14319.896576,s,10,27.898913085937497,2.7898913085937496,0.009946051509037298,2.790453125,2.8011642822265626,2.8016948120117187,2.802119235839844,"[2.76961181640625, 2.785351806640625, 2.778037353515625, 2.786249755859375, 2.789737060546875, 2.796716552734375, 2.791169189453125, 2.798767822265625, 2.802225341796875, 2.80104638671875]",tokens/s,91.7598471350618,kWh,8.12525756904112e-05,8.960930107057612e-06,5.405079324060092e-05,0.0001442642990380697,tokens/kWh,1774520.804571646,MB,1381.421056,15734.800384,0.0,15319.69536,14319.899136,s,10,132.0521064453125,13.205210644531249,0.014243572941039624,13.209818359375,13.21933310546875,13.219707080078125,13.220006259765624,"[13.1750703125, 13.1883349609375, 13.1940126953125, 13.203171875, 13.2096103515625, 13.2100263671875, 13.216732421875, 13.21581640625, 13.21925, 13.2200810546875]",tokens/s,4.7708440021053775,kWh,0.0003866893082441644,4.265468416882534e-05,0.0002568185943435997,0.0006861625867565894,tokens/kWh,91814.97390843423,,s,630,132.04798678588867,0.2095999790252201,0.0016796143599737677,0.2096267547607422,0.2107382339477539,0.2114342933654785,0.21816252822875978,"[0.2185032043457031, 0.20632847595214843, 0.20579327392578126, 0.20664035034179687, 0.2126363220214844, 0.20924557495117188, 0.20679096984863282, 0.20679493713378908, 0.2067335662841797, 0.21144985961914062, 0.20923802185058593, 0.2067283477783203, 0.20766188049316406, 0.20866050720214843, 0.20970252990722657, 0.20769625854492188, 0.20818515014648437, 0.20751788330078125, 0.20958003234863282, 0.20953701782226564, 0.20758518981933594, 0.20829193115234376, 0.20813618469238282, 0.2094397430419922, 0.20834815979003907, 0.20860064697265626, 0.20843565368652345, 0.20875808715820313, 0.20942230224609376, 0.20940243530273436, 0.20905564880371094, 0.20809523010253905, 0.2100408935546875, 0.20895353698730468, 0.20983193969726563, 0.20937113952636718, 0.20807612609863282, 0.20914242553710938, 0.20920501708984374, 0.20989503479003907, 0.20971084594726563, 0.20863414001464844, 0.20874911499023438, 0.20972726440429687, 0.20979119873046875, 0.20947084045410155, 0.20858738708496094, 0.20883865356445314, 0.20988517761230469, 0.2099384307861328, 0.20996640014648438, 0.20877375793457031, 0.21006137084960938, 0.210587646484375, 0.21018418884277343, 0.20925645446777344, 0.209177734375, 0.2099393310546875, 0.20992410278320311, 0.20991961669921874, 0.20953945922851563, 0.21011660766601561, 0.21039923095703125, 
0.21822224426269532, 0.20711868286132812, 0.20701593017578124, 0.20675584411621092, 0.21272300720214843, 0.20833555603027343, 0.20732928466796874, 0.2072657012939453, 0.20787619018554687, 0.210865478515625, 0.20801727294921876, 0.20819839477539062, 0.20754234313964845, 0.2087895050048828, 0.21057904052734375, 0.2079888916015625, 0.20785935974121095, 0.2078009338378906, 0.20910671997070313, 0.21071688842773437, 0.20803993225097656, 0.20786994934082031, 0.20828775024414062, 0.210370361328125, 0.21010450744628906, 0.20809461975097657, 0.2079503631591797, 0.20830419921875, 0.2101226806640625, 0.21056723022460938, 0.20802284240722657, 0.20840109252929687, 0.2091499481201172, 0.20993228149414062, 0.20963040161132812, 0.20814314270019532, 0.20831849670410157, 0.2103951416015625, 0.20999909973144532, 0.20977052307128907, 0.20832470703125, 0.20869389343261718, 0.21062825012207032, 0.20995686340332032, 0.2084928894042969, 0.20960797119140626, 0.20987705993652345, 0.21026882934570312, 0.21021900939941407, 0.20895126342773437, 0.2096742706298828, 0.2089697265625, 0.21083955383300781, 0.2104647674560547, 0.21038890075683594, 0.20977877807617187, 0.20936825561523437, 0.21081376647949218, 0.21049455261230468, 0.21014620971679687, 0.20924826049804687, 0.21010841369628908, 0.2109890594482422, 0.2176201629638672, 0.20707804870605467, 0.20752345275878906, 0.20691378784179687, 0.21245571899414062, 0.20803701782226564, 0.20783299255371093, 0.20686505126953125, 0.20817955017089843, 0.21136575317382814, 0.2081295928955078, 0.2076227264404297, 0.20766085815429688, 0.20915618896484375, 0.21039305114746093, 0.20801292419433592, 0.20784751892089845, 0.20825949096679688, 0.20996893310546874, 0.20910922241210939, 0.2085531768798828, 0.20791325378417969, 0.20956553649902343, 0.21089347839355468, 0.20885264587402344, 0.20842941284179686, 0.20813618469238282, 0.20969786071777344, 0.2108419189453125, 0.20934515380859375, 0.20827751159667968, 0.20866412353515626, 0.21085433959960936, 0.20906393432617187, 0.20986880493164062, 0.20836093139648437, 0.20869984436035155, 0.20985455322265625, 0.210064697265625, 0.21000262451171875, 0.20957318115234375, 0.20873081970214843, 0.21047091674804688, 0.2099875793457031, 0.20990156555175782, 0.20895904541015625, 0.20880348205566407, 0.21109613037109376, 0.2104189453125, 0.20890313720703124, 0.2100387878417969, 0.20916838073730468, 0.21040333557128907, 0.21023907470703124, 0.209093017578125, 0.21042381286621092, 0.2103336944580078, 0.21065245056152343, 0.20947561645507812, 0.21051609802246093, 0.20934681701660157, 0.21054290771484374, 0.2106060791015625, 0.2208466491699219, 0.2075852813720703, 0.20704981994628907, 0.20728311157226562, 0.21228070068359375, 0.20874620056152343, 0.2074407958984375, 0.20738458251953126, 0.20842906188964844, 0.21107066345214845, 0.20795616149902343, 0.2074993896484375, 0.2075352020263672, 0.20992501831054688, 0.20989085388183593, 0.20777587890625, 0.20790512084960938, 0.2097111053466797, 0.21059584045410157, 0.20871315002441407, 0.20782713317871093, 0.20943705749511718, 0.20960400390625, 0.20985098266601562, 0.2081135711669922, 0.2091111297607422, 0.20937318420410156, 0.20957321166992188, 0.20932630920410156, 0.20799533081054689, 0.20952621459960938, 0.20973829650878906, 0.20943463134765625, 0.20956953430175781, 0.2086709747314453, 0.2101898193359375, 0.20991845703125, 0.21010841369628908, 0.20964352416992188, 0.20845106506347658, 0.21017167663574218, 0.20917117309570313, 0.2100674591064453, 0.21035621643066407, 0.20959027099609376, 0.21010809326171875, 
0.21021299743652344, 0.21010444641113282, 0.209970458984375, 0.2098368377685547, 0.20979273986816407, 0.21006716918945312, 0.20944134521484375, 0.2105927734375, 0.21083033752441407, 0.20991795349121095, 0.20925030517578125, 0.21051528930664062, 0.21073574829101563, 0.21007337951660157, 0.2103585205078125, 0.20921548461914063, 0.21123481750488282, 0.2192025909423828, 0.20703330993652344, 0.20740505981445312, 0.20775935363769532, 0.21167301940917968, 0.20952012634277345, 0.20759779357910157, 0.20769212341308593, 0.20831581115722655, 0.21063740539550782, 0.20880496215820313, 0.2084397735595703, 0.20804850769042968, 0.2090369873046875, 0.21026853942871093, 0.20894105529785156, 0.20938137817382813, 0.20771836853027345, 0.20943670654296875, 0.20984217834472657, 0.20856124877929688, 0.2097954559326172, 0.20812156677246094, 0.20954547119140626, 0.2095928955078125, 0.209669189453125, 0.2088375701904297, 0.20896893310546874, 0.20927976989746094, 0.20989132690429688, 0.20901434326171875, 0.20897836303710937, 0.20985856628417968, 0.20967945861816406, 0.20979362487792969, 0.20891644287109376, 0.20952919006347656, 0.20957798767089844, 0.210444091796875, 0.2099570617675781, 0.20972543334960939, 0.209842041015625, 0.20992153930664062, 0.2101029052734375, 0.20990077209472657, 0.20897053527832032, 0.21037461853027345, 0.21030096435546874, 0.21040127563476563, 0.2088775634765625, 0.21051116943359374, 0.2105125732421875, 0.210229248046875, 0.21016348266601562, 0.20920860290527343, 0.21055789184570312, 0.21055859375, 0.21047030639648437, 0.210525146484375, 0.21004698181152343, 0.21110578918457032, 0.21068800354003905, 0.2114559326171875, 0.21780224609375, 0.20688890075683594, 0.20807449340820314, 0.2070700225830078, 0.21354495239257812, 0.208826171875, 0.2075415344238281, 0.2074039306640625, 0.20882022094726563, 0.21211117553710937, 0.20798892211914063, 0.2076399688720703, 0.20891885375976563, 0.2104998779296875, 0.20959231567382813, 0.2077605438232422, 0.20983689880371093, 0.20919705200195313, 0.2093690948486328, 0.20976815795898437, 0.20813238525390626, 0.20889190673828126, 0.21000579833984376, 0.20942601013183593, 0.20953152465820313, 0.2087767333984375, 0.209650146484375, 0.20885093688964843, 0.2096558074951172, 0.20831027221679688, 0.2097418212890625, 0.209677978515625, 0.20974012756347657, 0.21007974243164063, 0.2090987548828125, 0.20983807373046875, 0.20879965209960938, 0.2101510772705078, 0.209988037109375, 0.2094198760986328, 0.20915625, 0.21024761962890626, 0.21018841552734374, 0.21010861206054687, 0.20993023681640624, 0.2096680908203125, 0.20942633056640625, 0.21041571044921875, 0.2101964874267578, 0.21042976379394532, 0.2096007080078125, 0.21001609802246093, 0.21000413513183594, 0.21018342590332031, 0.21025459289550782, 0.21036405944824219, 0.21038307189941408, 0.20946710205078126, 0.21140316772460938, 0.210440185546875, 0.20991384887695314, 0.21209837341308593, 0.20933088684082032, 0.22078703308105468, 0.20699385070800783, 0.20729580688476562, 0.20723753356933594, 0.21414268493652344, 0.2081736297607422, 0.207451904296875, 0.20750157165527344, 0.20857856750488282, 0.21251193237304689, 0.20847084045410155, 0.20753202819824218, 0.20760569763183595, 0.21015895080566407, 0.21186428833007812, 0.20810096740722656, 0.20774954223632813, 0.2081464385986328, 0.21153382873535156, 0.20944281005859375, 0.2089246368408203, 0.2082570495605469, 0.20911865234375, 0.21123333740234376, 0.20935270690917968, 0.20837376403808594, 0.20834713745117187, 0.20985186767578126, 0.21141526794433593, 0.20977696228027343, 
0.20852735900878908, 0.20849459838867188, 0.2104722900390625, 0.21073373413085938, 0.20976844787597657, 0.20880172729492188, 0.20865206909179687, 0.21032537841796875, 0.21095797729492188, 0.21020748901367187, 0.20848371887207032, 0.21002099609375, 0.21065501403808592, 0.20958128356933595, 0.21049856567382813, 0.2088096923828125, 0.21035586547851562, 0.21055526733398439, 0.21024998474121093, 0.21038233947753907, 0.20886373901367186, 0.21040882873535155, 0.21088021850585936, 0.21052713012695312, 0.20949197387695312, 0.2101903076171875, 0.21020669555664062, 0.21038211059570314, 0.21177139282226562, 0.2097642822265625, 0.21018710327148438, 0.2101002197265625, 0.21107096862792968, 0.21801632690429687, 0.2070928955078125, 0.2087100524902344, 0.20741693115234375, 0.21176332092285155, 0.2090864715576172, 0.20772732543945313, 0.2080147247314453, 0.20893145751953124, 0.21070643615722656, 0.20763363647460936, 0.20955186462402345, 0.2080545654296875, 0.2095820770263672, 0.20914735412597657, 0.20962310791015626, 0.20939208984375, 0.2080809020996094, 0.20944073486328124, 0.20947970581054687, 0.20947305297851562, 0.20844090270996093, 0.20939430236816406, 0.20923011779785156, 0.20945021057128907, 0.21050242614746092, 0.2084515838623047, 0.21069619750976562, 0.20914166259765626, 0.20984841918945313, 0.2089390106201172, 0.21002035522460938, 0.20953248596191407, 0.2099136962890625, 0.2101719970703125, 0.20916390991210937, 0.21005538940429688, 0.20942095947265624, 0.2099773406982422, 0.21032537841796875, 0.20992169189453125, 0.2105266876220703, 0.21011622619628906, 0.20919743347167968, 0.21038694763183594, 0.21017164611816405, 0.21051593017578124, 0.21028233337402344, 0.2104668731689453, 0.2094697265625, 0.21046864318847655, 0.21035653686523437, 0.21023948669433593, 0.2094441223144531, 0.21029132080078125, 0.21128311157226562, 0.21026483154296874, 0.21049497985839843, 0.2099120330810547, 0.21039561462402342, 0.211019775390625, 0.21037820434570312, 0.21019497680664062, 0.2189001922607422, 0.20742373657226562, 0.20708828735351562, 0.20701799011230468, 0.2130017852783203, 0.21096479797363282, 0.2077159423828125, 0.2076056365966797, 0.2081348114013672, 0.21180621337890626, 0.2092236785888672, 0.20884684753417967, 0.2080809020996094, 0.20856422424316406, 0.21088050842285155, 0.20937522888183593, 0.20916348266601562, 0.2078236541748047, 0.20955722045898437, 0.21011894226074218, 0.21023539733886717, 0.20927693176269532, 0.2083000030517578, 0.20950019836425782, 0.20980735778808593, 0.21052815246582032, 0.2094892120361328, 0.20827215576171876, 0.20975616455078125, 0.209623046875, 0.2108415985107422, 0.208658203125, 0.2096212158203125, 0.20987699890136718, 0.2099261474609375, 0.20989439392089843, 0.20905433654785155, 0.20999615478515626, 0.2099814453125, 0.21148179626464844, 0.2094927978515625, 0.20998051452636718, 0.209546142578125, 0.2104683837890625, 0.21056338500976562, 0.20980288696289062, 0.2103260498046875, 0.20950743103027344, 0.21076060485839843, 0.209588134765625, 0.2103153533935547, 0.20989517211914063, 0.20999807739257811, 0.21011660766601561, 0.2105384979248047, 0.21002035522460938, 0.21004083251953126, 0.21054669189453126, 0.21065318298339844, 0.21033544921875, 0.21014070129394533, 0.21022181701660156, 0.21059718322753906, 0.2191482849121094, 0.20723507690429688, 0.20716339111328125, 0.20746566772460937, 0.21323858642578125, 0.20990876770019531, 0.20743017578125, 0.2075072021484375, 0.20882403564453125, 0.21250732421875, 0.21019471740722656, 0.20764466857910155, 0.20762973022460937, 0.20972604370117187, 
0.21154147338867188, 0.20930201721191405, 0.20876007080078124, 0.20745706176757814, 0.2095076446533203, 0.21100204467773437, 0.2093360595703125, 0.20832630920410156, 0.20873481750488282, 0.21004287719726564, 0.21090713500976563, 0.20836256408691406, 0.20811836242675782, 0.20976626586914063, 0.21049392700195313, 0.2098544616699219, 0.2085314178466797, 0.20937461853027345, 0.2100411834716797, 0.21001161193847656, 0.2106785888671875, 0.208648193359375, 0.20970700073242188, 0.20948748779296875, 0.2106044158935547, 0.20931788635253906, 0.20890623474121095, 0.21004698181152343, 0.21031295776367187, 0.21022895812988282, 0.2103076171875, 0.2088837127685547, 0.21010841369628908, 0.20957183837890625, 0.21135554504394533, 0.21017610168457032, 0.2103582763671875, 0.20989453125, 0.2108070068359375, 0.21065968322753906, 0.21005958557128906, 0.21041357421875, 0.21036982727050782, 0.21094883728027344, 0.2107658233642578, 0.2103951416015625, 0.210695556640625, 0.2106886444091797, 0.21010543823242187]",tokens/s,4.770992843847924,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", 
line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,1131.311104,641.59744,0.0,239.075328,216.531968,s,1,7.90785205078125,7.90785205078125,0.0,7.90785205078125,7.90785205078125,7.90785205078125,7.90785205078125,[7.90785205078125],,kWh,2.5854647087423165e-05,2.844760383336759e-06,8.606951330022983e-06,3.7306358800782906e-05,,MB,1384.824832,727.580672,0.0,304.08704,261.88032,s,10,0.39923558044433594,0.039923558044433594,0.0002066376555631466,0.03988408088684082,0.0401605110168457,0.04028017616271973,0.04037590827941894,"[0.04001327896118164, 0.03991356658935547, 0.039714366912841796, 0.04013391876220703, 0.039636863708496096, 0.039839134216308594, 0.03987804794311523, 0.03981644821166992, 0.039890113830566405, 0.04039984130859375]",tokens/s,6412.254131134317,kWh,1.1710729538666783e-06,1.2914845915012167e-07,4.6577703928796234e-07,1.7659984523047625e-06,tokens/kWh,144960489.44204935,MB,1424.023552,740.163584,0.0,316.669952,261.88288,s,10,24.362640869140627,2.4362640869140626,0.012526122207036368,2.4334583740234375,2.4521180175781248,2.453315966796875,2.454274326171875,"[2.444163818359375, 2.454513916015625, 2.451851806640625, 2.4275537109375, 2.4133427734375, 2.4317978515625, 2.435118896484375, 2.4251533203125, 2.448593017578125, 2.4305517578125]",tokens/s,25.859265560902337,kWh,7.091544599695795e-05,7.821822584273507e-06,2.443824288391183e-05,0.00010317551146514328,tokens/kWh,610610.0091520638,,s,630,24.357482807159418,0.038662671122475276,0.0008206524206509637,0.0385128002166748,0.03911991653442383,0.039646068000793455,0.0428452198791504,"[0.038739456176757815, 0.039903873443603514, 0.03859436798095703, 0.038482273101806644, 0.03861388778686523, 0.03862003326416016, 0.03853529739379883, 0.03840726470947266, 0.03869513702392578, 0.038617950439453125, 0.038503265380859374, 0.03844800186157227, 0.03838700866699219, 0.03839078521728516, 0.03844678497314453, 0.03853647994995117, 0.038570335388183594, 0.038472030639648436, 0.03838569641113281, 0.0385392951965332, 0.038457313537597654, 0.038423969268798826, 0.03902096176147461, 0.03873155212402344, 0.03885728073120117, 0.03895382308959961, 0.0389378890991211, 0.03924115371704102, 0.03886428833007813, 0.03880025482177735, 0.03843692779541016, 0.038569759368896485, 0.03865580749511719, 0.0385516471862793, 0.039016063690185544, 0.03873651123046875, 0.038774112701416015, 0.03884099197387696, 0.03844486236572266, 0.0384964485168457, 0.038830078125, 0.03853311920166016, 0.03863555145263672, 0.03859775924682617, 0.03870137786865235, 0.03882191848754883, 0.038489601135253904, 0.03911948776245117, 0.038652286529541016, 0.038798431396484374, 0.0387960319519043, 0.038475711822509764, 0.04033148956298828, 0.040738815307617186, 0.04033740615844727, 0.039485088348388674, 0.03902908706665039, 0.03929008102416992, 0.03897433471679688, 
0.03864771270751953, 0.03858371353149414, 0.03853372955322266, 0.038577438354492184, 0.03815673446655273, 0.03841254425048828, 0.03847577667236328, 0.03841843032836914, 0.03855974578857422, 0.03859203338623047, 0.03856947326660156, 0.03867951965332031, 0.040409088134765625, 0.039658782958984375, 0.03892092895507813, 0.03877273559570313, 0.03861689758300781, 0.038628639221191405, 0.039432830810546875, 0.03898396682739258, 0.03972480010986328, 0.0393177604675293, 0.038877185821533204, 0.03860230255126953, 0.038556095123291015, 0.038397918701171874, 0.0390513916015625, 0.038618816375732425, 0.038670558929443356, 0.038735870361328126, 0.03864547348022461, 0.0388623046875, 0.03914332962036133, 0.039028800964355466, 0.03934294509887695, 0.03907145690917969, 0.03909609603881836, 0.0391253776550293, 0.03960335922241211, 0.03936793518066406, 0.0392209587097168, 0.03927983856201172, 0.040026046752929687, 0.03922415924072266, 0.039565601348876954, 0.03914723205566406, 0.039153694152832035, 0.03902051162719727, 0.039040897369384764, 0.0390340461730957, 0.03887235260009766, 0.042954399108886716, 0.03905324935913086, 0.03898374557495117, 0.0390412483215332, 0.03886700820922852, 0.038688480377197264, 0.038653953552246094, 0.03831587219238281, 0.038287487030029294, 0.03826214218139649, 0.03827369689941406, 0.03830374526977539, 0.038338558197021484, 0.038735870361328126, 0.038268543243408205, 0.03828364944458008, 0.03795171356201172, 0.038391681671142576, 0.03847446441650391, 0.03885055923461914, 0.03854867172241211, 0.03836409759521484, 0.03830595016479492, 0.03835609436035156, 0.03822963333129883, 0.03817340850830078, 0.038321857452392576, 0.038103614807128906, 0.03876582336425781, 0.03815494537353516, 0.0379986572265625, 0.04257791900634766, 0.03838566589355469, 0.03836697769165039, 0.03867782211303711, 0.03808975982666016, 0.03817792129516601, 0.03815913772583008, 0.038084735870361326, 0.03800396728515625, 0.03808755111694336, 0.037975425720214846, 0.03810294342041016, 0.03844521713256836, 0.03984620666503906, 0.04135683059692383, 0.038709152221679685, 0.03840671920776367, 0.038029312133789066, 0.03814128112792969, 0.038154209136962894, 0.03837216186523437, 0.03845430374145508, 0.03839267349243164, 0.03809084701538086, 0.038150047302246096, 0.038131103515625, 0.03812822341918945, 0.038157726287841795, 0.0383535041809082, 0.03830313491821289, 0.038104736328125, 0.03814905548095703, 0.04390092849731445, 0.04306512069702149, 0.04395030212402344, 0.04545945739746094, 0.038481216430664066, 0.03886150360107422, 0.03892838287353516, 0.04023007965087891, 0.03916883087158203, 0.03907379150390625, 0.0389857292175293, 0.03885670471191406, 0.038780193328857425, 0.03887305450439453, 0.03862774276733399, 0.038523231506347656, 0.03836438369750977, 0.03851059341430664, 0.03855324935913086, 0.03853948974609375, 0.038825759887695314, 0.03883612823486328, 0.03877199935913086, 0.03902137756347656, 0.038560958862304685, 0.03843695831298828, 0.038494945526123044, 0.03827916717529297, 0.03830364990234375, 0.040263870239257815, 0.03830579376220703, 0.038682849884033206, 0.038708927154541016, 0.0389607048034668, 0.03851830291748047, 0.038402976989746096, 0.038340831756591795, 0.038280990600585936, 0.038604801177978515, 0.03865190505981445, 0.03868467330932617, 0.0384554557800293, 0.03841011047363281, 0.03829756927490234, 0.03822963333129883, 0.03851945495605469, 0.03862204742431641, 0.03870172882080078, 0.03823571014404297, 0.03847849655151367, 0.03856972885131836, 0.0384637451171875, 0.03814179229736328, 0.038225406646728514, 
0.03824092864990234, 0.03887529754638672, 0.03860464096069336, 0.038338558197021484, 0.03899801635742187, 0.03870230484008789, 0.038320030212402344, 0.03837427139282226, 0.038449153900146485, 0.038811649322509766, 0.039139328002929685, 0.038830078125, 0.03901235198974609, 0.03896115112304688, 0.038801311492919925, 0.03850841522216797, 0.038222335815429685, 0.03805974578857422, 0.03826483154296875, 0.03811942291259766, 0.03799363327026367, 0.0379769287109375, 0.03793100738525391, 0.038225921630859375, 0.03804966354370117, 0.03784483337402344, 0.03808256149291992, 0.03801702499389648, 0.03833657455444336, 0.037924766540527344, 0.03867267227172851, 0.0382033920288086, 0.038160255432128906, 0.03833433532714844, 0.03845529556274414, 0.03890956878662109, 0.03834000015258789, 0.03832112121582031, 0.03855062484741211, 0.03890214538574219, 0.038095390319824216, 0.03811337661743164, 0.03801692962646484, 0.038076416015625, 0.03808051300048828, 0.038145313262939455, 0.03791299057006836, 0.03801744079589844, 0.03795347213745117, 0.03799964904785156, 0.03791584014892578, 0.0379318733215332, 0.03812774276733399, 0.03797481536865235, 0.038516670227050784, 0.03829180908203125, 0.03853388977050781, 0.038017185211181644, 0.03819724655151367, 0.03845817565917969, 0.03820127868652344, 0.03808406448364258, 0.03811164855957031, 0.038075935363769534, 0.03809743881225586, 0.03789340972900391, 0.037951614379882814, 0.03801318359375, 0.03797568130493164, 0.03813689422607422, 0.03819868850708008, 0.03803567886352539, 0.03797817611694336, 0.03791209411621094, 0.03788447952270508, 0.03796908950805664, 0.038179649353027346, 0.03795558547973633, 0.037902687072753904, 0.03811651229858398, 0.04082124710083008, 0.04026383972167969, 0.04108828735351563, 0.03846815872192383, 0.03851475143432617, 0.03876617431640625, 0.03853887939453125, 0.03829423904418945, 0.038008289337158205, 0.03810793685913086, 0.03797532653808594, 0.03818134307861328, 0.03818412780761719, 0.0380560302734375, 0.03810140609741211, 0.0382503662109375, 0.03817071914672852, 0.03815683364868164, 0.038074176788330076, 0.03821283340454101, 0.03875241470336914, 0.03877164840698242, 0.03963052749633789, 0.03899788665771484, 0.039196800231933594, 0.03920896148681641, 0.03915529632568359, 0.03901897430419922, 0.038748096466064454, 0.0385761604309082, 0.03857814407348633, 0.0387625617980957, 0.038736129760742186, 0.038534847259521485, 0.038397857666015625, 0.03849593734741211, 0.03837174224853516, 0.038250495910644534, 0.038311038970947266, 0.038221057891845704, 0.03831145477294922, 0.038864990234375, 0.038297409057617186, 0.038470142364501955, 0.03840377426147461, 0.038301025390625, 0.03823494338989258, 0.03834864044189453, 0.03823593521118164, 0.03826723098754883, 0.038274944305419924, 0.03955129623413086, 0.03836076736450195, 0.03857561492919922, 0.03838140869140625, 0.038398303985595704, 0.03859519958496094, 0.038502079010009765, 0.042493728637695315, 0.039395198822021485, 0.038927967071533204, 0.03881856155395508, 0.03873548889160156, 0.03857392120361328, 0.03864038467407226, 0.03850175857543945, 0.03851731109619141, 0.0385513916015625, 0.03839385604858398, 0.03860070419311523, 0.038563838958740236, 0.03806412887573242, 0.038267807006835936, 0.03823820877075195, 0.0382402229309082, 0.03843267059326172, 0.038406272888183594, 0.0385167350769043, 0.03859356689453125, 0.03883958435058594, 0.03844899368286133, 0.038440799713134764, 0.0382704963684082, 0.03836937713623047, 0.03904550552368164, 0.03873168182373047, 0.03860899353027344, 0.03848732757568359, 
0.03817987060546875, 0.03984147262573242, 0.04687855911254883, 0.03865411376953125, 0.038356990814208985, 0.038388961791992186, 0.03818985748291016, 0.03836131286621094, 0.038382400512695314, 0.038527263641357425, 0.03837420654296875, 0.03843878555297851, 0.03827507019042969, 0.038608001708984374, 0.038373504638671875, 0.03913606262207031, 0.03850643157958984, 0.03835433578491211, 0.038653823852539064, 0.03863552093505859, 0.03844137573242187, 0.03899763107299805, 0.038523582458496096, 0.03856304168701172, 0.038357120513916015, 0.038136287689208986, 0.03837174224853516, 0.03808643341064453, 0.03820054244995117, 0.038589214324951174, 0.03828742218017578, 0.03843679809570313, 0.038678081512451175, 0.038680065155029295, 0.038661312103271485, 0.03850009536743164, 0.0385445442199707, 0.03817967987060547, 0.03873401641845703, 0.0382213134765625, 0.038286945343017575, 0.03913187026977539, 0.03872153472900391, 0.03901472091674805, 0.038575809478759764, 0.038543041229248044, 0.03849203109741211, 0.0389222412109375, 0.03860310363769531, 0.03858163070678711, 0.03857196807861328, 0.038755935668945314, 0.038510494232177735, 0.03836656188964844, 0.03842595291137695, 0.03986175918579102, 0.0382957763671875, 0.0383579216003418, 0.038250144958496095, 0.03826892852783203, 0.03826483154296875, 0.03824588775634766, 0.038335296630859376, 0.038954689025878904, 0.03843932723999023, 0.038144832611083986, 0.038484512329101564, 0.038545665740966795, 0.03854537582397461, 0.038344158172607425, 0.03822252655029297, 0.03803916931152344, 0.03815039825439453, 0.03804159927368164, 0.03805388641357422, 0.038225406646728514, 0.038226016998291014, 0.03814236831665039, 0.03827507019042969, 0.03824755096435547, 0.03830444717407227, 0.03853740692138672, 0.038209537506103515, 0.03824009704589844, 0.0381605453491211, 0.038150272369384765, 0.03841948699951172, 0.03830217742919922, 0.03845568084716797, 0.03861814498901367, 0.03862204742431641, 0.03865798568725586, 0.03919177627563476, 0.039123775482177735, 0.03902470397949219, 0.03900191879272461, 0.03896758270263672, 0.038723072052001956, 0.038746814727783206, 0.038475582122802734, 0.03851295852661133, 0.03854086303710937, 0.03855168151855469, 0.038340606689453126, 0.03830611038208008, 0.03835257720947265, 0.038852127075195315, 0.03855804824829102, 0.03848566436767578, 0.038407455444335936, 0.038916831970214845, 0.03877609634399414, 0.038536159515380856, 0.03850009536743164, 0.03850876617431641, 0.03865993499755859, 0.03829948806762695, 0.03862511825561524, 0.03892364883422852, 0.03872857666015625, 0.03889561462402344, 0.038778881072998046, 0.03866419219970703, 0.03912483215332031, 0.03897999954223633, 0.038778846740722656, 0.03873155212402344, 0.03882614517211914, 0.03877840042114258, 0.03871401596069336, 0.0385338249206543, 0.0387567024230957, 0.038495903015136716, 0.0385010871887207, 0.03838796615600586, 0.038592353820800784, 0.038465854644775394, 0.03852272033691406, 0.038400222778320316, 0.03826393508911133, 0.03831875228881836, 0.03874771118164062, 0.03877920150756836, 0.038555774688720706, 0.03893657684326172, 0.04143939208984375, 0.04428374481201172, 0.03851264190673828, 0.03837747192382813, 0.03853478240966797, 0.038370975494384764, 0.038685409545898435, 0.038836223602294925, 0.038641342163085936, 0.038873409271240236, 0.03857766342163086, 0.03833926391601562, 0.03848495864868164, 0.038204254150390624, 0.03821763229370117, 0.03846358489990234, 0.03831123352050781, 0.038291934967041016, 0.03827878570556641, 0.03822563171386719, 0.038452095031738284, 0.04143513488769531, 
0.04019404983520508, 0.04142192077636719, 0.03944131088256836, 0.03874816131591797, 0.03894262313842774, 0.04061478424072266, 0.038714271545410156, 0.038755199432373044, 0.0384571533203125, 0.038338016510009766, 0.03821769714355469, 0.038075199127197264, 0.038384639739990234, 0.03887814331054688, 0.03854892730712891, 0.038539840698242185, 0.03836537551879883, 0.03852799987792969, 0.039007038116455076, 0.03864761734008789, 0.038766273498535155, 0.0384087028503418, 0.038365184783935545, 0.038160385131835936, 0.03832387161254883, 0.03860886383056641, 0.03885833740234375, 0.03830255889892578, 0.03854431915283203, 0.038507518768310545, 0.03871763229370117, 0.0386223030090332, 0.0384205436706543, 0.03880940628051758, 0.03874044799804687, 0.03879359817504883, 0.038653472900390624, 0.038496734619140625, 0.03839923095703125, 0.03833932876586914, 0.0393702392578125, 0.03859404754638672, 0.0385417594909668, 0.03874259185791016, 0.03861865615844726, 0.038646240234375, 0.03851433563232422, 0.03834492874145508, 0.03828559875488281, 0.038546272277832035, 0.03852185440063476, 0.03844710540771484, 0.03824367904663086, 0.03818153762817383, 0.03818121719360352, 0.03832387161254883, 0.03831916809082031, 0.03902560043334961, 0.0397591667175293, 0.03876748657226563, 0.03836703872680664, 0.03834854507446289, 0.03829967880249024, 0.03824176025390625, 0.038288127899169924, 0.03828841781616211, 0.03873676681518555, 0.03857440185546875]",tokens/s,25.864741647885854,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,3999.256576,2159.935488,0.0,1757.413376,1736.37632,s,1,12.34451171875,12.34451171875,0.0,12.34451171875,12.34451171875,12.34451171875,12.34451171875,[12.34451171875],,kWh,0.0001510086329791496,1.6650117616084696e-05,5.066559608796206e-05,0.00021832434668319636,,MB,4010.930176,2396.913664,0.0,1973.420032,1922.784256,s,10,0.635944606781006,0.0635944606781006,0.000489503032996619,0.06343688011169434,0.06431206130981446,0.06449534416198731,0.06464197044372559,"[0.06427133178710938, 0.0636992301940918, 0.06467862701416016, 0.06338985443115235, 0.0632558708190918, 0.06329219055175782, 0.06312783813476562, 0.06348390579223633, 0.06307660675048828, 0.06366915130615235]",tokens/s,4025.507839366837,kWh,1.869804295355628e-06,2.0620605183419006e-07,1.0463295702547915e-06,3.1223399174446097e-06,tokens/kWh,81989791.87682933,MB,4015.18592,2419.982336,0.0,1994.391552,1971.314176,s,10,38.49758618164062,3.849758618164062,0.02059471986511405,3.8483531494140624,3.8584692382812498,3.882311767578125,3.901385791015625,"[3.906154296875, 3.84786669921875, 3.84954638671875, 3.848839599609375, 3.83255810546875, 3.836901611328125, 3.826186767578125, 3.84478466796875, 3.8531708984375, 
3.8515771484375]",tokens/s,16.364662372012432,kWh,0.00011238219770631013,1.2395830346361402e-05,4.427612335434901e-05,0.00016905415140702052,tokens/kWh,372661.6558993518,,s,630,38.49495606613161,0.06110310486687554,0.0007689263528941237,0.060869598388671876,0.061860659408569336,0.06249792366027832,0.0643200360107422,"[0.06262579345703125, 0.062263294219970705, 0.06207056045532226, 0.062212318420410154, 0.06203744125366211, 0.06193619155883789, 0.06164684677124024, 0.06184796905517578, 0.06202483367919922, 0.06196176147460938, 0.06173382568359375, 0.0619415054321289, 0.06215705490112305, 0.061824222564697266, 0.06136624145507812, 0.06142649459838867, 0.06183331298828125, 0.06234463882446289, 0.0616756477355957, 0.061462432861328124, 0.06128684616088867, 0.06134579086303711, 0.06127548980712891, 0.061344417572021484, 0.06142156982421875, 0.06180249786376953, 0.06187212753295898, 0.061703807830810545, 0.06180847930908203, 0.06238665771484375, 0.06180051040649414, 0.06169760131835938, 0.0625074577331543, 0.06308249664306641, 0.06359040069580078, 0.0641553955078125, 0.0643013153076172, 0.06216294479370117, 0.061859840393066405, 0.0619417610168457, 0.062072608947753904, 0.06271817779541015, 0.062199199676513675, 0.06218812942504883, 0.06285516738891601, 0.06402047729492187, 0.06465945434570312, 0.06223238372802734, 0.0619984016418457, 0.06265119934082031, 0.06196758270263672, 0.06169071960449219, 0.06128550338745117, 0.060892032623291015, 0.06091775894165039, 0.06078179168701172, 0.06133225631713867, 0.06135958480834961, 0.06151190567016602, 0.06095084762573242, 0.06133350372314453, 0.061077503204345705, 0.061472766876220705, 0.06065663909912109, 0.061268577575683596, 0.061387168884277345, 0.060821502685546876, 0.060585311889648434, 0.060695201873779296, 0.06042569732666016, 0.061333633422851565, 0.06078096008300781, 0.06066790390014649, 0.06062870407104492, 0.06073168182373047, 0.06062646484375, 0.06109846496582031, 0.06103244781494141, 0.06104064178466797, 0.060745502471923826, 0.06126726531982422, 0.06285200119018555, 0.061224735260009766, 0.06088236618041992, 0.060848926544189455, 0.06076374435424805, 0.06107382583618164, 0.060780544281005856, 0.06104998397827149, 0.060865055084228514, 0.061071712493896486, 0.0613043212890625, 0.06103910446166992, 0.06098739242553711, 0.06076006317138672, 0.06096281433105469, 0.060833728790283204, 0.06112201690673828, 0.06086307144165039, 0.060846080780029295, 0.060829120635986327, 0.06060294342041016, 0.06056755065917969, 0.06071705627441406, 0.060837886810302735, 0.061009918212890625, 0.060715007781982425, 0.06092595291137695, 0.061020160675048826, 0.06101308822631836, 0.06369168090820312, 0.0608554573059082, 0.06076092910766601, 0.06072835159301758, 0.061535198211669924, 0.06098944091796875, 0.061028350830078126, 0.061192192077636716, 0.06124544143676758, 0.06114831924438477, 0.06099849700927734, 0.061318656921386716, 0.06110464096069336, 0.061472640991210935, 0.06359872055053711, 0.06166732788085937, 0.061692638397216795, 0.06189897537231445, 0.06185062408447266, 0.0621363525390625, 0.061720577239990235, 0.061532512664794925, 0.06095935821533203, 0.06087676620483398, 0.06082089614868164, 0.061136863708496095, 0.061061790466308594, 0.060834945678710936, 0.060902271270751956, 0.06105878448486328, 0.06084431838989258, 0.0607531852722168, 0.060812000274658204, 0.060887039184570314, 0.060781665802001957, 0.061084255218505856, 0.06293468856811524, 0.06130691146850586, 0.06133343887329101, 0.060862945556640624, 0.06070399856567383, 0.06079792022705078, 
0.06118352127075195, 0.0610305290222168, 0.06119974517822266, 0.06110512161254883, 0.06123484802246094, 0.061208927154541015, 0.06163043212890625, 0.06129462432861328, 0.060921855926513675, 0.060972606658935544, 0.062486270904541015, 0.06160044860839844, 0.06143590545654297, 0.060624065399169924, 0.06053136062622071, 0.06117606353759766, 0.06095657730102539, 0.06077439880371094, 0.060641281127929686, 0.06070207977294922, 0.06067468643188476, 0.06185574340820312, 0.06053068923950195, 0.060706817626953125, 0.06039104080200195, 0.06043049621582031, 0.060838111877441405, 0.06159360122680664, 0.061126655578613284, 0.06061203384399414, 0.060691265106201174, 0.06073523330688477, 0.06084716796875, 0.061206687927246095, 0.06104963302612305, 0.06087475204467773, 0.06085017776489258, 0.061520511627197266, 0.06161734390258789, 0.06075884628295898, 0.06064332962036133, 0.06064486312866211, 0.06110259246826172, 0.06145443344116211, 0.060900737762451175, 0.06067254257202148, 0.06070403289794922, 0.060779232025146485, 0.06112803268432617, 0.06077094268798828, 0.0608267822265625, 0.06165139389038086, 0.06103494262695312, 0.06460620880126954, 0.063395263671875, 0.06127465438842773, 0.061454238891601565, 0.06102006530761719, 0.06071113586425781, 0.060723201751708984, 0.061134849548339844, 0.06302105712890625, 0.06120816040039063, 0.06073795318603516, 0.060397022247314455, 0.06105539321899414, 0.06042227172851562, 0.06075801467895508, 0.060696479797363284, 0.06063523101806641, 0.06123724746704102, 0.06082559967041016, 0.060262401580810546, 0.06044672012329102, 0.0603873291015625, 0.06064905548095703, 0.06092572784423828, 0.06117388916015625, 0.060561153411865236, 0.060674110412597654, 0.06062764739990235, 0.06093414306640625, 0.06079891204833984, 0.06111033630371094, 0.06081740951538086, 0.06119830322265625, 0.060911487579345704, 0.0606328010559082, 0.06071955108642578, 0.060663806915283204, 0.06330678558349609, 0.061438945770263674, 0.06126387023925781, 0.06139862442016602, 0.06146403121948242, 0.06149359893798828, 0.060840545654296876, 0.060981216430664065, 0.060610305786132815, 0.060778785705566406, 0.06058844757080078, 0.06057984161376953, 0.061050880432128904, 0.06076416015625, 0.060478591918945314, 0.06034854507446289, 0.06035529708862305, 0.060416030883789065, 0.06095872116088867, 0.060982879638671876, 0.06095727920532226, 0.06070054244995117, 0.06065350341796875, 0.061048065185546875, 0.06086323165893555, 0.06091775894165039, 0.06074163055419922, 0.06094028854370117, 0.06068428802490235, 0.060782302856445314, 0.060717342376708984, 0.06048956680297852, 0.060604576110839845, 0.06065145492553711, 0.06066387176513672, 0.06101766586303711, 0.06067596817016602, 0.060807743072509766, 0.06072051239013672, 0.060533439636230466, 0.060493759155273434, 0.06061603164672852, 0.060569438934326175, 0.062395198822021485, 0.0643276824951172, 0.06173081588745117, 0.06177164840698242, 0.06083567810058594, 0.0605002555847168, 0.0606075210571289, 0.0606520004272461, 0.06068000030517578, 0.060633792877197265, 0.06037913513183594, 0.060854270935058595, 0.0603928337097168, 0.06099792098999023, 0.0607542724609375, 0.061919200897216795, 0.06080924987792969, 0.06090137481689453, 0.06045695877075195, 0.060440353393554684, 0.06032441711425781, 0.060259998321533205, 0.06024710464477539, 0.06151468658447266, 0.060288993835449216, 0.060864574432373045, 0.06047740936279297, 0.06076211166381836, 0.06109001541137695, 0.06106816101074219, 0.06073193740844727, 0.06074390411376953, 0.060826751708984376, 0.06073955154418945, 
0.06111734390258789, 0.06083964920043945, 0.06067375946044922, 0.061669151306152345, 0.06082025527954102, 0.0604791374206543, 0.06070307159423828, 0.060819454193115234, 0.060657665252685546, 0.060624065399169924, 0.06100870513916016, 0.060731391906738284, 0.06086643218994141, 0.060989280700683594, 0.06066742324829102, 0.06114361572265625, 0.060684478759765625, 0.060849887847900394, 0.06049993515014648, 0.060768287658691404, 0.06037737655639648, 0.060439743041992185, 0.060519233703613284, 0.06069372940063476, 0.06059811019897461, 0.06048454284667969, 0.060432384490966794, 0.06114876937866211, 0.0605489616394043, 0.06070943832397461, 0.060801025390625, 0.06086860656738281, 0.06143385696411133, 0.06174720001220703, 0.06320742416381836, 0.06058598327636719, 0.0603955192565918, 0.06054502487182617, 0.06034431838989258, 0.0605464973449707, 0.06070534515380859, 0.06077849578857422, 0.06062044906616211, 0.060571998596191404, 0.06107955169677735, 0.06038256072998047, 0.06056003189086914, 0.060385280609130856, 0.0629505615234375, 0.06357487869262696, 0.06155446243286133, 0.06145455932617187, 0.06215270233154297, 0.060794174194335936, 0.060832447052001956, 0.0605159683227539, 0.06069631958007812, 0.06056819152832031, 0.06040371322631836, 0.061276641845703125, 0.060792896270751955, 0.060665855407714846, 0.060510208129882816, 0.06139494323730469, 0.06087059020996094, 0.0603583984375, 0.06040198516845703, 0.06038281631469727, 0.060273056030273435, 0.06015795135498047, 0.06035491180419922, 0.060233184814453125, 0.06054316711425781, 0.06070272064208984, 0.06066995239257812, 0.06042006301879883, 0.06075600051879883, 0.060774368286132814, 0.06049520111083984, 0.06050444793701172, 0.061425983428955076, 0.06077644729614258, 0.06049913787841797, 0.06065235137939453, 0.061982719421386716, 0.061868030548095705, 0.06080912017822265, 0.06040089416503906, 0.06056428909301758, 0.06050409698486328, 0.060641281127929686, 0.06043356704711914, 0.06074044799804688, 0.06069590377807617, 0.06129452896118164, 0.06104956817626953, 0.060733184814453126, 0.06100326538085937, 0.060701438903808594, 0.06044467163085938, 0.0603873291015625, 0.060482654571533206, 0.06040054321289062, 0.06132937622070313, 0.06067577743530273, 0.06093449783325195, 0.060767807006835935, 0.06071955108642578, 0.060660865783691405, 0.061059104919433595, 0.06066672134399414, 0.06117990493774414, 0.06093734359741211, 0.06213865661621094, 0.060617279052734375, 0.06046928024291992, 0.060726879119873046, 0.06044099044799805, 0.06036070251464844, 0.060507457733154295, 0.06027743911743164, 0.06044467163085938, 0.06033059310913086, 0.06043606567382812, 0.060578208923339844, 0.06058156967163086, 0.06053260803222656, 0.061123008728027346, 0.06707721710205078, 0.06541584014892578, 0.061332992553710934, 0.061233631134033205, 0.060808704376220706, 0.060832542419433595, 0.06077030563354492, 0.06053683090209961, 0.0603765754699707, 0.06083427047729492, 0.06108163070678711, 0.06054502487182617, 0.061140766143798826, 0.060829856872558596, 0.06059628677368164, 0.06060819244384766, 0.06132118225097656, 0.060628673553466794, 0.06101059341430664, 0.06180249786376953, 0.060628673553466794, 0.060684608459472655, 0.06177587127685547, 0.06076825714111328, 0.06082355117797852, 0.06110003280639648, 0.06126300811767578, 0.06096572875976562, 0.060946014404296874, 0.06129647827148438, 0.06082822418212891, 0.06097011184692383, 0.060736385345458985, 0.06188044738769531, 0.06117977523803711, 0.06090041732788086, 0.06075283050537109, 0.060723201751708984, 0.06056108856201172, 
0.0605676155090332, 0.060539134979248045, 0.06062080001831055, 0.06076211166381836, 0.060903423309326174, 0.06070476913452148, 0.06087680053710937, 0.060698368072509765, 0.06069036865234375, 0.060823871612548826, 0.06138880157470703, 0.061192192077636716, 0.060780544281005856, 0.0607191047668457, 0.06068576049804687, 0.06051641464233398, 0.06050048065185547, 0.060432384490966794, 0.060434303283691405, 0.06143603134155273, 0.0611605453491211, 0.0611511344909668, 0.06180556869506836, 0.060671775817871094, 0.06058339309692383, 0.06081372833251953, 0.061499744415283206, 0.061499393463134766, 0.06089839935302734, 0.060668319702148435, 0.060512767791748044, 0.06068428802490235, 0.060526592254638675, 0.060589599609375, 0.06046739196777344, 0.06070915222167969, 0.060813312530517576, 0.0607088623046875, 0.061367774963378904, 0.060952129364013674, 0.06076646423339844, 0.06084272003173828, 0.0630063362121582, 0.06559337615966797, 0.06174345779418945, 0.06176563262939453, 0.06159088134765625, 0.06087747192382813, 0.06078428649902344, 0.061028705596923825, 0.06088470458984375, 0.060765857696533206, 0.061177696228027344, 0.06093904113769531, 0.06146047973632812, 0.061179615020751955, 0.06130307388305664, 0.061108257293701174, 0.06116758346557617, 0.061050880432128904, 0.06097510528564453, 0.06094028854370117, 0.06058803176879883, 0.06055263900756836, 0.06069670486450195, 0.0609031982421875, 0.060659774780273436, 0.06071551895141602, 0.06132851028442383, 0.06168431854248047, 0.06126432037353516, 0.06141126251220703, 0.06093961715698242, 0.060946399688720704, 0.0609200325012207, 0.06112303924560547, 0.06064358520507813, 0.06098694229125977, 0.06105014419555664, 0.06321990585327149, 0.06231916809082031, 0.060746112823486326, 0.06065468978881836, 0.060695457458496097, 0.060980480194091795, 0.06064409637451172, 0.06092351913452149, 0.060612991333007814, 0.06105497741699219, 0.06159462356567383, 0.061074432373046876, 0.061454334259033204, 0.060962047576904294, 0.06106556701660156, 0.061196704864501954, 0.06178201675415039, 0.06136422348022461, 0.06115737533569336, 0.060931327819824216, 0.060666336059570315, 0.06071852874755859, 0.060824352264404295, 0.06072326278686523, 0.060722240447998045, 0.06067500686645508, 0.06075596618652344, 0.0628961296081543, 0.06376979064941406, 0.061330238342285154, 0.06094438552856445, 0.060880897521972656, 0.06085804748535156, 0.06063302230834961, 0.061281951904296875, 0.060797664642333986, 0.06068428802490235, 0.06060348892211914, 0.061574047088623046, 0.06087680053710937, 0.06171020889282226, 0.06085337448120117, 0.06099027252197266, 0.06180268859863281, 0.06450790405273438, 0.06317670440673828, 0.06127171325683594, 0.06090118408203125, 0.061164031982421874, 0.06046239852905273, 0.06060924911499024, 0.06044876861572265, 0.06052864074707031, 0.060424190521240234, 0.06099257659912109, 0.060969921112060545, 0.06125126266479492, 0.0610302734375, 0.0608526725769043, 0.060534305572509765, 0.06182038497924805, 0.060369918823242184, 0.06128025436401367, 0.06126704025268555, 0.06097532653808594]",tokens/s,16.36578046530836,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,868.691968,561.905664,0.0,159.383552,142.313472,s,1,7.55446728515625,7.55446728515625,0.0,7.55446728515625,7.55446728515625,7.55446728515625,7.55446728515625,[7.55446728515625],,kWh,1.3997972304150608e-05,1.5364735901113956e-06,4.7244482240249575e-06,2.025889411828696e-05,,MB,1297.002496,637.403136,0.0,222.298112,185.324544,s,16,0.20267187118530272,0.01266699194908142,0.00020016616082920788,0.01259935998916626,0.012878623962402344,0.013049512147903442,0.01326603035926819,"[0.012487872123718262, 0.012600735664367676, 0.013320159912109376, 0.012797951698303223, 0.01260643196105957, 0.012574463844299317, 0.012674176216125488, 0.012556639671325684, 0.01261135959625244, 0.012608384132385254, 0.012959296226501466, 0.012566944122314454, 0.012540575981140136, 0.012585375785827637, 0.01258351993560791, 0.012597984313964843]",tokens/s,20210.00731894872,kWh,3.7514177336118203e-07,4.1371586861823924e-08,1.8705706195627435e-07,6.035704221792802e-07,tokens/kWh,424142719.04788536,MB,1335.865344,639.500288,0.0,224.395264,185.327104,s,16,9.83491278076172,0.6146820487976075,0.003068343657751304,0.6145205993652344,0.6170430297851563,0.6188123626708985,0.6229913787841798,"[0.615562255859375, 0.616036376953125, 0.6240361328125, 0.6170149536132813, 0.61330712890625, 0.6150927124023438, 0.6147727661132812, 0.6126171875, 0.6118268432617188, 0.6142684326171876, 0.6106754150390625, 0.6117315673828125, 0.6170711059570313, 0.6156666259765625, 0.612135009765625, 0.6130982666015625]",tokens/s,102.49201212762864,kWh,1.7994404238618125e-05,1.9844736561790826e-06,6.4115072446694796e-06,2.6390385139466688e-05,tokens/kWh,2387233.064885583,,s,1008,9.827555815696705,0.009749559340968965,0.0001662875261581923,0.00971731185913086,0.009828224182128906,0.009878991794586182,0.010434398727416987,"[0.009870240211486817, 0.009813568115234376, 0.009877951622009277, 0.00973209571838379, 0.009755711555480957, 0.00994921588897705, 0.009771072387695312, 0.009768992424011231, 0.009775839805603027, 0.009776960372924805, 0.009726240158081054, 0.009740192413330078, 0.009783391952514648, 0.009938336372375489, 0.00972809600830078, 0.00971225643157959, 0.009832608222961425, 0.009712448120117187, 0.00976364803314209, 0.009767135620117188, 0.009787263870239258, 0.009729920387268066, 0.009721983909606933, 0.010147680282592773, 0.009785280227661133, 0.009739968299865722, 0.009720352172851563, 0.00981164836883545, 0.009971936225891114, 0.009824352264404297, 0.009750528335571289, 0.009752639770507813, 0.0097576322555542, 0.009925632476806641, 0.009745759963989258, 0.00970803165435791, 0.009686495780944824, 0.009675647735595703, 0.009726048469543457, 0.009703136444091797, 0.009699328422546387, 0.009735424041748047, 0.009664992332458495, 0.009722111701965332, 0.009689120292663575, 0.009774751663208008, 0.009779264450073242, 0.009765151977539063, 0.009728063583374023, 0.009707584381103515, 0.009749664306640624, 0.0097325439453125, 0.009711199760437012, 0.009746432304382324, 0.009720512390136719, 0.009713120460510254, 0.009711520195007324, 0.009716352462768555, 0.00971286392211914, 0.00975324821472168, 0.009701215744018554, 0.009726240158081054, 0.009686976432800294, 0.009766847610473632, 0.00971168041229248, 0.009725536346435547, 0.009660832405090332, 0.009680607795715331, 0.009705760002136231, 0.009668000221252441, 
0.00977184009552002, 0.00986508846282959, 0.009719712257385254, 0.009657504081726075, 0.00972060775756836, 0.009738656044006347, 0.009690784454345703, 0.009713664054870605, 0.00970137596130371, 0.009692992210388184, 0.009801535606384278, 0.009799039840698243, 0.009784319877624511, 0.01025385570526123, 0.009771488189697265, 0.009804991722106934, 0.009737183570861816, 0.009690976142883301, 0.00970355224609375, 0.009715583801269531, 0.009695487976074219, 0.009733887672424316, 0.009750144004821777, 0.009718144416809082, 0.009678848266601562, 0.009719296455383301, 0.009707200050354003, 0.009687392234802247, 0.009730527877807617, 0.009973759651184083, 0.009735551834106446, 0.009710176467895508, 0.009737600326538086, 0.009724255561828613, 0.009752896308898926, 0.009677120208740234, 0.00969279956817627, 0.009709823608398437, 0.009940320014953614, 0.009759200096130372, 0.00973027229309082, 0.009734272003173828, 0.009739551544189453, 0.009746591567993164, 0.009871808052062989, 0.009752351760864258, 0.009705280303955078, 0.009709055900573731, 0.009738944053649902, 0.009758720397949219, 0.009754752159118652, 0.009828224182128906, 0.010282208442687988, 0.009837344169616699, 0.010515456199645995, 0.009786399841308593, 0.009764896392822266, 0.009713919639587402, 0.009920191764831544, 0.009722592353820801, 0.011779840469360352, 0.009923871994018554, 0.009829471588134766, 0.009836416244506836, 0.009754624366760254, 0.009811807632446289, 0.009789600372314453, 0.00970956802368164, 0.009744383811950684, 0.009815808296203613, 0.009734496116638184, 0.009688447952270508, 0.009847583770751954, 0.009835647583007813, 0.00978105640411377, 0.009745344161987305, 0.009719903945922852, 0.009778176307678223, 0.009873408317565918, 0.009779999732971192, 0.009760767936706542, 0.009734047889709472, 0.009729344367980957, 0.009833215713500976, 0.009828384399414063, 0.009793279647827148, 0.00986460781097412, 0.009816831588745117, 0.009806976318359376, 0.009874624252319336, 0.0097871675491333, 0.009789088249206542, 0.009783616065979004, 0.01003059196472168, 0.011688608169555664, 0.009763039588928223, 0.00976041603088379, 0.00975881576538086, 0.009741536140441895, 0.00973299217224121, 0.009844256401062011, 0.009797696113586427, 0.009830623626708985, 0.009760736465454101, 0.010186944007873535, 0.011060159683227539, 0.010265631675720215, 0.009832448005676269, 0.009871232032775878, 0.009766688346862793, 0.009847007751464843, 0.009832096099853516, 0.009768351554870606, 0.009776063919067383, 0.01031772804260254, 0.00976905632019043, 0.00975814437866211, 0.009724032402038575, 0.009748895645141602, 0.009707743644714356, 0.009775168418884277, 0.009764800071716309, 0.009768959999084472, 0.009690143585205079, 0.00972662353515625, 0.009680607795715331, 0.009714207649230956, 0.009729280471801757, 0.009937151908874512, 0.009763296127319337, 0.009766240119934082, 0.009720576286315919, 0.009771007537841797, 0.010896384239196777, 0.009810784339904784, 0.009957504272460938, 0.009762432098388672, 0.009774751663208008, 0.009771360397338866, 0.009699904441833496, 0.009746272087097168, 0.009695232391357422, 0.009817664146423339, 0.009986528396606445, 0.009829824447631835, 0.00981056022644043, 0.0096910400390625, 0.009719327926635742, 0.009709216117858887, 0.009792320251464844, 0.009751744270324707, 0.01020809555053711, 0.00973145580291748, 0.00973583984375, 0.009818719863891602, 0.009785696029663086, 0.009736191749572755, 0.009702495574951172, 0.009956543922424316, 0.009721376419067384, 0.009792863845825195, 0.009775967597961425, 
0.009773056030273437, 0.00981100845336914, 0.00972879981994629, 0.009732383728027344, 0.009709440231323242, 0.009785344123840332, 0.009777152061462402, 0.009760607719421386, 0.00972815990447998, 0.009731488227844238, 0.0097357759475708, 0.009694208145141601, 0.009750528335571289, 0.00971555233001709, 0.009715519905090331, 0.00970307159423828, 0.00978831958770752, 0.009744159698486328, 0.009783295631408692, 0.009691360473632813, 0.009679231643676758, 0.00972764778137207, 0.009703680038452149, 0.009730143547058106, 0.009707263946533203, 0.009746368408203126, 0.009701696395874024, 0.009741472244262696, 0.00977353572845459, 0.009787775993347169, 0.009840640068054199, 0.009736063957214356, 0.009736576080322266, 0.009729791641235351, 0.009730303764343261, 0.009731328010559082, 0.00972646427154541, 0.009721152305603028, 0.009728320121765137, 0.009881248474121094, 0.009703712463378906, 0.009756992340087891, 0.00970473575592041, 0.009766847610473632, 0.009767840385437012, 0.00970956802368164, 0.009690560340881348, 0.00970633602142334, 0.009766655921936035, 0.009738207817077637, 0.009739295959472656, 0.010181599617004395, 0.009693183898925782, 0.009768959999084472, 0.009729184150695801, 0.009718624114990235, 0.009676799774169922, 0.009680288314819336, 0.009662752151489257, 0.009670656204223632, 0.009675071716308594, 0.009664511680603028, 0.009710880279541015, 0.009650719642639161, 0.009674943923950196, 0.00970751953125, 0.009664511680603028, 0.009815391540527343, 0.009691424369812011, 0.009724287986755372, 0.009661664009094238, 0.00972265625, 0.009752575874328612, 0.009661952018737792, 0.009660927772521973, 0.009701120376586913, 0.009658623695373535, 0.00971776008605957, 0.009711615562438965, 0.009766079902648927, 0.009710399627685547, 0.009699328422546387, 0.009738240242004394, 0.009686464309692383, 0.009686911582946777, 0.009704511642456054, 0.009676575660705566, 0.009742079734802246, 0.009737728118896484, 0.009690976142883301, 0.009706432342529298, 0.00985267162322998, 0.009713888168334961, 0.009689023971557617, 0.009734368324279786, 0.00966256046295166, 0.00972480010986328, 0.00967353630065918, 0.00966972827911377, 0.009661055564880372, 0.009658720016479492, 0.009705504417419434, 0.009676223754882812, 0.00968553638458252, 0.00969484806060791, 0.00966207981109619, 0.009698047637939453, 0.009662176132202148, 0.00975481605529785, 0.009665920257568359, 0.009681376457214355, 0.009701631546020507, 0.009684639930725098, 0.009742719650268555, 0.009674592018127441, 0.009753888130187988, 0.009685855865478515, 0.00969315242767334, 0.010350624084472656, 0.010307583808898926, 0.010440704345703124, 0.00984489631652832, 0.009984864234924316, 0.009808639526367187, 0.00975692844390869, 0.00973209571838379, 0.009707263946533203, 0.009766464233398437, 0.009755359649658204, 0.00970355224609375, 0.009711456298828126, 0.009798975944519043, 0.009763520240783691, 0.009803647994995118, 0.009736415863037109, 0.009690976142883301, 0.009705632209777833, 0.00971548843383789, 0.009694720268249512, 0.009808511734008788, 0.009721376419067384, 0.009922719955444336, 0.009732416152954101, 0.009701055526733398, 0.009779040336608886, 0.009691488265991212, 0.009703424453735352, 0.00974847984313965, 0.00978713607788086, 0.009748736381530761, 0.009789440155029297, 0.009781248092651367, 0.009746432304382324, 0.009764863967895507, 0.009789440155029297, 0.009778271675109864, 0.009718688011169433, 0.009750528335571289, 0.009719807624816895, 0.009766528129577637, 0.009716095924377442, 0.009709919929504394, 0.01120150375366211, 
0.009759424209594726, 0.009680895805358887, 0.009745823860168456, 0.009704031944274903, 0.009807871818542481, 0.009698335647583007, 0.009681887626647949, 0.009707551956176758, 0.009648256301879883, 0.009708736419677734, 0.009665216445922852, 0.009709535598754883, 0.009700448036193847, 0.009673727989196777, 0.009698399543762207, 0.00967148780822754, 0.00969279956817627, 0.009699551582336426, 0.009754783630371093, 0.009781184196472168, 0.009661888122558594, 0.009669247627258301, 0.009680255889892578, 0.00969711971282959, 0.00970627212524414, 0.009770655632019043, 0.00977667236328125, 0.009679295539855958, 0.009683327674865723, 0.009652447700500489, 0.009688384056091308, 0.009714143753051758, 0.009678784370422364, 0.009714847564697265, 0.010023327827453613, 0.009756959915161133, 0.009683487892150879, 0.0098405122756958, 0.009694784164428712, 0.009687295913696289, 0.009850879669189454, 0.009834495544433594, 0.00970307159423828, 0.00968841552734375, 0.009693471908569336, 0.009681183815002442, 0.009666560173034668, 0.009666336059570313, 0.009695455551147461, 0.00971571159362793, 0.009678591728210449, 0.009644096374511718, 0.009678175926208496, 0.009687904357910157, 0.009655360221862794, 0.00970847988128662, 0.009670944213867187, 0.009917535781860352, 0.009683584213256836, 0.009654303550720215, 0.009678624153137206, 0.00967305564880371, 0.009676639556884765, 0.009678848266601562, 0.009627584457397462, 0.009715807914733888, 0.00968291187286377, 0.009760831832885742, 0.009749855995178223, 0.009759200096130372, 0.009694687843322754, 0.00974505615234375, 0.009738304138183594, 0.009676735877990722, 0.009697312355041504, 0.009656352043151855, 0.009780256271362305, 0.009666879653930665, 0.009682527542114258, 0.009712320327758789, 0.009691424369812011, 0.009731904029846191, 0.009732319831848144, 0.009832223892211915, 0.009783552169799805, 0.009752544403076172, 0.009711008071899414, 0.009781408309936523, 0.009705920219421387, 0.00969222354888916, 0.009769375801086427, 0.00979203224182129, 0.009701024055480957, 0.00968502426147461, 0.00973203182220459, 0.009740960121154786, 0.00973094367980957, 0.009744416236877442, 0.009712160110473633, 0.00971548843383789, 0.009746848106384277, 0.009694975852966308, 0.009718111991882324, 0.009686431884765624, 0.009705792427062989, 0.009858624458312988, 0.00973692798614502, 0.00977939224243164, 0.009721887588500977, 0.00968716812133789, 0.009725248336791992, 0.009694975852966308, 0.009701600074768066, 0.009724512100219726, 0.009705471992492675, 0.009703424453735352, 0.00968505573272705, 0.0097074556350708, 0.009791616439819337, 0.009703264236450196, 0.009809696197509765, 0.009651776313781738, 0.009689791679382324, 0.009627519607543945, 0.009740480422973633, 0.009736127853393554, 0.009691136360168457, 0.009682080268859864, 0.009664575576782226, 0.01003395175933838, 0.00974233627319336, 0.009895168304443359, 0.009709888458251954, 0.009678688049316406, 0.00966537570953369, 0.009714752197265626, 0.009650879859924317, 0.009642239570617675, 0.009664575576782226, 0.009645759582519531, 0.009691455841064452, 0.009649279594421387, 0.009646304130554199, 0.009680831909179687, 0.009688511848449707, 0.009679360389709473, 0.00967465591430664, 0.009666655540466309, 0.009647775650024414, 0.009670623779296875, 0.009691231727600098, 0.009744864463806152, 0.009648192405700683, 0.009655584335327148, 0.0096812801361084, 0.009766719818115234, 0.009661184310913087, 0.009695039749145509, 0.009700832366943359, 0.009702048301696777, 0.009741632461547852, 0.009678943634033203, 0.009730815887451172, 
0.009660320281982422, 0.0097259521484375, 0.009663647651672363, 0.009675583839416503, 0.009711999893188477, 0.00970307159423828, 0.009744383811950684, 0.009768480300903321, 0.00976089572906494, 0.009706208229064942, 0.009752256393432617, 0.00974233627319336, 0.009742591857910156, 0.00976255989074707, 0.009686911582946777, 0.009719743728637695, 0.009752767562866211, 0.00974028778076172, 0.009714816093444824, 0.009757568359375, 0.009711808204650878, 0.009735679626464844, 0.009694944381713868, 0.009722463607788086, 0.0097259521484375, 0.009689087867736817, 0.00974556827545166, 0.0097161283493042, 0.009784832000732421, 0.009697823524475098, 0.009766528129577637, 0.009697343826293945, 0.009759455680847169, 0.00973209571838379, 0.00978544044494629, 0.009756575584411621, 0.009817983627319336, 0.009812352180480956, 0.009829440116882325, 0.009755328178405761, 0.009893695831298828, 0.00977120018005371, 0.009779199600219727, 0.009979519844055176, 0.00974886417388916, 0.009743391990661621, 0.009731072425842285, 0.009703392028808594, 0.009777312278747558, 0.009762911796569825, 0.009684736251831055, 0.009775103569030762, 0.009752575874328612, 0.009730048179626465, 0.009719552040100098, 0.009689344406127929, 0.00972390365600586, 0.009773056030273437, 0.009750528335571289, 0.009705471992492675, 0.00971571159362793, 0.00971564769744873, 0.009748543739318848, 0.009865216255187988, 0.009727999687194825, 0.009747679710388184, 0.009663264274597167, 0.009699328422546387, 0.00970470428466797, 0.009693599700927735, 0.009647904396057129, 0.009678624153137206, 0.009680895805358887, 0.009766912460327149, 0.00974233627319336, 0.009682944297790527, 0.0096976318359375, 0.009694879531860351, 0.009675968170166015, 0.0096976318359375, 0.009680383682250977, 0.009706463813781738, 0.009709024429321289, 0.009695072174072266, 0.009675456047058105, 0.00969273567199707, 0.009665184020996094, 0.009647871971130371, 0.009766367912292481, 0.009660544395446778, 0.009677248001098633, 0.009670687675476073, 0.009643136024475097, 0.009684127807617187, 0.00969644832611084, 0.009693504333496094, 0.009664480209350585, 0.009683135986328126, 0.009664544105529785, 0.009678848266601562, 0.009651264190673829, 0.009652576446533202, 0.009664928436279297, 0.009639231681823731, 0.009646976470947265, 0.009721216201782226, 0.009667200088500976, 0.009760031700134278, 0.009752639770507813, 0.00970307159423828, 0.009644960403442383, 0.009672800064086913, 0.009668160438537597, 0.009728032112121582, 0.009660832405090332, 0.00972009563446045, 0.009776576042175293, 0.00981049633026123, 0.009674816131591797, 0.009664159774780273, 0.009637887954711915, 0.009687040328979492, 0.009654496192932129, 0.009663935661315917, 0.009671008110046387, 0.009611007690429687, 0.009693632125854492, 0.00961945629119873, 0.009696736335754394, 0.009691167831420898, 0.00967916774749756, 0.00965385627746582, 0.009761183738708497, 0.009684991836547852, 0.009641280174255371, 0.009660415649414063, 0.009668928146362304, 0.009712800025939942, 0.00967734432220459, 0.009696800231933594, 0.009753055572509765, 0.009822239875793457, 0.009744223594665527, 0.009812095642089843, 0.00984665584564209, 0.00974665641784668, 0.009774080276489258, 0.009718688011169433, 0.00973209571838379, 0.009815936088562011, 0.009928832054138184, 0.00972003173828125, 0.009690912246704101, 0.009700927734375, 0.009652671813964843, 0.009656224250793457, 0.009680895805358887, 0.009719903945922852, 0.009682944297790527, 0.009831775665283203, 0.009762944221496582, 0.009683487892150879, 0.009698304176330566, 
0.009692031860351563, 0.00972332763671875, 0.009702079772949219, 0.009671999931335449, 0.009687744140625, 0.009841888427734376, 0.009701567649841309, 0.009722463607788086, 0.009695520401000976, 0.009658080101013183, 0.009642208099365234, 0.009692352294921875, 0.009661279678344726, 0.009695327758789063, 0.009704799652099609, 0.00966220760345459, 0.009704319953918458, 0.009596320152282714, 0.009658975601196289, 0.00965999984741211, 0.009665823936462403, 0.009726400375366212, 0.00966489601135254, 0.009698687553405761, 0.009634592056274415, 0.009658207893371582, 0.009645503997802734, 0.0096910400390625, 0.009683615684509277, 0.00962764835357666, 0.009670080184936524, 0.009663328170776368, 0.009651935577392579, 0.009620832443237306, 0.009616000175476074, 0.009671839714050293, 0.009699456214904785, 0.009835231781005859, 0.009704863548278809, 0.009771552085876465, 0.009758687973022461, 0.009695391654968261, 0.009737376213073731, 0.009693087577819825, 0.010098688125610352, 0.009732288360595703, 0.009830368041992188, 0.009706208229064942, 0.009750207901000977, 0.009687456130981445, 0.00976790428161621, 0.00974665641784668, 0.009804800033569335, 0.009724703788757325, 0.009739199638366699, 0.009879551887512206, 0.009718111991882324, 0.009758367538452148, 0.009672703742980958, 0.009785344123840332, 0.009721471786499023, 0.009779583930969239, 0.00974028778076172, 0.00973414421081543, 0.009752575874328612, 0.00970582389831543, 0.009761695861816405, 0.009738368034362793, 0.009820544242858886, 0.009713919639587402, 0.00972755241394043, 0.009779520034790039, 0.009744511604309083, 0.00973846435546875, 0.009760543823242188, 0.00970751953125, 0.009740480422973633, 0.009725824356079101, 0.00989363193511963, 0.010664159774780274, 0.009830368041992188, 0.009724032402038575, 0.009762687683105469, 0.009730239868164063, 0.009804896354675293, 0.00973020839691162, 0.009740863800048828, 0.00974176025390625, 0.00983020782470703, 0.010985919952392578, 0.009775487899780274, 0.009709504127502442, 0.009692959785461426, 0.009659744262695313, 0.00968832015991211, 0.009733792304992675, 0.009680543899536133, 0.00985968017578125, 0.00978547191619873, 0.009742912292480468, 0.009770815849304199, 0.009770432472229003, 0.00969580841064453, 0.00969046401977539, 0.0097391357421875, 0.00968876838684082, 0.009712127685546875, 0.009719903945922852, 0.009750592231750487, 0.009696672439575196, 0.009928192138671875, 0.009677599906921387, 0.009669631958007812, 0.009781599998474121, 0.00967734432220459, 0.009672703742980958, 0.009678720474243164, 0.00969977569580078, 0.00969321632385254, 0.009703167915344238, 0.009703424453735352, 0.00970751953125, 0.009715392112731933, 0.009731840133666993, 0.009690848350524903, 0.009693535804748535, 0.009662976264953613, 0.009777088165283204, 0.009667936325073243, 0.00969164752960205, 0.011222399711608887, 0.011254048347473145, 0.009796287536621094, 0.009725824356079101, 0.00985644817352295, 0.009693120002746582, 0.00968563175201416, 0.00971571159362793, 0.009713408470153808, 0.009828607559204101, 0.0096911039352417, 0.009717791557312012, 0.009709471702575684, 0.009672800064086913, 0.0096627836227417, 0.009711296081542968, 0.00971776008605957, 0.009706687927246094, 0.009706591606140137, 0.00971686363220215, 0.00968950366973877, 0.009702943801879882, 0.009668895721435547, 0.00973043155670166, 0.009697279930114745, 0.009713088035583496, 0.009713536262512206, 0.009697567939758301, 0.009715744018554687, 0.009693632125854492, 0.009688287734985352, 0.00961945629119873, 0.009659808158874511, 0.009657247543334961, 
0.009668288230895996, 0.009659551620483398, 0.009695775985717773, 0.009771360397338866, 0.009650303840637206, 0.009713055610656739, 0.009702943801879882, 0.009699647903442382, 0.009648703575134277, 0.009680928230285645, 0.009665920257568359, 0.009638463973999024, 0.009686400413513183, 0.009667551994323731, 0.00973209571838379, 0.00970140838623047, 0.009678815841674805, 0.009675935745239258, 0.009665087699890136, 0.009691136360168457, 0.00971286392211914, 0.009703616142272949, 0.009726559638977051, 0.009682784080505371, 0.009693344116210938, 0.009664511680603028, 0.009842687606811524, 0.009663647651672363, 0.009693535804748535, 0.009703359603881835, 0.009703776359558106, 0.009737600326538086, 0.009674624443054199, 0.00976585578918457, 0.00970751953125, 0.009654047966003419, 0.009699456214904785, 0.009748703956604004, 0.009828224182128906, 0.00977244758605957, 0.009740927696228027, 0.009721823692321777, 0.009795583724975587, 0.009719807624816895, 0.009702719688415528, 0.009714367866516114, 0.009776543617248536, 0.009849280357360839, 0.009734304428100585, 0.009843999862670899, 0.009685248374938964, 0.009715840339660645, 0.009725728034973145, 0.009712191581726074, 0.00974351978302002, 0.009656319618225098, 0.009732352256774903, 0.009720288276672363, 0.009691328048706054, 0.009690527915954589, 0.009655679702758789, 0.009675488471984864, 0.009656415939331055, 0.009698495864868165, 0.009650943756103515, 0.009789312362670898, 0.009670656204223632, 0.009670656204223632, 0.009681920051574706, 0.009676799774169922, 0.009798144340515137, 0.009783807754516602, 0.009698495864868165, 0.009622336387634278, 0.009660415649414063, 0.009674752235412597, 0.009702943801879882, 0.009740768432617187, 0.009664544105529785, 0.009694208145141601, 0.009667776107788085, 0.009823840141296386, 0.009744576454162598, 0.009756671905517577, 0.009687199592590331, 0.009678560256958008, 0.009728063583374023, 0.009728032112121582, 0.009670687675476073, 0.009705696105957031, 0.009674880027770996, 0.009701024055480957, 0.010038911819458008, 0.009738847732543946, 0.009653216361999511, 0.009911231994628907, 0.009942912101745605, 0.009842687606811524, 0.009815103530883789, 0.009788352012634277, 0.009668895721435547, 0.009664223670959473, 0.009711520195007324, 0.00968131160736084, 0.009838272094726562, 0.009831680297851562, 0.00968563175201416, 0.009863231658935547, 0.009672608375549317, 0.00969040012359619, 0.00965721607208252, 0.009684415817260742, 0.009767487525939941, 0.009690239906311035, 0.00967520046234131, 0.009637824058532715, 0.009713312149047852, 0.009732959747314453, 0.009676671981811524, 0.009807616233825684, 0.009708064079284668, 0.009713024139404296, 0.009685471534729003]",tokens/s,102.56873824008281,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in 
execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,7776.858112,3470.655488,0.0,3068.133376,2990.958592,s,1,17.23658984375,17.23658984375,0.0,17.23658984375,17.23658984375,17.23658984375,17.23658984375,[17.23658984375],,kWh,0.0003002787221583882,3.310893542631133e-05,0.00010156091458200789,0.00043494857216670746,,MB,4417.41312,3583.901696,0.0,3160.408064,3145.6512,s,10,1.0589946136474608,0.10589946136474611,0.0008881118612331383,0.10575214385986328,0.1062668716430664,0.10734756240844726,0.10821211502075195,"[0.10564723205566406, 0.10572998046875, 0.10602671813964844, 0.10577430725097656, 0.10577884674072266, 0.10842825317382812, 0.10578015899658202, 0.10530799865722656, 0.10496607971191406, 0.10555503845214843]",tokens/s,2417.387177431124,kWh,3.0838167503071943e-06,3.400905297377273e-07,1.816802623031732e-06,5.240709903076653e-06,tokens/kWh,48848343.97143612,MB,4421.545984,3732.799488,0.0,3307.208704,3216.658944,s,10,63.98075878906251,6.3980758789062495,0.023671191291831036,6.40327783203125,6.418080126953125,6.426238793945313,6.432765727539063,"[6.41522802734375, 6.41626708984375, 6.406935546875, 6.39089599609375, 6.4343974609375, 6.411775390625, 6.3996201171875, 6.34982666015625, 6.3878349609375, 6.3679775390625]",tokens/s,9.84671035360866,kWh,0.00018451906916302217,2.035323079981764e-05,7.401011914956786e-05,0.0002788824191124077,tokens/kWh,225901.65489997028,,s,630,63.977574279785145,0.10155170520600819,0.0012613571375929855,0.1012408332824707,0.1030190658569336,0.10504670181274414,0.10618040473937988,"[0.10103398132324219, 0.10178102111816406, 0.10163452911376954, 0.10169683074951172, 0.10122418975830078, 0.10130716705322265, 0.10132495880126953, 0.10209423828125, 0.1014134750366211, 0.10155961608886718, 0.1012741470336914, 0.10123113250732421, 0.10124658966064454, 0.10312646484375, 0.10633478546142579, 0.10196572875976563, 0.1017344970703125, 0.1018265609741211, 0.10176032257080078, 0.10155606079101563, 0.10165744018554687, 0.10166476440429688, 0.10184838104248047, 0.10191311645507813, 0.10565388488769531, 0.10174687957763671, 0.10170317077636719, 0.10121711730957031, 0.1012264633178711, 0.10114575958251953, 0.10137894439697266, 0.10139852905273437, 0.10142889404296875, 0.10203785705566407, 0.10153164672851563, 0.10106060791015625, 0.10144153594970703, 0.10162380981445313, 0.10140262603759766, 0.1007984619140625, 0.10100025939941407, 0.10133395385742187, 0.10120806121826172, 0.10163606262207031, 0.10146819305419921, 0.1015203857421875, 0.10424012756347656, 0.10179695892333984, 0.10259139251708985, 0.10117286682128906, 0.10145420837402344, 0.10138419342041016, 0.10172415924072266, 0.10210918426513672, 0.10179904174804688, 0.10185001373291015, 0.10195475006103516, 0.10360230255126954, 0.10459548950195313, 0.10101942443847656, 0.10149974060058593, 
0.1011773452758789, 0.10175660705566406, 0.100565185546875, 0.10171199798583984, 0.10162448120117187, 0.1020600357055664, 0.10622771453857421, 0.104447265625, 0.10110761260986328, 0.10107724761962891, 0.10238950347900391, 0.10192364501953124, 0.10193714904785156, 0.10344038391113282, 0.10251673889160157, 0.1019202880859375, 0.1015066909790039, 0.10124374389648437, 0.10180553436279297, 0.1016115493774414, 0.10314598083496093, 0.1017364501953125, 0.10143698883056641, 0.1009873275756836, 0.10132217407226562, 0.10117996978759766, 0.10124835205078125, 0.101376220703125, 0.10133920288085937, 0.10125497436523437, 0.10225107574462891, 0.10166246032714844, 0.10151494598388672, 0.10125523376464844, 0.1012679672241211, 0.10143673706054687, 0.101542236328125, 0.10130419158935547, 0.1017635498046875, 0.101742431640625, 0.10163827514648438, 0.10163961791992188, 0.10111030578613281, 0.10157676696777344, 0.1014824981689453, 0.10119782257080077, 0.10178150177001953, 0.10199244689941406, 0.10135062408447265, 0.10224515533447266, 0.10227693176269531, 0.10208889770507812, 0.10104370880126953, 0.10141747283935547, 0.10177107238769531, 0.10151055908203124, 0.10154064178466797, 0.10149244689941406, 0.1022449951171875, 0.10195011138916016, 0.10448732757568359, 0.10332630157470703, 0.10195558166503907, 0.10154188537597657, 0.10139065551757813, 0.10128399658203124, 0.1035228500366211, 0.10224566650390625, 0.10205052947998047, 0.10553343963623046, 0.10115478515625, 0.10131459045410156, 0.10125721740722657, 0.10119760131835938, 0.10093164825439453, 0.10127565002441406, 0.10200899505615234, 0.10167705535888671, 0.1019983673095703, 0.10208688354492187, 0.10142227172851563, 0.10339376068115234, 0.10127190399169922, 0.10139414215087891, 0.1016036148071289, 0.10122444915771485, 0.10175043487548828, 0.10156626892089844, 0.10154988861083984, 0.10154876708984376, 0.10130201721191406, 0.10113990020751953, 0.10115769958496093, 0.10122383880615235, 0.10126755523681641, 0.10114841461181641, 0.10174291229248048, 0.10158688354492187, 0.1050403823852539, 0.10301235198974609, 0.10154761505126954, 0.1008598403930664, 0.10117574310302735, 0.10106047821044922, 0.1016087646484375, 0.1047273941040039, 0.10140003204345703, 0.10210153961181641, 0.10172345733642578, 0.10179449462890625, 0.10162553405761719, 0.10124934387207031, 0.10122649383544922, 0.10137728118896484, 0.10086271667480469, 0.10090290832519531, 0.10149683380126953, 0.10222592163085938, 0.10264166259765625, 0.10135552215576171, 0.10084265899658203, 0.10099987030029296, 0.10213187408447266, 0.10052326202392578, 0.10107469177246094, 0.10087843322753906, 0.10114755249023437, 0.10117046356201172, 0.10039596557617188, 0.10059347534179687, 0.10058057403564453, 0.10077897644042969, 0.10062643432617188, 0.10057318115234375, 0.10057727813720703, 0.10071244812011719, 0.10307949066162109, 0.10046857452392578, 0.10056050872802734, 0.10079561614990235, 0.10033436584472656, 0.10044515228271485, 0.10048416137695312, 0.10102674865722656, 0.10118950653076172, 0.10059788513183594, 0.10588159942626953, 0.1011404800415039, 0.10060160064697266, 0.10055919647216797, 0.1007257308959961, 0.10104291534423829, 0.10066556549072266, 0.10049302673339844, 0.10050774383544922, 0.10077817535400391, 0.10698137664794923, 0.10551705932617188, 0.10185318756103516, 0.10077184295654297, 0.100890625, 0.10051993560791016, 0.10080461120605469, 0.10077593231201172, 0.10120396423339843, 0.1014512939453125, 0.10554608154296875, 0.1016280288696289, 0.10127881622314452, 0.10093660736083984, 0.10087014770507813, 
0.10067353820800781, 0.10062777709960938, 0.10091571044921875, 0.10116844940185547, 0.10173529815673828, 0.10123878479003906, 0.10592825317382812, 0.10153209686279296, 0.10416947174072265, 0.10086787414550781, 0.10085603332519531, 0.10081689453125, 0.10061619567871094, 0.10078934478759766, 0.10134595489501953, 0.10130662536621093, 0.10578556823730469, 0.10121193695068359, 0.10068345642089843, 0.10104441833496093, 0.10074076843261719, 0.10123004913330078, 0.10149078369140625, 0.1016428451538086, 0.10123078155517579, 0.10586434936523438, 0.1014955825805664, 0.10122041320800781, 0.10096844482421875, 0.10092543792724609, 0.10438041687011719, 0.10176921844482421, 0.10157785797119141, 0.10211212921142578, 0.10249199676513672, 0.10655760192871094, 0.10136681365966797, 0.1014276123046875, 0.10134108734130859, 0.1014637451171875, 0.10133808135986327, 0.10165766143798828, 0.10144630432128907, 0.10294841766357422, 0.10117193603515626, 0.10592460632324219, 0.10185318756103516, 0.10172166442871093, 0.10139263916015626, 0.10116044616699219, 0.10135417938232422, 0.1015889892578125, 0.1013043212890625, 0.10249327850341797, 0.1010854721069336, 0.10618534088134765, 0.10169548797607422, 0.10112409973144532, 0.10140262603759766, 0.10116095733642579, 0.10148659515380859, 0.10162777709960938, 0.1016627197265625, 0.10176525115966797, 0.10158694458007812, 0.10622975921630859, 0.10179174041748047, 0.10145369720458984, 0.10162326049804687, 0.10164281463623047, 0.10169261169433594, 0.10157353973388672, 0.10182032012939453, 0.10258624267578124, 0.1041983642578125, 0.10616831970214843, 0.10168729400634766, 0.10157164764404297, 0.10182752227783202, 0.10126950073242187, 0.1017671661376953, 0.10120191955566406, 0.10450943756103516, 0.10124288177490234, 0.10612844848632813, 0.10151417541503906, 0.10123673248291015, 0.10113638305664062, 0.1011568603515625, 0.10123878479003906, 0.10151936340332031, 0.10156025695800781, 0.1013719711303711, 0.10105999755859375, 0.10380553436279297, 0.10292201232910156, 0.10096867370605468, 0.10103916931152344, 0.10123062133789062, 0.10103081512451172, 0.101000732421875, 0.10166524505615235, 0.1013694076538086, 0.10175328063964843, 0.10395648193359375, 0.10506854248046875, 0.10103308868408203, 0.10073996734619141, 0.10089615631103516, 0.1006899871826172, 0.1009830093383789, 0.10143577575683593, 0.10109529876708985, 0.1011712646484375, 0.10093055725097656, 0.10544844818115234, 0.1008248291015625, 0.10095430755615234, 0.10100537872314454, 0.10079212951660156, 0.10092479705810548, 0.10110240173339843, 0.10066124725341796, 0.1007636489868164, 0.10104000091552734, 0.10657804870605468, 0.10090086364746094, 0.10135574340820312, 0.10372233581542968, 0.10095366668701172, 0.10097958374023437, 0.10178355407714844, 0.10146595001220703, 0.101308349609375, 0.10145359802246094, 0.10509315490722657, 0.10098467254638673, 0.10059584045410157, 0.10073747253417968, 0.10107263946533203, 0.10335052490234375, 0.10086195373535156, 0.10107698822021484, 0.10149443054199218, 0.10212786865234375, 0.10607123565673827, 0.10077798461914063, 0.10121011352539062, 0.10101062774658204, 0.1026343002319336, 0.1010987548828125, 0.10135743713378906, 0.10179878234863281, 0.10132479858398437, 0.10593689727783204, 0.10154188537597657, 0.10147225952148438, 0.10218291473388671, 0.10140617370605469, 0.10143702697753906, 0.1007543716430664, 0.10129408264160156, 0.10146752166748046, 0.10087894439697266, 0.10460368347167968, 0.10221977233886718, 0.10159923553466797, 0.10130226898193359, 0.1010091552734375, 0.10116534423828125, 
0.10062230682373047, 0.10012876892089843, 0.1006346206665039, 0.10050355529785156, 0.10057933044433594, 0.10602086639404297, 0.10222908782958984, 0.10148735809326172, 0.10310467529296875, 0.10515455627441406, 0.10143692779541015, 0.1011060791015625, 0.10078627014160156, 0.1005322265625, 0.10030620574951171, 0.105283935546875, 0.10155455780029297, 0.10078412628173829, 0.10077184295654297, 0.10143743896484375, 0.10072809600830078, 0.10018479919433594, 0.10076761627197266, 0.10045247650146484, 0.10051583862304687, 0.10527334594726563, 0.10055884552001954, 0.10051507568359375, 0.10220825958251953, 0.10033929443359375, 0.10029302215576172, 0.10236927795410156, 0.10088240051269531, 0.10055068969726562, 0.10039295959472656, 0.10525654602050781, 0.10082345581054687, 0.10067257690429687, 0.10060387420654297, 0.1003499526977539, 0.10047078704833984, 0.10055046081542969, 0.10044844818115234, 0.10024960327148437, 0.10498662567138672, 0.10111385345458984, 0.10121001434326173, 0.10163619232177734, 0.10531839752197265, 0.10068701171875, 0.10056380462646484, 0.10060185241699218, 0.10034381103515624, 0.1006692123413086, 0.10400994873046875, 0.10211264038085938, 0.10077657318115234, 0.10071040344238281, 0.10124697875976563, 0.10049740600585938, 0.1004780502319336, 0.10028534698486329, 0.10010623931884766, 0.1005317153930664, 0.10043033599853515, 0.10041753387451172, 0.10071244812011719, 0.10135346984863282, 0.10057692718505859, 0.10024566650390625, 0.1002514877319336, 0.09985263824462891, 0.10022502136230468, 0.10083328247070313, 0.10028390502929688, 0.10049967956542968, 0.10039472198486328, 0.10075161743164063, 0.10039532470703125, 0.10010829162597656, 0.09981887817382812, 0.10013760375976563, 0.10031846618652343, 0.10015821075439453, 0.10045827484130859, 0.1004464340209961, 0.10085916900634766, 0.10044284820556641, 0.10050764465332031, 0.09992806243896485, 0.10063053131103515, 0.10086399841308594, 0.10031449890136719, 0.10057321929931641, 0.10061065673828125, 0.10050105285644531, 0.10154847717285156, 0.10134275054931641, 0.10089929962158203, 0.10022911834716797, 0.10089241790771485, 0.10075775909423829, 0.1007267837524414, 0.10082431793212891, 0.10072755432128906, 0.10340557098388672, 0.10590412902832032, 0.10141490936279297, 0.10106470489501954, 0.10084966278076171, 0.10240204620361328, 0.10204978942871094, 0.10097049713134766, 0.10127359771728515, 0.10097212982177735, 0.10170381164550782, 0.10181660461425782, 0.10055875396728516, 0.10070639801025391, 0.10140262603759766, 0.10094796752929687, 0.10075039672851563, 0.10094620513916015, 0.10089302062988281, 0.1010343017578125, 0.10187548828125, 0.10123286437988281, 0.10098265838623047, 0.10423308563232422, 0.10176102447509766, 0.10119577789306641, 0.10111385345458984, 0.10101103973388671, 0.10093580627441406, 0.10107523345947265, 0.10093567657470703, 0.10125494384765625, 0.10072415924072266, 0.10101020812988282, 0.10171974182128907, 0.10185350036621094, 0.10101545715332032, 0.1014068145751953, 0.10112989044189453, 0.10115721893310547, 0.10108089447021484, 0.1012656021118164, 0.1008353271484375, 0.10071651458740234, 0.10062000274658203, 0.10080652618408203, 0.10060173034667969, 0.10505187225341797, 0.10105123138427734, 0.10079833221435547, 0.10175296020507812, 0.10181123352050782, 0.1011987533569336, 0.10106009674072265, 0.10074988555908203, 0.10120191955566406, 0.1009026870727539, 0.10113446044921875, 0.10065110778808593, 0.10326630401611328, 0.10034146881103516, 0.10065900421142578, 0.10040195465087891, 0.10038240051269531, 0.1003499526977539, 
0.1011630096435547, 0.10062847900390624, 0.10076131439208984, 0.10093186950683594, 0.10109747314453126, 0.10078733062744141, 0.10122236633300781, 0.10053270721435546, 0.10063407897949218, 0.10073744201660156, 0.10047750091552735, 0.10068582153320313, 0.10045980834960938, 0.10039574432373047, 0.10062624359130859, 0.10049964904785157, 0.10127958679199218, 0.10027008056640625, 0.10048697662353516, 0.10080086517333985, 0.10084556579589844, 0.10080579376220702, 0.10063670349121094, 0.10110428619384766, 0.10088054656982422, 0.10394134521484374, 0.10152201843261718, 0.10061638641357422, 0.10058908843994141, 0.10091567993164062, 0.10104124450683594, 0.10065145874023437, 0.1004016342163086, 0.10328377532958985, 0.10514118194580079, 0.10141887664794921, 0.1015194854736328, 0.10091852569580079, 0.10063129425048828, 0.10108313751220703, 0.10093113708496093, 0.1014721908569336, 0.10083753967285156, 0.10066979217529297, 0.10152310180664062, 0.10117961883544922, 0.10086003112792968, 0.10090496063232422, 0.10101554870605468, 0.1012542724609375, 0.10096320343017579, 0.10126236724853516, 0.10118243408203124, 0.10168934631347656, 0.1009991683959961, 0.10211532592773437, 0.10242457580566407, 0.10182860565185547]",tokens/s,9.847200477543888,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,884.727808,572.391424,0.0,169.869312,150.669312,s,1,7.52477197265625,7.52477197265625,0.0,7.52477197265625,7.52477197265625,7.52477197265625,7.52477197265625,[7.52477197265625],,kWh,1.5062103350010146e-05,1.6542731714769737e-06,5.701393449991654e-06,2.2417769971478773e-05,,MB,1279.332352,633.208832,0.0,209.7152,193.680384,s,11,0.1722556171417236,0.01565960155833851,0.0012153350131478976,0.015280672073364258,0.015472319602966309,0.01748364782333374,0.019092710399627687,"[0.015472319602966309, 0.019494976043701172, 0.015317536354064942, 0.015227231979370117, 0.015280672073364258, 0.015177568435668946, 0.015200223922729492, 0.015275327682495118, 0.015283072471618652, 0.015200672149658203, 0.015326016426086426]",tokens/s,16347.797806112361,kWh,4.5201946655057064e-07,4.9844375489747e-08,1.7070275094121894e-07,6.725665929815366e-07,tokens/kWh,380631453.7050278,MB,1292.45184,635.305984,0.0,211.812352,193.682944,s,11,10.2186669921875,0.9289697265625001,0.0035798150910810213,0.929113525390625,0.9338983154296875,0.9345405273437499,0.935054296875,"[0.9338983154296875, 0.9351827392578125, 0.9309436645507813, 0.9246932373046876, 0.9269314575195312, 0.9242373657226562, 0.925489013671875, 0.9317848510742187, 0.929113525390625, 0.9302066040039062, 0.9261862182617188]",tokens/s,67.81706464549836,kWh,2.6954751782696334e-05,2.9722717616388342e-06,9.306097759785356e-06,3.923312130412053e-05,tokens/kWh,1605786.0783404787,,s,693,10.213649888992302,0.014738311528127431,0.0002945200876563685,0.01467801570892334,0.01484417896270752,0.014993836402893066,0.016119909133911155,"[0.014794624328613282, 
0.014872639656066895, 0.0148787202835083, 0.014837759971618653, 0.014784799575805665, 0.014778079986572266, 0.014979071617126465, 0.01476796817779541, 0.014784640312194824, 0.014855423927307128, 0.014885055541992187, 0.014811776161193847, 0.015007776260375976, 0.014861472129821777, 0.014815872192382812, 0.014810591697692872, 0.015100640296936035, 0.014756928443908692, 0.015268768310546875, 0.014833120346069336, 0.014862879753112793, 0.01527519989013672, 0.01482630443572998, 0.014804991722106933, 0.014841152191162109, 0.014783167839050293, 0.015056480407714843, 0.014735775947570801, 0.014718048095703125, 0.014716128349304199, 0.01470803165435791, 0.01473583984375, 0.01470464038848877, 0.014754719734191894, 0.014736384391784667, 0.014757120132446289, 0.014752544403076173, 0.014773728370666504, 0.014711296081542969, 0.014782464027404785, 0.014827520370483398, 0.014739456176757813, 0.01479475212097168, 0.014784319877624512, 0.01479263973236084, 0.014758399963378906, 0.014782464027404785, 0.014786304473876953, 0.014744895935058594, 0.0147893123626709, 0.0147640323638916, 0.014741503715515136, 0.014804991722106933, 0.014751392364501953, 0.014778719902038575, 0.014743712425231934, 0.014753215789794922, 0.014761599540710449, 0.014760160446166992, 0.014784064292907715, 0.014742815971374511, 0.014745311737060547, 0.014962688446044922, 0.014921728134155274, 0.014781472206115722, 0.01475273609161377, 0.014803999900817872, 0.01486956787109375, 0.014841535568237304, 0.014822688102722168, 0.014830719947814942, 0.014706015586853027, 0.014891263961791992, 0.014716768264770507, 0.014765631675720215, 0.014740096092224122, 0.014732671737670898, 0.014701696395874023, 0.014727999687194824, 0.01486348819732666, 0.014710528373718262, 0.01495248031616211, 0.014768287658691407, 0.014759743690490722, 0.014686207771301269, 0.014816960334777833, 0.014778688430786132, 0.014687552452087402, 0.014868351936340332, 0.014926464080810546, 0.014761216163635254, 0.014721887588500977, 0.014800000190734863, 0.01494320011138916, 0.014737407684326171, 0.014757920265197753, 0.014769984245300292, 0.014653599739074707, 0.014731295585632324, 0.014712639808654785, 0.014727328300476074, 0.014785663604736328, 0.014690591812133788, 0.014701312065124511, 0.014665568351745605, 0.014693632125854492, 0.015413663864135741, 0.017509727478027343, 0.015335424423217774, 0.014918496131896972, 0.01480515193939209, 0.014751040458679199, 0.014777024269104004, 0.014788000106811524, 0.014781023979187012, 0.014847999572753906, 0.014700672149658204, 0.014720735549926757, 0.014737567901611327, 0.014748959541320801, 0.014678208351135254, 0.014772543907165528, 0.014764320373535157, 0.014866080284118652, 0.014775808334350587, 0.014751520156860352, 0.014691455841064453, 0.014783359527587891, 0.014798463821411133, 0.014981792449951173, 0.01739068794250488, 0.015950464248657228, 0.015032608032226563, 0.014728704452514648, 0.01477622413635254, 0.01513263988494873, 0.014723423957824708, 0.014716959953308106, 0.014699775695800782, 0.014613216400146484, 0.01467801570892334, 0.014738880157470702, 0.014665568351745605, 0.014735967636108398, 0.014663552284240723, 0.015006976127624512, 0.01465772819519043, 0.014680768013000488, 0.01466585636138916, 0.014650400161743165, 0.014696800231933593, 0.014654080390930175, 0.014630911827087402, 0.014680064201354981, 0.014654911994934083, 0.014635583877563477, 0.014710783958435059, 0.014635007858276367, 0.014954496383666992, 0.014688447952270508, 0.01466476821899414, 0.0146626558303833, 0.014718943595886231, 0.014681247711181641, 
0.014619263648986816, 0.014707776069641113, 0.014646207809448242, 0.014630399703979492, 0.01465004825592041, 0.014655039787292481, 0.014692607879638673, 0.014704000473022462, 0.014652031898498535, 0.014657535552978516, 0.014698687553405762, 0.014686016082763672, 0.014651071548461915, 0.014663999557495117, 0.01466153621673584, 0.01464739227294922, 0.014692352294921876, 0.014647583961486816, 0.01469007968902588, 0.014683648109436035, 0.01469279956817627, 0.014654975891113281, 0.014663871765136718, 0.014667424201965332, 0.014645855903625488, 0.014607071876525879, 0.01468614387512207, 0.014618623733520507, 0.014632960319519044, 0.014644224166870118, 0.014618656158447266, 0.014641216278076172, 0.014646368026733398, 0.014636223793029784, 0.014649696350097656, 0.014653727531433106, 0.01470464038848877, 0.014741408348083495, 0.014770272254943848, 0.014716575622558594, 0.014700672149658204, 0.014737631797790527, 0.014663840293884277, 0.014716768264770507, 0.014667712211608886, 0.014649408340454102, 0.014645248413085938, 0.014671263694763183, 0.014629183769226074, 0.014644512176513672, 0.014627967834472656, 0.014661503791809081, 0.014685919761657715, 0.014648608207702637, 0.014672767639160157, 0.014667967796325683, 0.014667712211608886, 0.014652480125427246, 0.014937024116516113, 0.01474339199066162, 0.01465071964263916, 0.014646176338195802, 0.014626720428466796, 0.014626815795898437, 0.01467801570892334, 0.014694432258605956, 0.01462992000579834, 0.014662431716918946, 0.014665311813354492, 0.014625344276428223, 0.014659839630126953, 0.014734687805175782, 0.014645312309265137, 0.01463548755645752, 0.014628735542297364, 0.014648351669311523, 0.01467910385131836, 0.0146779203414917, 0.014708736419677734, 0.014652735710144043, 0.014674816131591797, 0.014658495903015137, 0.014717951774597168, 0.014685248374938965, 0.014600959777832032, 0.014743040084838867, 0.014690048217773437, 0.014642175674438476, 0.014648223876953125, 0.014667776107788086, 0.014639103889465332, 0.014759967803955078, 0.014655455589294434, 0.014658656120300293, 0.014633184432983398, 0.014695103645324708, 0.014636063575744629, 0.014715871810913085, 0.014804224014282226, 0.014715423583984375, 0.014661087989807128, 0.014667648315429688, 0.014633855819702148, 0.014709952354431153, 0.014883872032165527, 0.014755264282226562, 0.014764608383178711, 0.014738719940185547, 0.014836511611938476, 0.014759648323059081, 0.0146527681350708, 0.014645631790161134, 0.014677760124206542, 0.014991711616516114, 0.01469046401977539, 0.01500547218322754, 0.014676416397094726, 0.01459727954864502, 0.014782976150512696, 0.014678272247314453, 0.014659647941589355, 0.014720959663391113, 0.014798463821411133, 0.014633248329162598, 0.014603520393371582, 0.014791584014892578, 0.014686047554016113, 0.014615615844726563, 0.015078335762023927, 0.014936063766479492, 0.014645567893981934, 0.014623423576354981, 0.014740703582763671, 0.014599679946899414, 0.01464140796661377, 0.01465772819519043, 0.014622559547424316, 0.014696191787719727, 0.01469814395904541, 0.014750304222106934, 0.014627936363220215, 0.014635647773742676, 0.014598431587219239, 0.01466163158416748, 0.01470083236694336, 0.014781408309936524, 0.014698911666870117, 0.014684512138366699, 0.014604384422302247, 0.01464675235748291, 0.014610303878784179, 0.014708576202392578, 0.014573023796081544, 0.014611328125, 0.014589759826660156, 0.014632960319519044, 0.014579551696777344, 0.014666111946105957, 0.014751520156860352, 0.014620896339416503, 0.014564384460449218, 0.014609151840209961, 0.014616576194763184, 
0.01458937644958496, 0.014621536254882812, 0.014679327964782714, 0.014600543975830079, 0.014669919967651367, 0.014700511932373046, 0.014602272033691407, 0.014603391647338867, 0.014930815696716308, 0.014839808464050292, 0.015458304405212403, 0.014741727828979492, 0.014812959671020507, 0.014686207771301269, 0.014684160232543946, 0.014639103889465332, 0.014677023887634278, 0.014748224258422852, 0.014657952308654786, 0.01485209560394287, 0.014682399749755859, 0.014665151596069336, 0.014629152297973633, 0.014675968170166016, 0.014630399703979492, 0.014612959861755372, 0.014739775657653808, 0.014638208389282226, 0.01464908790588379, 0.014777183532714844, 0.014706015586853027, 0.014635680198669433, 0.014605792045593261, 0.01460483169555664, 0.014583807945251465, 0.014626784324645996, 0.014587488174438477, 0.01460268783569336, 0.014614527702331542, 0.014607647895812988, 0.014586591720581054, 0.014613823890686035, 0.014557888031005859, 0.014653120040893555, 0.014623040199279786, 0.014566720008850098, 0.014596799850463867, 0.014577664375305176, 0.014598079681396484, 0.014583840370178222, 0.01462070369720459, 0.014557215690612793, 0.01462054443359375, 0.014565695762634277, 0.014564607620239258, 0.014592991828918458, 0.014655263900756835, 0.014561280250549317, 0.014603903770446777, 0.014565664291381836, 0.014634367942810058, 0.014617343902587891, 0.014626784324645996, 0.014579392433166504, 0.014611871719360351, 0.014546175956726075, 0.014621631622314453, 0.014615263938903809, 0.01456332778930664, 0.014659647941589355, 0.014694208145141602, 0.014596320152282715, 0.014595999717712402, 0.014585856437683106, 0.014559231758117675, 0.014592000007629394, 0.015122271537780762, 0.014710944175720215, 0.014804479598999023, 0.014786304473876953, 0.014629440307617187, 0.014694592475891114, 0.014700544357299805, 0.014739456176757813, 0.014616576194763184, 0.01471504020690918, 0.016076128005981447, 0.01469491195678711, 0.01468131160736084, 0.014580096244812012, 0.014598560333251954, 0.014628864288330079, 0.014671008110046386, 0.014682975769042969, 0.014653440475463866, 0.01470406436920166, 0.014653120040893555, 0.014814080238342285, 0.014642784118652344, 0.014909855842590332, 0.014654591560363769, 0.014641375541687011, 0.01460700798034668, 0.014639328002929688, 0.014685983657836915, 0.0145797119140625, 0.01472003173828125, 0.014625632286071777, 0.014633088111877442, 0.01462054443359375, 0.015207839965820312, 0.014643136024475098, 0.014611264228820801, 0.014677984237670898, 0.01457759952545166, 0.014701567649841308, 0.014728192329406739, 0.014606335639953612, 0.014650495529174804, 0.014654335975646972, 0.014604000091552735, 0.01472976016998291, 0.014673184394836426, 0.014686688423156739, 0.014675968170166016, 0.014644767761230469, 0.014581567764282226, 0.014647007942199707, 0.014608415603637695, 0.01458678436279297, 0.014708736419677734, 0.014648320198059082, 0.014586976051330566, 0.014620575904846191, 0.014649056434631347, 0.014609824180603028, 0.01460108757019043, 0.014732864379882813, 0.015167519569396973, 0.014645471572875976, 0.0146659517288208, 0.014618592262268066, 0.014661503791809081, 0.014837663650512695, 0.014997023582458496, 0.014729567527770996, 0.014789119720458984, 0.014714719772338868, 0.014648447990417481, 0.014617728233337403, 0.014946399688720703, 0.015212191581726073, 0.014722528457641602, 0.014809632301330566, 0.01662339210510254, 0.01693734359741211, 0.014769696235656738, 0.014762463569641114, 0.014648223876953125, 0.014666111946105957, 0.014596672058105468, 0.01460041618347168, 
0.015451711654663086, 0.014842080116271974, 0.014716927528381347, 0.014813247680664063, 0.01473145580291748, 0.01504860782623291, 0.01462816047668457, 0.014699040412902832, 0.014755200386047364, 0.014604672431945801, 0.014644927978515625, 0.014620320320129394, 0.01461955165863037, 0.014617823600769042, 0.014633760452270508, 0.014583552360534668, 0.01464297580718994, 0.014638848304748535, 0.01467039966583252, 0.014710080146789551, 0.014704959869384766, 0.014692735671997071, 0.014637215614318848, 0.01462822437286377, 0.014596735954284668, 0.014606335639953612, 0.014645248413085938, 0.014638943672180176, 0.014618335723876953, 0.014574015617370605, 0.014665727615356445, 0.01458182430267334, 0.014546239852905273, 0.014629504203796388, 0.01460223960876465, 0.014598143577575684, 0.01467801570892334, 0.01456761646270752, 0.014542655944824219, 0.014620736122131347, 0.014636032104492188, 0.014676447868347167, 0.014649184226989746, 0.014621024131774902, 0.01460431957244873, 0.014622976303100586, 0.014622112274169922, 0.014600799560546876, 0.014641056060791016, 0.01497875213623047, 0.014833663940429688, 0.014847519874572754, 0.0147542724609375, 0.014647711753845214, 0.014663680076599121, 0.014819328308105468, 0.014661279678344727, 0.0148090238571167, 0.015054271697998048, 0.014785728454589843, 0.014628640174865723, 0.014683168411254882, 0.014844703674316406, 0.014650783538818359, 0.014695199966430664, 0.014641152381896973, 0.014634688377380372, 0.014727487564086914, 0.017424127578735352, 0.0159967679977417, 0.014722975730895996, 0.014956864356994629, 0.014678175926208497, 0.014750816345214844, 0.014629280090332031, 0.014586400032043458, 0.014620672225952149, 0.01463475227355957, 0.0145283203125, 0.014589952468872071, 0.01460223960876465, 0.014558719635009766, 0.014641311645507813, 0.014594400405883789, 0.014575712203979492, 0.01460204792022705, 0.014647520065307618, 0.014619744300842286, 0.01460409641265869, 0.0145698881149292, 0.014613056182861328, 0.014607775688171386, 0.014570079803466796, 0.014635007858276367, 0.01456924819946289, 0.014655712127685547, 0.014626815795898437, 0.014595583915710449, 0.014609151840209961, 0.014620415687561035, 0.014626976013183593, 0.014589407920837402, 0.014578047752380372, 0.01458518409729004, 0.014598688125610352, 0.014626848220825195, 0.014626912117004395, 0.014610431671142577, 0.014643199920654297, 0.014629055976867675, 0.014690112113952637, 0.014663680076599121, 0.014597151756286621, 0.014580320358276368, 0.014573951721191407, 0.014654656410217286, 0.014787391662597656, 0.014932000160217285, 0.01480844783782959, 0.014676575660705566, 0.014708736419677734, 0.014675104141235351, 0.014701408386230469, 0.014682208061218262, 0.01464515209197998, 0.014601247787475586, 0.014687007904052734, 0.01472326374053955, 0.014647551536560059, 0.014730751991271973, 0.014706944465637207, 0.01468825626373291, 0.014659584045410156, 0.015007743835449219, 0.016785408020019533, 0.014739456176757813, 0.015507519721984863, 0.014706175804138183, 0.015278752326965333, 0.01778220748901367, 0.01476416015625, 0.014718048095703125, 0.01473423957824707, 0.014729215621948242, 0.01477017593383789, 0.014624735832214356, 0.014723391532897949, 0.014742912292480469, 0.014658207893371583, 0.014663711547851562, 0.014673439979553223, 0.014711232185363769, 0.014687999725341797, 0.014708671569824219, 0.014642880439758301, 0.014651968002319335, 0.01469331169128418, 0.014651552200317382, 0.01465824031829834, 0.014669856071472169, 0.014682239532470703, 0.014649375915527344, 0.014699359893798829, 
0.014643520355224609, 0.014703455924987794, 0.014682111740112304, 0.014728992462158204, 0.014659456253051758, 0.014618751525878906, 0.014669440269470214, 0.014608768463134766, 0.01468019199371338, 0.01462889575958252, 0.014657567977905274, 0.014639936447143554, 0.014652640342712402, 0.014657312393188477, 0.014617856025695802, 0.01462118434906006, 0.014696767807006836, 0.014660927772521972, 0.014617216110229493, 0.014690303802490234, 0.014645248413085938, 0.01515459156036377, 0.014774880409240722, 0.014702591896057129, 0.014735008239746094, 0.01471718406677246, 0.014846048355102538, 0.014748928070068359, 0.014691295623779297, 0.014741279602050782, 0.014652607917785644, 0.01465824031829834, 0.01469814395904541, 0.014829376220703124, 0.014661408424377442, 0.0147357120513916, 0.01471132755279541, 0.014683199882507323, 0.014770976066589355, 0.014635168075561523, 0.014655391693115234]",tokens/s,67.8503774392028,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( 
File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, 
in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in 
_flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl 
return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1041.678336,4527.620096,0.0,4125.097984,4116.435456,s,1,13.4006083984375,13.4006083984375,0.0,13.4006083984375,13.4006083984375,13.4006083984375,13.4006083984375,[13.4006083984375],,kWh,0.0001830996803416383,2.0190300553153533e-05,6.18275494619902e-05,0.000265117530356782,,MB,1352.224768,5400.035328,0.0,4984.930304,4693.316608,s,10,8.558330688476563,0.8558330688476563,0.004684146113828702,0.8566049499511719,0.86080185546875,0.8619366333007812,0.8628444555664062,"[0.8467933349609374, 0.8496578369140625, 0.8534113159179687, 0.858440185546875, 0.8587876586914063, 0.8569998168945312, 0.8562100830078125, 0.8544093627929688, 0.8630714111328125, 0.8605496826171875]",tokens/s,299.12375358981325,kWh,2.5059233643402953e-05,2.7618482579821533e-06,1.6486980782165716e-05,4.430806268355082e-05,tokens/kWh,5777729.480712297,MB,1377.230848,5402.13248,0.0,4984.930304,4693.319168,s,10,45.25707666015625,4.5257076660156255,0.0032420880409520267,4.525760498046875,4.529935107421875,4.530532983398437,4.531011284179687,"[4.5258359375, 4.52980224609375, 4.52568505859375, 4.52527001953125, 4.5220830078125, 4.526771484375, 4.51969921875, 4.52337890625, 4.527419921875, 4.531130859375]",tokens/s,13.920474906737489,kWh,0.00013214260760409805,1.4577986893559964e-05,8.46381834512339e-05,0.00023135877794889194,tokens/kWh,272304.3428847854,,s,630,45.25438568115231,0.07183235822405133,0.001666128072764105,0.07160406494140625,0.07194856033325196,0.07227569999694823,0.08424277359008789,"[0.08574156951904296, 0.07271603393554688, 0.07217996978759765, 0.07174553680419922, 0.07133980560302734, 0.07141996765136718, 0.071353759765625, 0.07176028442382812, 0.07131362915039062, 0.07135225677490234, 0.07152416229248047, 0.07127046203613281, 0.07131577301025391, 0.07136051177978515, 0.07128883361816406, 0.07126834869384766, 0.0713289566040039, 0.07136281585693359, 0.07128294372558594, 0.0713641586303711, 0.07133670043945313, 0.07134969329833984, 0.07140819549560547, 0.07148339080810547, 0.07176115417480469, 0.07136659240722656, 0.07164310455322266, 0.07148166656494141, 0.07144649505615235, 0.07154950714111329, 0.0714772491455078, 0.07147881317138671, 0.0715289306640625, 0.07144019317626953, 0.07152864074707031, 0.0714997787475586, 0.07148873901367188, 0.07167430114746094, 0.07210192108154297, 0.07183350372314454, 0.07212854766845703, 0.07170003509521485, 0.07161529541015625, 0.07155712127685547, 0.07154483032226562, 0.07166316986083984, 0.07167225646972657, 0.07175772857666016, 0.07174358367919922, 0.0719482879638672, 0.07170252990722656, 0.07169843292236328, 0.07201567840576172, 0.07168537902832031, 0.0717895050048828, 0.07178377532958985, 0.07190595245361328, 0.07175782775878906, 0.07178034973144531, 0.07170252990722656, 0.07170047760009765, 0.07175305938720702, 
0.07180464172363281, 0.0858887710571289, 0.07280089569091797, 0.07226982116699218, 0.07195193481445313, 0.07145878601074218, 0.07180335998535156, 0.07143014526367188, 0.07138304138183593, 0.07131078338623047, 0.0713117446899414, 0.07134841918945313, 0.07126822662353516, 0.07138652801513672, 0.0712012176513672, 0.07121334075927735, 0.07146086120605469, 0.07136665344238281, 0.07136460876464844, 0.07156735992431641, 0.07149276733398438, 0.07146521759033203, 0.07252579498291016, 0.07141439819335937, 0.07141580963134765, 0.07136383819580078, 0.07135619354248048, 0.07137379455566406, 0.07134413146972657, 0.0713476791381836, 0.07144297790527344, 0.07158512115478516, 0.07153731536865235, 0.07220204925537109, 0.07257926177978516, 0.07166073608398438, 0.07266297912597657, 0.07157234954833984, 0.07165542602539063, 0.071659423828125, 0.0715607681274414, 0.07182931518554687, 0.07171965026855469, 0.07175078582763672, 0.07159024047851563, 0.07176656341552734, 0.07163686370849609, 0.07158512115478516, 0.0719040298461914, 0.07172614288330079, 0.07167890930175781, 0.07173238372802734, 0.07183650970458984, 0.07180057525634766, 0.07171068572998048, 0.07165161895751954, 0.07189913940429687, 0.07168409729003906, 0.07163823699951172, 0.07171561431884765, 0.07178854370117188, 0.07207721710205078, 0.07210601806640625, 0.07172102355957032, 0.08491862487792969, 0.07294547271728516, 0.07220146942138672, 0.07197574615478515, 0.07131712341308594, 0.07124147033691407, 0.07136335754394531, 0.07120486450195312, 0.07121920013427735, 0.07117593383789063, 0.07118669128417969, 0.07129497528076172, 0.07128985595703125, 0.07171788787841797, 0.07168723297119141, 0.07143484497070313, 0.0713629150390625, 0.07136460876464844, 0.07143628692626953, 0.07134310150146485, 0.0713306884765625, 0.07129488372802735, 0.0714197769165039, 0.07136495971679688, 0.07137689971923829, 0.0714629135131836, 0.07143424224853516, 0.0715857925415039, 0.07185612487792968, 0.07200393676757813, 0.07167555236816406, 0.07167132568359375, 0.07168022155761719, 0.07152051544189453, 0.07153049468994141, 0.07144976043701172, 0.07165628814697265, 0.07149104309082031, 0.07192412567138672, 0.07158777618408203, 0.07151602935791015, 0.07166598510742188, 0.07189084625244141, 0.07153775787353515, 0.07162127685546875, 0.07159228515625, 0.07171686553955078, 0.07169964599609376, 0.0715983657836914, 0.07161007690429687, 0.07189500427246094, 0.07171977233886719, 0.0716983642578125, 0.07170054626464843, 0.07181472015380859, 0.07224345397949218, 0.07198944091796874, 0.07180028533935547, 0.07182335662841798, 0.0717372817993164, 0.07181782531738282, 0.07189081573486328, 0.07184397125244141, 0.08468495941162109, 0.07298252868652344, 0.07218966674804687, 0.07181954956054687, 0.07123353576660156, 0.07161241912841797, 0.07131533050537109, 0.07133529663085937, 0.07115350341796875, 0.07122013092041016, 0.07123558044433594, 0.07119983673095703, 0.07120326232910157, 0.07127497863769532, 0.0712325439453125, 0.07126028442382812, 0.0713768310546875, 0.07124864196777343, 0.07143218994140625, 0.07127670288085937, 0.07130665588378907, 0.07163715362548828, 0.07214329528808594, 0.0716102752685547, 0.07157564544677734, 0.07175312042236329, 0.07150998687744141, 0.07155152130126953, 0.07152236938476562, 0.07151001739501953, 0.07187254333496093, 0.07168560028076172, 0.07152694702148438, 0.07165891265869141, 0.07148777770996094, 0.07158201599121093, 0.07166902160644531, 0.07166230773925782, 0.07156233978271484, 0.07156214141845703, 0.07146841430664062, 0.07191001892089843, 0.0717943344116211, 
0.07171721649169922, 0.07171270751953125, 0.07168192291259766, 0.07166738891601562, 0.07171437072753906, 0.07161036682128906, 0.07169725036621094, 0.0717038116455078, 0.07169315338134766, 0.07164851379394531, 0.07247689819335937, 0.07174607849121094, 0.07176140594482422, 0.07170508575439453, 0.0717838363647461, 0.07180323028564453, 0.07173465728759766, 0.07178125, 0.0717496337890625, 0.07173734283447265, 0.08494924926757813, 0.07285964965820313, 0.07205868530273438, 0.07167606353759766, 0.07119184112548828, 0.07170944213867188, 0.07121273803710937, 0.07122918701171875, 0.07116790771484376, 0.07118838500976563, 0.07123545837402344, 0.07119142150878906, 0.07127244567871094, 0.0712416000366211, 0.07132787322998047, 0.0713338851928711, 0.07126834869384766, 0.07133334350585938, 0.07143651580810546, 0.07138098907470704, 0.07139353942871093, 0.07130079650878907, 0.07138272094726562, 0.07143084716796876, 0.07137612915039063, 0.07142476654052735, 0.07160390472412109, 0.07151443481445313, 0.07177375793457032, 0.07136479949951172, 0.07159417724609375, 0.07139318084716798, 0.07142991638183593, 0.07139775848388671, 0.07144448089599609, 0.07137411499023437, 0.07145081329345702, 0.07191129302978516, 0.07195101165771485, 0.07156531524658204, 0.07157145690917968, 0.07172265625, 0.0714837417602539, 0.07157350158691406, 0.07145881652832031, 0.07161856079101563, 0.07147519683837891, 0.07156735992431641, 0.0716390380859375, 0.07150931549072266, 0.07174009704589844, 0.07206297302246094, 0.07195225524902343, 0.07190131378173828, 0.07183487701416015, 0.07194086456298829, 0.07177830505371094, 0.07183769226074219, 0.07190937805175782, 0.07165920257568359, 0.07171910095214844, 0.0717333755493164, 0.07177830505371094, 0.08415641784667968, 0.0724961929321289, 0.07194412994384766, 0.07175660705566406, 0.07151020812988282, 0.07124163055419921, 0.07120905303955079, 0.07129087829589843, 0.0715179214477539, 0.07135609436035156, 0.07133206176757813, 0.07371609497070312, 0.07214870452880859, 0.07139766693115235, 0.07142809295654297, 0.07137484741210938, 0.07122124481201172, 0.07130889892578125, 0.0713384017944336, 0.07130467224121094, 0.0724219207763672, 0.07148134613037109, 0.07137474822998047, 0.07149712371826172, 0.07146505737304687, 0.07171491241455077, 0.07154534149169922, 0.07139670562744141, 0.07159056091308594, 0.07157107543945312, 0.07143666839599609, 0.07146086120605469, 0.0716021728515625, 0.0714567642211914, 0.07140457916259765, 0.0715663070678711, 0.07155506896972656, 0.07171481323242188, 0.07168204498291016, 0.07165106964111329, 0.07159833526611328, 0.07154684448242188, 0.07170256042480469, 0.0722903060913086, 0.07166969299316406, 0.07171282958984375, 0.07172828674316406, 0.0716659164428711, 0.07166831970214843, 0.07169964599609376, 0.07166445159912109, 0.07170662689208984, 0.07176099395751953, 0.0716993637084961, 0.07194198608398437, 0.07164534759521485, 0.07166361236572266, 0.0717496337890625, 0.07172444915771484, 0.07169699096679688, 0.07178995513916016, 0.07183424377441407, 0.0717127685546875, 0.08427804565429688, 0.07256063842773437, 0.07209779357910157, 0.0716042251586914, 0.07112201690673828, 0.07119692993164063, 0.07134384155273438, 0.07144515228271485, 0.07138127899169922, 0.07130726623535157, 0.07140351867675782, 0.07131136322021485, 0.07129702758789062, 0.07126834869384766, 0.0713338851928711, 0.07131132507324218, 0.07124988555908203, 0.07170668792724609, 0.07137612915039063, 0.07137059020996094, 0.07141059112548828, 0.07134003448486329, 0.07126592254638672, 0.07128921508789063, 0.07133184051513672, 
0.07133798217773438, 0.07135846710205078, 0.07138444519042969, 0.07160249328613282, 0.07144889831542969, 0.07151411437988281, 0.0714525146484375, 0.07151017761230469, 0.0714567642211914, 0.0716779556274414, 0.0715120620727539, 0.07146669006347656, 0.07147145843505859, 0.071515869140625, 0.07153485107421875, 0.0716042251586914, 0.07155020904541015, 0.07167581176757812, 0.07155916595458985, 0.07153340911865234, 0.07160352325439454, 0.07173772430419922, 0.07168236541748046, 0.07170457458496093, 0.0717919692993164, 0.07168681335449219, 0.0716943359375, 0.07174143981933594, 0.07162166595458984, 0.07161273956298828, 0.07167628479003907, 0.07178883361816406, 0.07170047760009765, 0.0716688003540039, 0.07167072296142578, 0.07171385955810547, 0.07177107238769531, 0.071804931640625, 0.08406221008300781, 0.07427481842041016, 0.07223827362060548, 0.07202015686035156, 0.0713918685913086, 0.07127378845214843, 0.07123772430419922, 0.07118704223632813, 0.07128636932373048, 0.07135199737548828, 0.072391357421875, 0.07129670715332032, 0.07129942321777344, 0.07140073394775391, 0.07154557037353515, 0.07143183898925781, 0.0713829116821289, 0.07128931427001953, 0.07133164978027344, 0.07128288269042969, 0.07132889556884765, 0.07132454681396484, 0.07127756500244141, 0.07145164489746093, 0.07131340789794922, 0.07134143829345703, 0.0713406753540039, 0.07141289520263672, 0.0714698257446289, 0.07136819458007812, 0.07133859252929688, 0.07140351867675782, 0.07258076477050782, 0.07161475372314453, 0.07146419525146484, 0.07150444793701172, 0.0714181137084961, 0.07150367736816406, 0.07159008026123047, 0.07163494110107421, 0.07151821136474609, 0.07159766387939454, 0.07151439666748047, 0.07164268493652344, 0.07164985656738282, 0.07164672088623047, 0.07168000030517578, 0.07175833892822266, 0.07163699340820312, 0.07155302429199219, 0.07156883239746094, 0.07172502136230469, 0.07166575622558594, 0.07164979553222656, 0.07157945251464844, 0.07190342712402344, 0.07186022186279296, 0.07177420806884766, 0.07171193695068359, 0.07161734771728516, 0.07168819427490235, 0.0717900161743164, 0.07173375701904297, 0.08344528198242188, 0.07280818939208984, 0.07228659057617187, 0.07191792297363281, 0.07155699157714844, 0.07161190032958985, 0.07181990051269531, 0.07149158477783203, 0.07137689971923829, 0.07175552368164062, 0.07148143768310547, 0.07152041625976563, 0.07171625518798828, 0.071389404296875, 0.07145919799804687, 0.071591552734375, 0.07139491271972656, 0.07138127899169922, 0.07146137237548827, 0.07165948486328125, 0.0714749755859375, 0.07149593353271484, 0.07145881652832031, 0.07222067260742188, 0.07143014526367188, 0.07144652557373046, 0.07150550079345704, 0.07155126190185547, 0.07148336029052735, 0.07153475189208984, 0.07157321929931641, 0.07159970855712891, 0.07161241912841797, 0.071748291015625, 0.07157350158691406, 0.07162265777587891, 0.07172822570800781, 0.07168495941162109, 0.07160617828369141, 0.071542236328125, 0.07153734588623047, 0.07149158477783203, 0.07164463806152344, 0.07175631713867188, 0.07163625335693359, 0.07160700988769532, 0.07180697631835938, 0.07184384155273438, 0.07170793914794922, 0.071695068359375, 0.07173939514160156, 0.07190937805175782, 0.07170406341552735, 0.07163875579833984, 0.07176998138427734, 0.0717833251953125, 0.07175373077392579, 0.07173081970214844, 0.07181964874267578, 0.07186614227294921, 0.07183586883544922, 0.07208303833007812, 0.0717742691040039, 0.08487427520751953, 0.07293183898925781, 0.07228050994873046, 0.07210185241699218, 0.071687744140625, 0.07142774200439453, 0.07139615631103516, 
0.07139440155029297, 0.07141059112548828, 0.07136665344238281, 0.07137471771240235, 0.07143177795410156, 0.07141840362548828, 0.07151821136474609, 0.07168409729003906, 0.0713888931274414, 0.07138886260986328, 0.07146675109863282, 0.07144041442871094, 0.07137769317626953, 0.07133187103271485, 0.07195852661132812, 0.07170867156982422, 0.07168934631347657, 0.0715068130493164, 0.07152845001220703, 0.07164723205566406, 0.07170252990722656, 0.0717619171142578, 0.07158080291748047, 0.07153129577636719, 0.07163228607177734, 0.071600830078125, 0.07160358428955078, 0.0715389404296875, 0.0715222396850586, 0.07169251251220703, 0.07167337799072265, 0.07164998626708985, 0.0716390380859375, 0.0718213119506836, 0.07175167846679688, 0.07179264068603515, 0.07170662689208984, 0.0717127685546875, 0.07187251281738281, 0.0718900146484375, 0.07233628845214844, 0.07173324584960937, 0.0718397445678711, 0.07178034973144531, 0.071761474609375, 0.0718770523071289, 0.07170252990722656, 0.0718635482788086, 0.07187942504882812, 0.0721569595336914, 0.07181743621826171, 0.07181298828125, 0.07194226837158203, 0.0720091552734375, 0.0719651870727539, 0.07192115020751953]",tokens/s,13.92130266531016,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1170.47296,5054.005248,0.0,4651.483136,4638.22848,s,1,13.8999677734375,13.8999677734375,0.0,13.8999677734375,13.8999677734375,13.8999677734375,13.8999677734375,[13.8999677734375],,kWh,0.00020155295912497117,2.2220501701012762e-05,6.987838923599998e-05,0.00029365185006198394,,MB,1364.8896,6180.175872,0.0,5765.070848,5418.530816,s,10,9.476246154785157,0.9476246154785157,0.006276125124535042,0.9477605895996094,0.9531595092773437,0.9553250244140625,0.9570574365234374,"[0.9333633422851563, 0.9574905395507812, 0.9423201904296875, 0.9464876098632813, 0.9483010864257813, 0.9452372436523437, 0.9509849853515625, 0.9521627807617188, 0.9472200927734375, 0.9526782836914063]",tokens/s,270.14916647213664,kWh,2.743052683637016e-05,3.0251211266084045e-06,1.810471145345534e-05,4.856035941643391e-05,tokens/kWh,5271789.646461387,MB,1384.640512,6180.175872,0.0,5765.070848,5418.533376,s,10,45.92473828125,4.592473828125,0.0067718769927258065,4.59183251953125,4.601139208984375,4.601471215820313,4.601736821289062,"[4.57913720703125, 4.591107421875, 4.58684375, 4.5866640625, 4.59230224609375, 4.59136279296875, 4.5954775390625, 4.598974609375, 4.60180322265625, 4.6010654296875]",tokens/s,13.718096685533304,kWh,0.00013467935747238035,1.4856093654191227e-05,8.937293008414575e-05,0.00023890838121071731,tokens/kWh,263699.4134769762,,s,630,45.921780349731456,0.07289171484084357,0.001666407767940548,0.07269537353515625,0.07374674987792969,0.0741532569885254,0.08375029983520509,"[0.08536144256591797, 0.07308902740478515, 0.07237017822265625, 0.07189504241943359, 0.07135231781005859, 0.07135158538818359, 0.07131526184082031, 0.07134848022460938, 0.07139155578613281, 
0.07134857940673828, 0.07139328002929687, 0.07139532470703125, 0.0724677734375, 0.07401952362060547, 0.07342076873779296, 0.07334710693359375, 0.07310540771484375, 0.07239059448242187, 0.07205824279785156, 0.07151999664306641, 0.07152531433105469, 0.07271206665039062, 0.07221056365966796, 0.07191302490234375, 0.07163238525390625, 0.07159619140625, 0.07239145660400391, 0.0727162857055664, 0.07352239990234374, 0.07298332977294922, 0.07275424194335937, 0.07245919799804687, 0.0724111328125, 0.07221798706054687, 0.07174800109863282, 0.07240428924560546, 0.07315100860595704, 0.07238694763183594, 0.07184559631347656, 0.07185846710205078, 0.07309302520751954, 0.0727327651977539, 0.07260979461669922, 0.07262413024902344, 0.07415193939208985, 0.07316070556640625, 0.07255039978027343, 0.07210189056396485, 0.07252540588378906, 0.0730660171508789, 0.07251238250732422, 0.07234559631347656, 0.07211212921142578, 0.07282278442382813, 0.07333888244628907, 0.07275724792480469, 0.07288626861572266, 0.07391004943847657, 0.07347837066650391, 0.07286988830566406, 0.07326515197753906, 0.07295999908447266, 0.0725032958984375, 0.08503440093994141, 0.07356829071044922, 0.07243027496337891, 0.07206092834472656, 0.07149321746826172, 0.07165110778808594, 0.07145331573486328, 0.07152025604248047, 0.07153868865966796, 0.07161241912841797, 0.07185817718505859, 0.07160582733154297, 0.0725848617553711, 0.07484690856933594, 0.07452387237548828, 0.07312847900390625, 0.07247856140136719, 0.07211264038085938, 0.0715955810546875, 0.07287852478027344, 0.0722841567993164, 0.07191731262207031, 0.07177855682373047, 0.07184563446044921, 0.07320956420898438, 0.07256732940673828, 0.07320166778564453, 0.07378534698486328, 0.07334297943115234, 0.0731297607421875, 0.07276902770996094, 0.07232380676269531, 0.07198003387451171, 0.07315353393554687, 0.07238861083984376, 0.07210950469970703, 0.07202649688720703, 0.07251372528076172, 0.07299481964111328, 0.07283837127685547, 0.07346406555175782, 0.07366505432128906, 0.07360307312011719, 0.07323136138916016, 0.07262413024902344, 0.07256166076660156, 0.07256409454345703, 0.0735540771484375, 0.07291747283935547, 0.07249919891357422, 0.07192281341552734, 0.07234444427490234, 0.07299481964111328, 0.07315455627441406, 0.07359062194824219, 0.07374249267578124, 0.07314220428466797, 0.07260889434814453, 0.07275411224365234, 0.07296771240234375, 0.07297277069091797, 0.07316070556640625, 0.07266006469726563, 0.085938720703125, 0.07302349090576171, 0.07230464172363281, 0.0718963165283203, 0.07131622314453125, 0.07132096099853516, 0.07128905487060547, 0.07131712341308594, 0.07136745452880859, 0.07134137725830078, 0.07133663940429688, 0.0725421142578125, 0.07312934112548829, 0.07469744110107422, 0.0740679702758789, 0.07300262451171875, 0.07247090911865234, 0.07215309143066406, 0.07151955413818359, 0.07263916778564453, 0.07224320220947265, 0.0718888931274414, 0.07156326293945313, 0.071552734375, 0.07156764984130859, 0.07280335998535156, 0.07324479675292969, 0.07335558319091796, 0.07307523345947266, 0.0730808334350586, 0.07302553558349609, 0.07241490936279296, 0.07211366271972657, 0.07267993927001953, 0.07266291046142578, 0.07233785247802735, 0.07173529815673828, 0.07182080078125, 0.07296249389648438, 0.0726231689453125, 0.07338025665283203, 0.07358096313476563, 0.07325920104980468, 0.07326105499267578, 0.07306550598144532, 0.07279920196533203, 0.07262611389160156, 0.0725208969116211, 0.07258611297607422, 0.07252992248535156, 0.07248486328125, 0.07252281951904296, 0.07312044525146484, 0.07340013122558593, 
0.07368099212646484, 0.07330028533935547, 0.07387506866455078, 0.07333487701416015, 0.07276338958740235, 0.07237443542480469, 0.07329344177246094, 0.07284780883789063, 0.07251158142089843, 0.08496851348876953, 0.07304851531982422, 0.07227852630615235, 0.07191081237792969, 0.07133039855957031, 0.07128678131103515, 0.07131340789794922, 0.07131702423095704, 0.07136099243164062, 0.07141375732421874, 0.07137689971923829, 0.07276748657226563, 0.07332454681396484, 0.07464105224609376, 0.07415433502197266, 0.07346585845947265, 0.07266918182373047, 0.07215216064453125, 0.07180508422851563, 0.07147337341308593, 0.07146717071533203, 0.07152467346191406, 0.07150150299072265, 0.07151859283447265, 0.07228006744384766, 0.07313407897949219, 0.07354108428955078, 0.07416995239257812, 0.07410578918457031, 0.07353548431396484, 0.07290985870361329, 0.07238716888427735, 0.0720531234741211, 0.0716759033203125, 0.07167180633544921, 0.071731201171875, 0.07175331115722657, 0.0722149429321289, 0.07301734161376953, 0.07363174438476562, 0.07344332885742187, 0.07443660736083985, 0.07372518157958985, 0.07346867370605469, 0.07298223876953125, 0.07245033264160156, 0.07193993377685547, 0.07186605072021485, 0.07184432220458985, 0.07211622619628906, 0.0731335678100586, 0.07279666900634765, 0.07384585571289062, 0.07349542236328124, 0.07387958526611328, 0.07353753662109375, 0.07443456268310547, 0.0732991714477539, 0.07278262329101562, 0.07248429107666016, 0.07208198547363281, 0.07204150390625, 0.07238873291015625, 0.08441388702392578, 0.07302012634277344, 0.07224742126464843, 0.07195865631103515, 0.07137484741210938, 0.07129702758789062, 0.07129878234863281, 0.07130960083007812, 0.07128268432617188, 0.07265074920654296, 0.07222271728515625, 0.0719269790649414, 0.07319225311279297, 0.07492323303222656, 0.07409532928466797, 0.07312185668945312, 0.07257292938232422, 0.0720547866821289, 0.07166307067871094, 0.07153231811523438, 0.07152499389648438, 0.07165760040283203, 0.07268966674804687, 0.07210189056396485, 0.07272831726074219, 0.07297615814208984, 0.07359126281738282, 0.07418428802490235, 0.07366492462158203, 0.07303705596923828, 0.07253887939453126, 0.07218585968017578, 0.07170598602294923, 0.0717523193359375, 0.07226573181152343, 0.07271392059326172, 0.07232134246826172, 0.07261558532714844, 0.07315408325195312, 0.07365510559082031, 0.07436902618408203, 0.07378534698486328, 0.07346790313720702, 0.07316588592529297, 0.07326335906982422, 0.07247532653808594, 0.07195033264160157, 0.07193507385253907, 0.0731329574584961, 0.07257855987548828, 0.07222118377685546, 0.07266508483886719, 0.07331545257568359, 0.073831298828125, 0.07340636444091797, 0.07337757110595704, 0.07342726135253906, 0.07341670227050781, 0.07343830108642578, 0.07315711975097657, 0.07267958068847656, 0.0722762222290039, 0.07346367645263673, 0.08278031921386719, 0.07296797180175782, 0.07222774505615234, 0.07184381103515625, 0.07127657318115234, 0.07129408264160156, 0.07128768157958984, 0.07129273223876953, 0.07134841918945313, 0.07263359832763672, 0.07204934692382813, 0.07173535919189453, 0.0737600326538086, 0.07522582244873047, 0.0741560287475586, 0.07315251159667968, 0.07254425811767579, 0.07204249572753907, 0.07154819488525391, 0.07165814208984375, 0.07267446136474609, 0.07209645080566406, 0.07271036529541015, 0.07226982116699218, 0.07190937805175782, 0.07249715423583984, 0.07351910400390625, 0.07419904327392578, 0.07367475128173828, 0.07302950286865234, 0.07265644836425782, 0.07226220703125, 0.07203225708007813, 0.07277772521972656, 0.07240294647216797, 
0.07291836547851563, 0.07242578887939453, 0.07217910766601562, 0.072501953125, 0.07309123229980469, 0.07368412780761718, 0.07326201629638672, 0.07322418975830078, 0.0732357406616211, 0.07312802886962891, 0.0726329574584961, 0.07224114990234375, 0.07283507537841796, 0.07264035034179687, 0.0723519058227539, 0.07310540771484375, 0.07276541137695312, 0.07365020751953125, 0.07353548431396484, 0.07411625671386719, 0.07324550628662109, 0.07288992309570312, 0.07323286437988281, 0.07260979461669922, 0.0735263671875, 0.07333161926269531, 0.07274476623535156, 0.07244544219970703, 0.08376319885253906, 0.07286281585693359, 0.07218627166748047, 0.07179472351074219, 0.07193852996826172, 0.07223830413818359, 0.07189788818359374, 0.07132710266113282, 0.07139801788330079, 0.07135641479492187, 0.07252684783935547, 0.07204351806640626, 0.07359407806396484, 0.0750266876220703, 0.07408284759521484, 0.07281033325195313, 0.07262172698974609, 0.07209801483154298, 0.071806396484375, 0.07273763275146485, 0.07211622619628906, 0.07176528167724609, 0.07168688201904297, 0.0727589111328125, 0.07227177429199219, 0.0728758087158203, 0.07346656036376953, 0.0742113265991211, 0.0737496337890625, 0.07314713287353515, 0.07258684539794921, 0.07223554992675782, 0.07232476806640625, 0.07283747100830078, 0.07226982116699218, 0.07189708709716797, 0.07297638702392578, 0.0724254379272461, 0.0730827865600586, 0.07364211273193359, 0.07328358459472656, 0.07338297271728515, 0.07413801574707031, 0.07315920257568359, 0.07264256286621094, 0.07231283569335938, 0.07313817596435547, 0.07249919891357422, 0.07249260711669922, 0.07309740447998046, 0.07253427124023437, 0.07284121704101562, 0.0734166717529297, 0.07377423858642577, 0.07349747467041015, 0.07385292816162109, 0.0734207992553711, 0.07326924896240235, 0.07281040191650391, 0.07251363372802734, 0.07294771575927735, 0.07307049560546874, 0.07266236877441407, 0.0841611557006836, 0.07299072265625, 0.07225958251953125, 0.07190835571289063, 0.07127347564697266, 0.07129702758789062, 0.07251558685302735, 0.07199129486083984, 0.07165132904052735, 0.07137177276611328, 0.07206809234619141, 0.0724111328125, 0.0734493408203125, 0.0750544662475586, 0.07393507385253906, 0.0731346206665039, 0.07249839782714844, 0.07266998291015625, 0.0721506576538086, 0.07187289428710937, 0.07194624328613282, 0.07258726501464843, 0.07206912231445313, 0.0715857925415039, 0.07227536010742187, 0.07348489379882812, 0.07371968078613281, 0.07389923095703126, 0.07350160217285157, 0.07309661102294922, 0.07306403350830078, 0.0728832015991211, 0.07229440307617188, 0.07289036560058594, 0.07238201904296875, 0.0720982437133789, 0.0721447982788086, 0.07288796997070313, 0.07296864318847657, 0.07335446166992188, 0.07357315063476562, 0.07443251037597656, 0.07369728088378906, 0.07300505828857422, 0.07266099548339844, 0.07329913330078125, 0.07328646087646484, 0.07269142150878906, 0.072446044921875, 0.07277788543701172, 0.07277500915527343, 0.07243968200683594, 0.07330284881591796, 0.07381763458251953, 0.07334931182861328, 0.07337757110595704, 0.07337542724609375, 0.07337248229980468, 0.07322370910644531, 0.07306492614746093, 0.07345970916748047, 0.0728985595703125, 0.07256269073486328, 0.08371871948242188, 0.07284909057617188, 0.0721937255859375, 0.07186243438720703, 0.07139376068115234, 0.07152230072021484, 0.07249715423583984, 0.07190528106689453, 0.07153033447265625, 0.07133404541015625, 0.07255654144287109, 0.07199948883056641, 0.07380889892578125, 0.07580518341064453, 0.07410684967041016, 0.07333942413330079, 0.07260569763183594, 
0.07209369659423828, 0.07220738983154297, 0.07247289276123046, 0.07208003234863282, 0.07163859558105469, 0.07273107147216797, 0.07211622619628906, 0.07185203552246094, 0.07348406219482422, 0.07365142059326171, 0.0742158432006836, 0.07356265258789063, 0.07348770904541016, 0.07289929962158204, 0.07224320220947265, 0.07284121704101562, 0.07239875030517579, 0.07222406768798828, 0.07290755462646484, 0.07238409423828125, 0.07290716552734375, 0.07295913696289062, 0.07361599731445312, 0.07399241638183594, 0.07398502349853515, 0.07336367797851563, 0.07300176239013671, 0.072476318359375, 0.07348595428466796, 0.07287471771240234, 0.07247046661376953, 0.07305836486816407, 0.07258470153808594, 0.07222306823730469, 0.07305436706542968, 0.07362969970703125, 0.07365631866455079, 0.07374642944335938, 0.07338317108154296, 0.07343577575683594, 0.07322608184814453, 0.0730524444580078, 0.07325596618652344, 0.0727394256591797, 0.0736153564453125, 0.07321024322509766, 0.08365465545654296, 0.07279821014404297, 0.07214899444580078, 0.07176953887939454, 0.0712586898803711, 0.07220626831054687, 0.07235385894775391, 0.07192781066894531, 0.07134413146972657, 0.07135958099365235, 0.07259340667724609, 0.07202409362792969, 0.07375084686279297, 0.07572672271728516, 0.07403916931152343, 0.0731240005493164, 0.07240566253662109, 0.0720404510498047, 0.07200153350830078, 0.07268761444091797, 0.07207929229736328, 0.07198480224609374, 0.07281705474853516, 0.07215513610839844, 0.07189910125732422, 0.07329939270019531, 0.07343106842041015, 0.0741299819946289, 0.07352114868164063, 0.07306240081787109, 0.07251891326904297, 0.0728542709350586, 0.07232921600341796, 0.07201337432861328, 0.07287648010253907, 0.07266508483886719, 0.07239020538330078, 0.07244429016113281, 0.07363385772705078, 0.07331407928466797, 0.07357257843017578, 0.07440793609619141, 0.07320745849609375, 0.07293113708496093, 0.07339794921875, 0.07278268432617188, 0.07245209503173829, 0.07243775939941406, 0.07343507385253906, 0.07300307464599609, 0.07261090850830078, 0.07316162872314454, 0.07329897308349609, 0.07378633880615235, 0.07334502410888671, 0.07338304138183593, 0.07338662719726563, 0.07339622497558594, 0.07340672302246094, 0.07324467468261718, 0.07269932556152343, 0.07291910552978516, 0.07385753631591797]",tokens/s,13.718980300895156,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1071.243264,1620.967424,0.0,1218.445312,1206.173696,s,1,9.04040625,9.04040625,0.0,9.04040625,9.04040625,9.04040625,9.04040625,[9.04040625],,kWh,5.862635152501146e-05,6.459827923634388e-06,1.941418219800206e-05,8.450036164664791e-05,,MB,1437.917184,1916.665856,0.0,1501.560832,1463.359488,s,10,1.882818161010742,0.18828181610107422,0.0023188123239283216,0.18900495910644532,0.189459846496582,0.18961526870727538,0.18973960647583007,"[0.18977069091796875, 0.18856594848632813, 0.18862226867675783, 0.18942530822753906, 0.18904287719726562, 
0.1814215087890625, 0.1890272674560547, 0.18854185485839844, 0.18941778564453124, 0.18898265075683593]",tokens/s,1359.6639617209398,kWh,5.693061443349588e-06,6.278052811347906e-07,3.781514777346528e-06,1.0102381501830907e-05,tokens/kWh,25340559.5456481,MB,1462.984704,1918.763008,0.0,1501.560832,1463.362048,s,10,22.465515380859376,2.2465515380859378,0.012813343195942519,2.244156005859375,2.2562972412109374,2.2672335815429685,2.275982653808594,"[2.278169921875, 2.240012451171875, 2.253692626953125, 2.242864013671875, 2.24546240234375, 2.253866943359375, 2.243907958984375, 2.228453125, 2.244404052734375, 2.234681884765625]",tokens/s,28.04298006609544,kWh,6.585902629955904e-05,7.264205355914748e-06,3.207601390865426e-05,0.00010519924556412805,tokens/kWh,598863.6103059888,,s,630,22.46282686233521,0.035655280733865403,0.0004535664866364251,0.03552896118164063,0.03613306083679199,0.036396834754943846,0.03740851394653322,"[0.03595731353759766, 0.03580518341064453, 0.03556147384643555, 0.03545695877075195, 0.03555744171142578, 0.035530494689941405, 0.0355781135559082, 0.0365404167175293, 0.03692748641967773, 0.03656828689575195, 0.03609177780151367, 0.035947425842285156, 0.035999744415283204, 0.03624755096435547, 0.036332897186279293, 0.036442623138427735, 0.03603267288208008, 0.036076801300048825, 0.036227840423583985, 0.0365219841003418, 0.03653017425537109, 0.03620630264282226, 0.03619868850708008, 0.036229118347167966, 0.03653017425537109, 0.0361324462890625, 0.03638723373413086, 0.036421630859375, 0.03632332611083984, 0.036732799530029295, 0.03654252624511719, 0.0363355827331543, 0.036302303314208986, 0.0366352653503418, 0.03686355209350586, 0.03699756622314453, 0.03643334579467773, 0.036332096099853516, 0.03590348815917969, 0.03569424057006836, 0.0361207046508789, 0.03634198379516602, 0.036203617095947264, 0.037768096923828126, 0.03612057495117187, 0.03590883255004883, 0.03566262435913086, 0.03584758377075195, 0.035934814453125, 0.036005088806152344, 0.03573635101318359, 0.035751136779785156, 0.036039329528808596, 0.03610432052612305, 0.036345855712890625, 0.03621478271484375, 0.03600352096557617, 0.03605657577514648, 0.035743873596191404, 0.035705535888671876, 0.035649120330810545, 0.035666336059570314, 0.03582511901855469, 0.035857761383056644, 0.03564380645751953, 0.03563955307006836, 0.03546291351318359, 0.03535443115234375, 0.03544713592529297, 0.0353076171875, 0.03537472152709961, 0.035549022674560546, 0.03584463882446289, 0.0356864013671875, 0.035530975341796875, 0.03548086547851562, 0.035529216766357424, 0.03544627380371094, 0.03537561416625976, 0.03552867126464844, 0.03540963363647461, 0.03630303955078125, 0.0354837760925293, 0.03559628677368164, 0.036033695220947265, 0.03578966522216797, 0.03556480026245117, 0.035819297790527345, 0.03547235107421875, 0.035325824737548826, 0.03547763061523437, 0.03600716781616211, 0.03556224060058594, 0.03603267288208008, 0.03548944091796875, 0.03553094482421875, 0.03547545623779297, 0.03555526351928711, 0.035874881744384766, 0.035612319946289064, 0.03580950546264648, 0.035825790405273436, 0.035622337341308596, 0.03550838470458984, 0.03545334243774414, 0.03545087814331055, 0.035282943725585936, 0.03535251235961914, 0.03537926483154297, 0.035332096099853515, 0.03544678497314453, 0.035432449340820314, 0.03564051055908203, 0.03577727890014649, 0.03543782424926758, 0.03561145782470703, 0.035286304473876956, 0.03531439971923828, 0.03531366348266601, 0.0352911376953125, 0.03569868850708008, 0.03544268798828125, 0.03539763259887695, 0.03542537689208984, 
0.035310272216796876, 0.035463390350341795, 0.03638905715942383, 0.0361082878112793, 0.035827713012695314, 0.03563043212890625, 0.03542208099365234, 0.0355351676940918, 0.035534431457519534, 0.03605184173583984, 0.03594598388671875, 0.037556129455566405, 0.036942272186279296, 0.03751702499389648, 0.03619884872436523, 0.03599564743041992, 0.035913440704345705, 0.036052833557128905, 0.03577695846557617, 0.03558348846435547, 0.03550783920288086, 0.035543937683105466, 0.03549184036254883, 0.035450496673583985, 0.035479934692382815, 0.03578675079345703, 0.03557785415649414, 0.03552191925048828, 0.03566201782226563, 0.035774177551269534, 0.03574585723876953, 0.03593078231811524, 0.03571712112426758, 0.03551027297973633, 0.03546931076049804, 0.03545315170288086, 0.035509376525878905, 0.03547724914550781, 0.03562083053588867, 0.035478401184082034, 0.03605424118041992, 0.03562377548217773, 0.03618815994262695, 0.035727359771728515, 0.035546817779541016, 0.035445056915283206, 0.035489791870117186, 0.03537100982666016, 0.03542959976196289, 0.03554793548583984, 0.03549728012084961, 0.035506881713867185, 0.03550822448730469, 0.03569427108764649, 0.03547983932495117, 0.035485729217529294, 0.03601408004760742, 0.03565884780883789, 0.03601500701904297, 0.03604188919067383, 0.03570732879638672, 0.03550249481201172, 0.03573526382446289, 0.03559862518310547, 0.03589529418945313, 0.03608063888549805, 0.03606790542602539, 0.035755615234375, 0.03558486557006836, 0.03808870315551758, 0.036099231719970704, 0.03581020736694336, 0.035542976379394534, 0.0355568962097168, 0.03540835189819336, 0.03562496185302735, 0.03539763259887695, 0.03542540740966797, 0.03546406555175781, 0.03555737686157227, 0.035487743377685545, 0.035659774780273434, 0.03600998306274414, 0.03562700653076172, 0.035499393463134764, 0.035455615997314456, 0.03537919998168945, 0.03548137664794922, 0.035424480438232424, 0.03543174362182617, 0.03540822219848633, 0.036108638763427736, 0.03587481689453125, 0.03616153717041016, 0.03585638427734375, 0.03572531127929687, 0.035522174835205075, 0.03549017715454102, 0.03541584014892578, 0.03541424179077148, 0.03542425537109375, 0.036278270721435545, 0.03537100982666016, 0.03535222244262695, 0.03566422271728516, 0.03541196823120117, 0.035297279357910154, 0.035388736724853515, 0.03543519973754883, 0.03544678497314453, 0.03562662506103516, 0.03557392120361328, 0.03531695938110352, 0.03542937469482422, 0.035399166107177735, 0.035315265655517576, 0.035363712310791016, 0.03538336181640625, 0.03537919998168945, 0.035422206878662106, 0.03549593734741211, 0.035493663787841793, 0.03539580917358399, 0.03540787124633789, 0.03530137634277344, 0.03556966400146484, 0.035355934143066405, 0.035422943115234376, 0.035639297485351565, 0.0380948486328125, 0.035865825653076173, 0.03562985610961914, 0.035579902648925785, 0.03559219360351563, 0.03582963180541992, 0.03562895965576172, 0.03554531097412109, 0.03541584014892578, 0.0354285774230957, 0.03559139251708984, 0.03591017532348633, 0.035901695251464846, 0.0365107536315918, 0.03637142562866211, 0.03631513595581055, 0.035917823791503906, 0.035452606201171875, 0.03546758270263672, 0.03542015838623047, 0.03531980895996094, 0.03539513778686523, 0.035485313415527346, 0.03537334442138672, 0.035410465240478514, 0.0353955192565918, 0.035436607360839846, 0.03536896133422852, 0.03568016052246094, 0.03553699111938476, 0.03557785415649414, 0.035590145111083986, 0.03576422500610352, 0.035748958587646484, 0.03573974227905274, 0.035711807250976564, 0.03555110549926758, 0.03565990447998047, 
0.03540377426147461, 0.03548160171508789, 0.03542771148681641, 0.03538188934326172, 0.03525222396850586, 0.03554304122924805, 0.03552204895019531, 0.03534000015258789, 0.03539023971557617, 0.03537715148925781, 0.03540707015991211, 0.03660675048828125, 0.03553878402709961, 0.0355063362121582, 0.035458465576171876, 0.03542806243896485, 0.03608163070678711, 0.03531254577636719, 0.03559628677368164, 0.03546931076049804, 0.035639297485351565, 0.03538937759399414, 0.03544070434570312, 0.035381248474121094, 0.03638083267211914, 0.035582176208496095, 0.03594873428344727, 0.0354752311706543, 0.03562518310546875, 0.035678207397460936, 0.03566105651855469, 0.03539142227172851, 0.03546121597290039, 0.03541823959350586, 0.03535318374633789, 0.03549568176269531, 0.03552691268920898, 0.035568992614746095, 0.03544745635986328, 0.035776512145996094, 0.035432449340820314, 0.03547750473022461, 0.03567923355102539, 0.03557833480834961, 0.0355753288269043, 0.03558092880249023, 0.03562828826904297, 0.03554790496826172, 0.03577439880371094, 0.03595792007446289, 0.036553630828857424, 0.036311038970947264, 0.0364031982421875, 0.0357184944152832, 0.03569116973876953, 0.035676158905029294, 0.03583916854858398, 0.0358039665222168, 0.03570000076293945, 0.03556630325317383, 0.03577036666870117, 0.03555104064941406, 0.03559852981567383, 0.0355676155090332, 0.035783935546875, 0.035739711761474606, 0.03582550430297852, 0.036170753479003906, 0.036130016326904296, 0.035947135925292965, 0.03587276840209961, 0.035652801513671874, 0.03562374496459961, 0.03563520050048828, 0.035655681610107424, 0.035606369018554684, 0.03568246459960937, 0.035536895751953124, 0.035595806121826175, 0.03563967895507812, 0.03563119888305664, 0.03567001724243164, 0.03586191940307617, 0.035524833679199216, 0.03960812759399414, 0.03581087875366211, 0.035633216857910155, 0.03593027114868164, 0.0357176628112793, 0.035800640106201174, 0.03542073440551758, 0.03539558410644531, 0.035506175994873046, 0.03544784164428711, 0.035519454956054686, 0.035606529235839846, 0.03552460861206055, 0.03598950576782227, 0.035522560119628906, 0.03590758514404297, 0.035536895751953124, 0.03566387176513672, 0.035856063842773435, 0.03714284896850586, 0.03665644836425781, 0.036636478424072264, 0.03612063980102539, 0.035920639038085934, 0.036227134704589846, 0.036068958282470705, 0.03548201751708984, 0.03545862579345703, 0.035347999572753905, 0.035377376556396486, 0.035465217590332034, 0.03538604736328125, 0.0354856948852539, 0.03590553665161133, 0.035737598419189456, 0.035581310272216796, 0.03593875122070313, 0.03556780624389649, 0.03541401672363281, 0.03551232147216797, 0.035449886322021486, 0.03550851058959961, 0.03538604736328125, 0.03539961624145508, 0.03537516784667969, 0.03545459365844727, 0.03540620803833008, 0.03562656021118164, 0.035657886505126954, 0.035450206756591794, 0.035472320556640624, 0.03550796890258789, 0.03555728149414063, 0.0356662712097168, 0.03528444671630859, 0.035313182830810544, 0.035544384002685545, 0.035313343048095705, 0.03539750289916992, 0.03527484893798828, 0.03535055923461914, 0.03522067260742188, 0.03541484832763672, 0.03516211318969727, 0.03538739013671875, 0.03532185745239258, 0.03579612731933594, 0.03559100723266601, 0.035471359252929685, 0.03562496185302735, 0.03574784088134766, 0.03557785415649414, 0.03533820724487305, 0.03525225448608398, 0.035216705322265625, 0.03536966323852539, 0.035433631896972656, 0.03612249755859375, 0.036832225799560546, 0.03544678497314453, 0.035334144592285156, 0.03525360107421875, 0.03519251251220703, 
0.035230686187744144, 0.03510476684570313, 0.0353331184387207, 0.035111934661865234, 0.035305118560791014, 0.03526486587524414, 0.035186527252197265, 0.03537315368652344, 0.035364383697509764, 0.03556396865844726, 0.03530758285522461, 0.035354656219482423, 0.03530752182006836, 0.035225601196289064, 0.03517350387573242, 0.03515891265869141, 0.03518463897705078, 0.035729408264160156, 0.035315711975097655, 0.03514777755737305, 0.03522969436645508, 0.0354856948852539, 0.03537100982666016, 0.035399681091308595, 0.03527475357055664, 0.0351558723449707, 0.035216800689697264, 0.03531782531738281, 0.03530815887451172, 0.03515523147583008, 0.03544137573242188, 0.035251232147216795, 0.03523043060302734, 0.035259998321533204, 0.035265151977539065, 0.03519823837280273, 0.03523455810546875, 0.03534000015258789, 0.03519049453735352, 0.03523231887817383, 0.035231166839599606, 0.035316287994384764, 0.035388702392578124, 0.03559702301025391, 0.03530137634277344, 0.035442432403564456, 0.035815807342529295, 0.03543983840942383, 0.035244831085205076, 0.035288288116455076, 0.035203872680664064, 0.03515907287597656, 0.03525116729736328, 0.03599350357055664, 0.03528051376342774, 0.035426334381103514, 0.03988729476928711, 0.03558367919921875, 0.03543891143798828, 0.035345664978027345, 0.03556224060058594, 0.035385311126708986, 0.03560451126098633, 0.035421375274658204, 0.03543532943725586, 0.035325473785400394, 0.035434944152832035, 0.03531760025024414, 0.03616284942626953, 0.0358675537109375, 0.03623116683959961, 0.03613859176635742, 0.03585248184204102, 0.03591190338134766, 0.03598470306396485, 0.03547999954223633, 0.03573990249633789, 0.03571283340454102, 0.03562931060791016, 0.03537913513183594, 0.035422206878662106, 0.03536281585693359, 0.03544678497314453, 0.03552870559692383, 0.03553238296508789, 0.03527471923828125, 0.03536326217651367, 0.03526041412353516, 0.035465217590332034, 0.03527446365356445, 0.03578704071044922, 0.03544172668457031, 0.03549894332885742, 0.035605918884277346, 0.03569724655151367, 0.03537715148925781, 0.03540707015991211, 0.035367710113525394, 0.035915775299072264, 0.0355491828918457, 0.035671550750732424, 0.03556972885131836, 0.035640960693359376, 0.03540633773803711, 0.03631721496582031, 0.03551663970947266, 0.03550601577758789, 0.03534211349487305, 0.035485599517822264, 0.03568854522705078, 0.035488960266113284, 0.035531585693359374, 0.03540694427490235, 0.0353903694152832, 0.035264511108398434, 0.03528704071044922, 0.03528908920288086, 0.036421119689941404, 0.03542591857910156, 0.035482177734375, 0.03537443161010742, 0.03527318572998047, 0.035430912017822266, 0.03574771118164063, 0.0358419189453125, 0.03563888168334961, 0.0355805778503418, 0.03539286422729492, 0.03537168121337891, 0.03534425735473633, 0.03529043197631836, 0.03533699035644531, 0.035272705078125, 0.03534595108032226, 0.03530387115478516, 0.0352743034362793, 0.035278656005859374, 0.03538399887084961, 0.03520236968994141, 0.03536742401123047, 0.035401920318603515, 0.035504127502441404, 0.035510208129882814, 0.03534601593017578, 0.035389919281005856, 0.03544063949584961, 0.03517545700073242, 0.035377471923828126, 0.035291839599609375, 0.03532137680053711, 0.03536297607421875, 0.03548713684082031, 0.035608959197998044, 0.036856319427490236, 0.035631103515625, 0.03541401672363281, 0.035381248474121094, 0.03534438323974609, 0.035436351776123046, 0.03535481643676758, 0.03561062240600586, 0.035332096099853515, 0.03545702362060547, 0.03542835235595703, 0.035370559692382814, 0.03562937545776367, 0.03574508666992188, 
0.03550291061401367, 0.03546112060546875, 0.03563315200805664, 0.03542835235595703, 0.035432449340820314]",tokens/s,28.046336458941397,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 
170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,1136.173056,717.094912,0.0,314.5728,299.62752,s,1,7.77460888671875,7.77460888671875,0.0,7.77460888671875,7.77460888671875,7.77460888671875,7.77460888671875,[7.77460888671875],,kWh,2.4986066145841806e-05,2.7486393619371435e-06,8.643895803989166e-06,3.6378601311768115e-05,,MB,1342.988288,805.175296,0.0,381.681664,359.87456,s,10,0.29142966270446774,0.02914296627044678,0.000218134492940454,0.02920481586456299,0.029377436447143555,0.029408381843566894,0.029433138160705564,"[0.02930191993713379, 0.029206367492675783, 0.029439327239990234, 0.02922217559814453, 0.02916012763977051, 0.02896499252319336, 0.029370559692382812, 0.029203264236450196, 0.028747167587280274, 0.02881376075744629]",tokens/s,8784.28083209923,kWh,8.556456550954236e-07,9.436323750614015e-08,3.315170459237133e-07,1.281525938525277e-06,tokens/kWh,199761856.0062806,MB,1355.83744,828.243968,0.0,404.750336,361.449984,s,10,18.04887744140625,1.804887744140625,0.012009411403353676,1.805103454589844,1.8158305053710937,1.823798065185547,1.8301721130371094,"[1.80677490234375, 1.831765625, 1.80576953125, 1.8052503662109376, 1.80249755859375, 1.803319091796875, 1.8140599365234376, 1.80495654296875, 1.7876373291015626, 1.7868465576171875]",tokens/s,34.90521790317584,kWh,5.27074589340766e-05,5.8133208852796235e-06,1.8408053501278386e-05,7.692883332063462e-05,tokens/kWh,818938.7162212626,,s,630,18.043907413482653,0.028641122878543913,0.0005902896914517371,0.028561952590942383,0.028928157043457033,0.02912388925552368,0.030426826667785657,"[0.028427743911743165, 0.028614591598510743, 0.028645503997802736, 0.028615039825439455, 0.02869766426086426, 0.028633184432983398, 0.028606943130493164, 0.02849420738220215, 0.028528511047363283, 0.028464672088623046, 0.028786304473876954, 0.02878767967224121, 0.02889727973937988, 0.028684192657470704, 0.02861270332336426, 0.028631040573120117, 0.028896287918090822, 0.02868230438232422, 0.02859846305847168, 0.028588768005371093, 0.028591520309448244, 0.028609119415283202, 0.028680192947387696, 0.028705791473388673, 0.028623807907104493, 0.02860873603820801, 0.028686080932617188, 0.028910911560058594, 0.029004575729370118, 0.02872831916809082, 0.028606464385986328, 0.02857881546020508, 0.02871500778198242, 0.028598272323608398, 0.02853913688659668, 0.028643072128295897, 0.02861289596557617, 0.0290546875, 0.028589248657226562, 0.02859414482116699, 0.02861961555480957, 0.028611616134643556, 0.028588319778442384, 0.028658016204833985, 0.028590431213378908, 0.028970848083496092, 0.02864579200744629, 0.02884124755859375, 0.028729824066162108, 0.02872879981994629, 0.02873193550109863, 0.02877235221862793, 0.028659360885620117, 0.02855561637878418, 0.028830911636352537, 0.028835872650146484, 0.02870147132873535, 0.028635135650634767, 
0.028596223831176756, 0.02851840019226074, 0.028504159927368163, 0.028579999923706054, 0.02876531219482422, 0.028391199111938478, 0.028553216934204102, 0.02854911994934082, 0.028618751525878908, 0.028517759323120118, 0.02847107124328613, 0.028484447479248047, 0.02851958465576172, 0.028518815994262696, 0.02852412796020508, 0.028515167236328125, 0.028590240478515626, 0.028493312835693358, 0.028506208419799804, 0.028514623641967773, 0.028632896423339844, 0.02936025619506836, 0.028885183334350587, 0.028788543701171874, 0.028521568298339843, 0.02863929557800293, 0.028752607345581056, 0.028829824447631835, 0.02870649528503418, 0.03345849609375, 0.03447334289550781, 0.029202880859375, 0.029769983291625977, 0.02892185592651367, 0.028922847747802734, 0.02918000030517578, 0.02906540870666504, 0.02887516784667969, 0.029135135650634764, 0.028649471282958985, 0.02854012870788574, 0.028532608032226563, 0.028486560821533204, 0.028676095962524413, 0.028657472610473633, 0.02849967956542969, 0.02848611259460449, 0.028458112716674804, 0.03748742294311523, 0.028776479721069337, 0.028557344436645506, 0.028559295654296876, 0.028397632598876954, 0.02858598327636719, 0.029379743576049805, 0.03012403106689453, 0.02937276840209961, 0.029163936614990234, 0.028759231567382814, 0.028815263748168944, 0.028656511306762694, 0.028758144378662108, 0.028872703552246092, 0.02874163246154785, 0.028737535476684572, 0.028960224151611327, 0.028891679763793945, 0.028801088333129884, 0.02856287956237793, 0.02875449562072754, 0.028678144454956055, 0.02859212875366211, 0.028860416412353516, 0.028628992080688476, 0.0285914249420166, 0.028609312057495118, 0.028692384719848633, 0.028643327713012694, 0.028456384658813477, 0.02851487922668457, 0.028477439880371092, 0.028546239852905272, 0.028481727600097657, 0.02868486404418945, 0.028594079971313476, 0.028583200454711914, 0.02851046371459961, 0.028603008270263672, 0.028580863952636718, 0.02862940788269043, 0.0285251522064209, 0.029014015197753908, 0.028613855361938476, 0.028524927139282227, 0.028668479919433595, 0.029110143661499024, 0.029154367446899414, 0.028975807189941406, 0.02877052879333496, 0.028534528732299804, 0.028655519485473634, 0.028527135848999022, 0.02854854393005371, 0.02844838333129883, 0.02855792045593262, 0.02851580810546875, 0.029043392181396486, 0.03053126335144043, 0.028698463439941407, 0.028709344863891602, 0.028619808197021486, 0.028524799346923826, 0.028643327713012694, 0.028432159423828124, 0.02839852714538574, 0.028440671920776366, 0.02854287910461426, 0.028510112762451172, 0.028516639709472658, 0.028638431549072266, 0.028522207260131837, 0.028574592590332033, 0.028688383102416993, 0.028823392868041992, 0.028582048416137696, 0.028614656448364258, 0.02853436851501465, 0.02853638458251953, 0.028535648345947264, 0.0285347843170166, 0.028655487060546873, 0.02852060890197754, 0.028931583404541016, 0.02916592025756836, 0.029038335800170897, 0.028892608642578126, 0.028801855087280274, 0.0287457275390625, 0.028512256622314453, 0.028503328323364257, 0.028523231506347658, 0.028925216674804688, 0.028517087936401366, 0.028502304077148436, 0.028716896057128908, 0.029203935623168944, 0.028555551528930665, 0.028555391311645507, 0.028515327453613282, 0.028564479827880858, 0.02860851287841797, 0.02854297637939453, 0.028619903564453125, 0.028600351333618164, 0.02850899124145508, 0.028467519760131836, 0.028540735244750978, 0.028485055923461913, 0.028608671188354494, 0.02860416030883789, 0.02849849510192871, 0.02852659225463867, 0.028473119735717773, 0.02860076713562012, 
0.028539840698242187, 0.028525407791137696, 0.02864883232116699, 0.028594816207885742, 0.029839359283447265, 0.0301711368560791, 0.028672351837158203, 0.028501760482788085, 0.028491071701049805, 0.028504255294799805, 0.028485567092895507, 0.028475648880004884, 0.028707040786743163, 0.02860851287841797, 0.028559392929077148, 0.028489696502685548, 0.02872540855407715, 0.02860233688354492, 0.028554847717285156, 0.028522687911987303, 0.028534879684448244, 0.02846447944641113, 0.028503871917724608, 0.02861452865600586, 0.028547935485839844, 0.028540384292602538, 0.02847817611694336, 0.028579776763916015, 0.028429664611816407, 0.02847593688964844, 0.028424192428588867, 0.028674047470092775, 0.028601600646972657, 0.028637632369995118, 0.028620288848876952, 0.028717056274414062, 0.028639104843139647, 0.02874844741821289, 0.029241632461547852, 0.029271072387695312, 0.028947391510009766, 0.028949600219726562, 0.028658720016479493, 0.02858998489379883, 0.0285347843170166, 0.028577951431274413, 0.028618719100952147, 0.028624319076538087, 0.028602815628051757, 0.028663808822631837, 0.028623104095458984, 0.0286246395111084, 0.028482784271240236, 0.028492128372192383, 0.028494272232055664, 0.0285614070892334, 0.028487680435180664, 0.02857478332519531, 0.028482336044311524, 0.02847760009765625, 0.02845859146118164, 0.028520479202270507, 0.028533279418945314, 0.028443967819213867, 0.028715551376342772, 0.028573728561401366, 0.02842825508117676, 0.028424192428588867, 0.02848348808288574, 0.028455007553100587, 0.028544351577758788, 0.0286627197265625, 0.028440288543701172, 0.028390623092651366, 0.028539680480957032, 0.0288720645904541, 0.02848627281188965, 0.02843212890625, 0.02857721519470215, 0.028431167602539064, 0.028445823669433594, 0.02843123245239258, 0.028505311965942384, 0.028367647171020506, 0.028588287353515623, 0.02857708740234375, 0.028684736251831055, 0.028709087371826172, 0.028417343139648436, 0.028452991485595703, 0.028385631561279295, 0.029816127777099608, 0.02855792045593262, 0.028379199981689453, 0.028498016357421874, 0.02863929557800293, 0.028552928924560548, 0.02852467155456543, 0.02850912094116211, 0.028417055130004882, 0.028516319274902342, 0.028469247817993162, 0.02860598373413086, 0.028543264389038085, 0.02857369613647461, 0.028524608612060548, 0.028579904556274415, 0.029108287811279297, 0.02858915138244629, 0.029008800506591798, 0.028907520294189453, 0.02853436851501465, 0.028709407806396484, 0.028458944320678713, 0.028675071716308592, 0.028529695510864258, 0.028502111434936524, 0.028730880737304686, 0.028532480239868162, 0.028672224044799806, 0.028590431213378908, 0.02869228744506836, 0.028993728637695313, 0.028540672302246092, 0.028633344650268556, 0.028669952392578125, 0.02884160041809082, 0.0286376953125, 0.0287271671295166, 0.02852249526977539, 0.028464128494262695, 0.028627296447753907, 0.028572128295898436, 0.028907487869262696, 0.02855344009399414, 0.028554784774780274, 0.028518304824829102, 0.02860214424133301, 0.028597024917602538, 0.028532127380371093, 0.028635744094848634, 0.028454336166381836, 0.028492191314697265, 0.02850422477722168, 0.028534688949584962, 0.028518495559692384, 0.028598272323608398, 0.02853001594543457, 0.028492448806762695, 0.02857740783691406, 0.02885260772705078, 0.028545024871826172, 0.02886822319030762, 0.028696895599365235, 0.028841856002807618, 0.02861075210571289, 0.028548576354980468, 0.028647968292236328, 0.028732959747314452, 0.02871139144897461, 0.028729343414306642, 0.028686080932617188, 0.028872831344604492, 0.028522432327270506, 
0.028862560272216797, 0.02856150436401367, 0.028604415893554686, 0.02951078414916992, 0.028594079971313476, 0.02857263946533203, 0.028487199783325194, 0.02856723213195801, 0.02870675277709961, 0.028647327423095705, 0.02908460807800293, 0.028924095153808595, 0.02875775909423828, 0.02902432060241699, 0.02872319984436035, 0.028712959289550782, 0.02855116844177246, 0.02862505531311035, 0.02867344093322754, 0.028700479507446287, 0.029264511108398436, 0.028753919601440428, 0.028811519622802734, 0.02904447937011719, 0.028684288024902343, 0.028548799514770507, 0.02861087989807129, 0.02856550407409668, 0.028414176940917968, 0.028562400817871095, 0.028738208770751953, 0.028592287063598634, 0.028551200866699218, 0.02871401596069336, 0.02846953582763672, 0.028568223953247072, 0.02869152069091797, 0.029068416595458984, 0.028680288314819335, 0.02851932716369629, 0.028481536865234375, 0.028437503814697264, 0.028695552825927735, 0.028560192108154296, 0.02995609664916992, 0.02972435188293457, 0.029384544372558594, 0.029737695693969727, 0.028796672821044922, 0.028631040573120117, 0.0289751033782959, 0.029061119079589845, 0.029076799392700196, 0.028956832885742186, 0.02887455940246582, 0.02834147262573242, 0.02863372802734375, 0.02862940788269043, 0.02860220718383789, 0.028487680435180664, 0.028487360000610352, 0.03167795181274414, 0.02906604766845703, 0.031938560485839845, 0.02964233589172363, 0.028649887084960936, 0.028690528869628907, 0.028524160385131836, 0.028567840576171875, 0.028825599670410155, 0.02853276824951172, 0.028946399688720703, 0.028862464904785157, 0.02858393669128418, 0.028507808685302734, 0.028664159774780273, 0.028489343643188475, 0.028848703384399415, 0.028509056091308594, 0.028569984436035156, 0.028889408111572267, 0.028748031616210937, 0.02843027114868164, 0.028526847839355468, 0.028927776336669923, 0.028486719131469728, 0.029037471771240234, 0.02861471939086914, 0.028864511489868162, 0.02858598327636719, 0.028499967575073244, 0.02900726318359375, 0.028543584823608397, 0.028241920471191406, 0.028272640228271483, 0.028138751983642577, 0.02845913505554199, 0.028178464889526366, 0.02816022491455078, 0.028254592895507812, 0.02822755241394043, 0.02823958396911621, 0.028111167907714844, 0.028159263610839844, 0.028359392166137695, 0.028327104568481445, 0.028412736892700196, 0.028520479202270507, 0.028434528350830077, 0.028428159713745117, 0.02839756774902344, 0.028207103729248048, 0.028299360275268554, 0.029267871856689453, 0.028208864212036132, 0.028164384841918945, 0.02832614326477051, 0.028228511810302736, 0.02819011116027832, 0.02824041557312012, 0.028598272323608398, 0.02880102348327637, 0.03162227249145508, 0.028957536697387695, 0.028827680587768554, 0.029071359634399413, 0.02864067268371582, 0.02834239959716797, 0.028471839904785155, 0.028325664520263673, 0.028166303634643553, 0.028136512756347657, 0.028134527206420897, 0.028329792022705077, 0.028366847991943358, 0.028274688720703125, 0.028141151428222655, 0.028137887954711914, 0.028073984146118162, 0.02841129684448242, 0.028119647979736328, 0.02841142463684082, 0.028117151260375978, 0.02820947265625, 0.028219392776489258, 0.0281844482421875, 0.028115072250366212, 0.028251455307006835, 0.028144319534301757, 0.028430335998535155, 0.028278783798217775, 0.02812726402282715, 0.0280798397064209, 0.028229888916015626, 0.028272640228271483, 0.028196863174438477, 0.028210239410400392, 0.028172256469726563, 0.028443199157714844, 0.0282956485748291, 0.028751808166503905, 0.028423839569091797, 0.028399967193603517, 0.028300352096557617, 
0.028289920806884767, 0.02812233543395996, 0.02830761528015137, 0.028119455337524413, 0.028258687973022462, 0.028347455978393554, 0.02833839988708496, 0.02820159912109375, 0.028287008285522462, 0.02820707130432129, 0.028458431243896486, 0.0281464958190918, 0.02824575996398926, 0.028258304595947265, 0.028268543243408203, 0.028391424179077147, 0.028260351181030274, 0.028264448165893553, 0.0282063045501709, 0.028132287979125977, 0.028149311065673827, 0.02805379295349121, 0.028065280914306642, 0.028268447875976564, 0.02815446472167969, 0.028127296447753906, 0.02819273567199707, 0.028243967056274414, 0.02824617576599121, 0.028544832229614257, 0.028270591735839845, 0.028839647293090822, 0.028591808319091798, 0.028375360488891603, 0.02828326416015625, 0.028295072555541992, 0.028243616104125978, 0.028223136901855468, 0.02828767967224121, 0.02851430320739746, 0.02836854362487793, 0.028207456588745117, 0.028387392044067383, 0.028291296005249024, 0.028500864028930664, 0.02823664093017578, 0.028186975479125978, 0.028427679061889647, 0.028383487701416014, 0.028254207611083985, 0.028420095443725587, 0.02880102348327637, 0.028465152740478516, 0.028546367645263672, 0.028869312286376955, 0.028807167053222657, 0.028851903915405274, 0.02859779167175293, 0.028479999542236328, 0.028336416244506835, 0.028284160614013672, 0.02826316833496094, 0.02814784049987793, 0.028256128311157227, 0.028223487854003908, 0.028213119506835936, 0.028213375091552733, 0.02895462417602539, 0.028504064559936523, 0.028498176574707032, 0.028268287658691407, 0.028215295791625978, 0.02817638397216797, 0.028219263076782228, 0.02834214401245117, 0.028387584686279298, 0.02822105598449707, 0.028170080184936525, 0.02825270462036133, 0.02855331230163574]",tokens/s,34.91483222360446,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,1930.207232,1065.222144,0.0,662.700032,622.833664,s,1,8.8821357421875,8.8821357421875,0.0,8.8821357421875,8.8821357421875,8.8821357421875,8.8821357421875,[8.8821357421875],,kWh,5.4556100841652246e-05,6.008144206364278e-06,1.8316403541995818e-05,7.888064859001235e-05,,MB,1978.53184,1188.954112,0.0,765.46048,735.57504,s,10,0.5625825576782227,0.056258255767822264,0.0003946334828564453,0.056334592819213866,0.05668949432373047,0.05680925903320313,0.056905070800781256,"[0.056929023742675784, 0.05644198226928711, 0.05666287994384766, 0.05635753631591797, 0.05624582290649414, 0.056454463958740236, 0.05577040100097656, 0.05631164932250977, 0.055644927978515626, 0.055763870239257815]",tokens/s,4550.443246170155,kWh,1.6498098588513618e-06,1.8194565711964455e-07,6.789616228927724e-07,2.510717138863779e-06,tokens/kWh,101962899.77765174,MB,1983.553536,1201.537024,0.0,778.043392,751.3984,s,10,35.36420190429688,3.5364201904296877,0.00961305118144429,3.534966552734375,3.550522998046875,3.5510461669921876,3.5514647021484373,"[3.55040673828125, 3.54607177734375, 3.5515693359375, 3.53940185546875, 
3.5355927734375, 3.52822802734375, 3.523607666015625, 3.53434033203125, 3.5282744140625, 3.526708984375]",tokens/s,17.81462513150771,kWh,0.00010312824348323384,1.1374182725746187e-05,3.6494179226704556e-05,0.0001509966054356846,tokens/kWh,417227.9225630286,,s,630,35.35867765808102,0.05612488517155722,0.0006847323852249661,0.056002399444580075,0.056586468505859376,0.05685952987670899,0.05932263256072998,"[0.056170303344726565, 0.05657632064819336, 0.05606582260131836, 0.05607788848876953, 0.06425859069824219, 0.056571041107177734, 0.05633744049072266, 0.056129470825195316, 0.05578329467773437, 0.055822559356689457, 0.055820064544677736, 0.05589811325073242, 0.05561753463745117, 0.055766368865966795, 0.05581401443481445, 0.055917407989501955, 0.05622713470458984, 0.05615212631225586, 0.056371776580810544, 0.056389633178710936, 0.055906494140625, 0.05606585693359375, 0.05629951858520508, 0.05609369659423828, 0.05642160034179688, 0.05614972686767578, 0.056308990478515626, 0.056130081176757815, 0.05621990585327148, 0.05591247940063476, 0.056025089263916014, 0.05626265716552734, 0.05654048156738281, 0.056181217193603514, 0.056823551177978514, 0.05619760131835937, 0.05675439834594727, 0.056586017608642576, 0.056629249572753906, 0.05621964645385742, 0.05590224075317383, 0.05597740936279297, 0.05628982543945313, 0.05642758560180664, 0.056027328491210934, 0.056146305084228514, 0.0560766716003418, 0.05560931015014649, 0.055793441772460935, 0.0559475212097168, 0.055947265625, 0.056815616607666014, 0.056386878967285156, 0.055970497131347656, 0.05671052932739258, 0.05656607818603516, 0.05639718246459961, 0.056520927429199216, 0.05690006256103516, 0.056344833374023434, 0.056883201599121094, 0.056395423889160155, 0.056299873352050785, 0.05534899139404297, 0.056159969329833984, 0.05615574264526367, 0.05605868911743164, 0.05602316665649414, 0.056233985900878906, 0.05663302230834961, 0.05685027313232422, 0.05753200149536133, 0.05634239959716797, 0.05647849655151367, 0.05601289749145508, 0.055974014282226564, 0.0558919677734375, 0.056180736541748044, 0.056823711395263675, 0.05653497695922852, 0.05634268951416015, 0.055795711517333986, 0.05600223922729492, 0.05584121704101563, 0.05628691101074219, 0.05585343933105469, 0.05627619171142578, 0.0560810546875, 0.055874526977539064, 0.05622841644287109, 0.05593766403198242, 0.05620099258422852, 0.056438976287841794, 0.05624947357177734, 0.05590012741088867, 0.05562441635131836, 0.05616230392456055, 0.05603737640380859, 0.05576691055297851, 0.0556360969543457, 0.055640064239501956, 0.055575809478759765, 0.05573651123046875, 0.05591507339477539, 0.05578124618530273, 0.05613375854492188, 0.05600393676757812, 0.05659052658081055, 0.056603103637695315, 0.060852161407470705, 0.05919699096679688, 0.05662771224975586, 0.05653664016723633, 0.055871936798095705, 0.05612748718261719, 0.05620230484008789, 0.0563001594543457, 0.05650457763671875, 0.05610931015014648, 0.05624115371704102, 0.05653171157836914, 0.055864799499511716, 0.05612604904174805, 0.05602431869506836, 0.056277664184570315, 0.0563590087890625, 0.05580815887451172, 0.05616598510742187, 0.056061183929443356, 0.056111328125, 0.05597788619995117, 0.05625513458251953, 0.05635299301147461, 0.056079360961914064, 0.055940097808837894, 0.055877632141113284, 0.05619481658935547, 0.05607244873046875, 0.055948959350585935, 0.05631011199951172, 0.05682790374755859, 0.056282752990722655, 0.05637705612182617, 0.057256702423095704, 0.056081951141357424, 0.055966079711914064, 0.056128768920898436, 0.05618486404418945, 
0.05773712158203125, 0.05670755386352539, 0.05672380828857422, 0.056514144897460934, 0.05610947036743164, 0.055975936889648435, 0.05595769500732422, 0.056126720428466795, 0.056211040496826174, 0.05632688140869141, 0.05619068908691406, 0.05625814437866211, 0.05632931137084961, 0.056377185821533206, 0.05633436965942383, 0.05622787094116211, 0.05590745544433594, 0.055910846710205075, 0.0560766716003418, 0.056135135650634764, 0.05660316848754883, 0.057161727905273435, 0.056761856079101565, 0.05654937744140625, 0.056571872711181644, 0.05649462509155274, 0.056624576568603514, 0.056875137329101565, 0.05678124618530273, 0.05665788650512695, 0.05653916931152344, 0.05653673553466797, 0.058409313201904296, 0.05638553619384765, 0.05629465484619141, 0.05644547271728516, 0.056215774536132815, 0.05610198211669922, 0.056134559631347655, 0.056430591583251956, 0.05605542373657227, 0.05600672149658203, 0.05603702545166016, 0.05686710357666016, 0.05627699279785156, 0.056066047668457034, 0.05655561447143555, 0.056403873443603515, 0.05628623962402344, 0.055844863891601565, 0.055921630859375, 0.055959552764892576, 0.0558653450012207, 0.05579785537719727, 0.05591151809692383, 0.05582118225097656, 0.05624825668334961, 0.056338432312011716, 0.05618495941162109, 0.05606592178344726, 0.05597359848022461, 0.05606172943115234, 0.056519168853759766, 0.056387584686279295, 0.056213504791259764, 0.05689139175415039, 0.05608467102050781, 0.05586016082763672, 0.05614707183837891, 0.055964801788330076, 0.055980670928955076, 0.055828609466552735, 0.0559073600769043, 0.05596255874633789, 0.05614105606079101, 0.05652137756347656, 0.056177791595458985, 0.05603158569335937, 0.057348640441894534, 0.055971775054931644, 0.05622380828857422, 0.05736764907836914, 0.056295902252197265, 0.05635017776489258, 0.056294368743896483, 0.056043582916259764, 0.05625190353393555, 0.055927230834960935, 0.05579289627075195, 0.055933441162109375, 0.05575641632080078, 0.05577177429199219, 0.05575680160522461, 0.05625222396850586, 0.0564205436706543, 0.05605980682373047, 0.05598831939697266, 0.05651046371459961, 0.05635891342163086, 0.05596118545532226, 0.056293407440185544, 0.05658252716064453, 0.056164352416992185, 0.05607424163818359, 0.055389408111572266, 0.05604156875610351, 0.05575299072265625, 0.055869663238525394, 0.055914432525634765, 0.05628969573974609, 0.05637331390380859, 0.05680310440063477, 0.05594240188598633, 0.056310527801513674, 0.05603942489624023, 0.05637516784667969, 0.056082302093505856, 0.05595366287231445, 0.05972377777099609, 0.056322048187255856, 0.05613536071777344, 0.05591212844848633, 0.056029823303222655, 0.055786911010742186, 0.0558699836730957, 0.0557138557434082, 0.055564289093017576, 0.05579596710205078, 0.056169727325439456, 0.056658046722412106, 0.05614771270751953, 0.05616502380371094, 0.05581545639038086, 0.056091232299804686, 0.05609891128540039, 0.05736447906494141, 0.060598270416259765, 0.056997886657714845, 0.05627414321899414, 0.055989025115966796, 0.05588582229614258, 0.05597183990478516, 0.05568534469604492, 0.055803680419921876, 0.0555428466796875, 0.055440414428710935, 0.055620574951171876, 0.0561673583984375, 0.05624422454833984, 0.056357921600341795, 0.055863582611083984, 0.055894718170166016, 0.055828479766845705, 0.056033279418945314, 0.056569950103759765, 0.05600556945800781, 0.05582912063598633, 0.05549910354614258, 0.05543526458740235, 0.05566595077514649, 0.05575772857666016, 0.055707233428955075, 0.05557884979248047, 0.055287872314453125, 0.05558265686035156, 0.05559910583496094, 
0.05580335998535156, 0.05551267242431641, 0.05579203033447266, 0.055876895904541014, 0.055860031127929685, 0.05570550537109375, 0.05556633758544922, 0.0559081916809082, 0.05532067108154297, 0.059803104400634764, 0.056199710845947264, 0.05584697723388672, 0.055826431274414064, 0.05577036666870117, 0.055675647735595704, 0.05608591842651367, 0.05599087905883789, 0.05607331085205078, 0.056373344421386716, 0.056013824462890625, 0.055871009826660156, 0.055993633270263674, 0.05595852661132812, 0.05629747009277344, 0.0559185905456543, 0.05559312057495117, 0.05540192031860352, 0.05628281784057617, 0.055814559936523435, 0.05659199905395508, 0.055739070892333986, 0.055613311767578125, 0.05542684936523438, 0.05565999984741211, 0.055780223846435543, 0.05574655914306641, 0.056289279937744144, 0.056260543823242186, 0.05603868865966797, 0.05573017501831055, 0.056113887786865234, 0.05615212631225586, 0.05909481430053711, 0.05619529724121094, 0.055656448364257816, 0.05559283065795898, 0.0556360969543457, 0.05575065612792969, 0.05582438278198242, 0.05570889663696289, 0.05548521423339844, 0.05579702377319336, 0.055798206329345706, 0.055814369201660156, 0.05617846298217773, 0.05646364974975586, 0.05599462509155274, 0.056402782440185546, 0.055796638488769534, 0.05602304077148437, 0.05579679870605469, 0.05590480041503906, 0.05574812698364258, 0.05553343963623047, 0.055286079406738284, 0.055670783996582034, 0.055798912048339845, 0.0558902702331543, 0.055841537475585935, 0.05571152114868164, 0.055736320495605465, 0.055744510650634765, 0.05616831970214844, 0.05621059036254883, 0.05647974395751953, 0.0559666862487793, 0.05593907165527344, 0.05608652877807617, 0.05574627304077148, 0.055873825073242185, 0.05559500885009765, 0.055744510650634765, 0.05611119842529297, 0.05566252899169922, 0.055683040618896486, 0.05574041748046875, 0.055731231689453126, 0.057336414337158206, 0.055994335174560546, 0.055888286590576174, 0.05627091217041016, 0.05688655853271484, 0.056715553283691406, 0.056099201202392576, 0.056096927642822265, 0.05583651351928711, 0.055910400390625, 0.05563593673706055, 0.05552851104736328, 0.0557088623046875, 0.055760448455810546, 0.05550102233886719, 0.05541059112548828, 0.055457279205322264, 0.05581270217895508, 0.055691455841064455, 0.05643040084838867, 0.055992321014404295, 0.055894016265869144, 0.05660671997070312, 0.056068191528320314, 0.05576249694824219, 0.05593532943725586, 0.05598214340209961, 0.05567667388916016, 0.05623984146118164, 0.055637535095214845, 0.055645408630371096, 0.055573886871337894, 0.055839073181152346, 0.055664161682128906, 0.055992767333984374, 0.05584489440917969, 0.055938270568847655, 0.05660752105712891, 0.056016895294189455, 0.055760894775390625, 0.05635276794433594, 0.05629663848876953, 0.056371456146240236, 0.05608915328979492, 0.05572198486328125, 0.05570912170410156, 0.05549465560913086, 0.055433792114257814, 0.05565235137939453, 0.05553324890136719, 0.05584707260131836, 0.05583456039428711, 0.05585737609863281, 0.055669822692871095, 0.05592364883422852, 0.055801631927490235, 0.0556627197265625, 0.05617782211303711, 0.05900576019287109, 0.058695808410644534, 0.056368896484375, 0.056205440521240234, 0.05569340896606445, 0.05553081512451172, 0.05557731246948242, 0.05554380798339844, 0.055717792510986325, 0.055578784942626955, 0.05558595275878906, 0.055638015747070314, 0.05576131057739258, 0.05857523345947266, 0.05621712112426758, 0.05604201507568359, 0.05567628860473633, 0.05589404678344727, 0.056084449768066404, 0.0562213134765625, 0.05621241760253906, 
0.05688115310668945, 0.05583871841430664, 0.05556838226318359, 0.05573427200317383, 0.05616659164428711, 0.055979839324951174, 0.055589054107666014, 0.055510848999023435, 0.05607014465332031, 0.05567897415161133, 0.05602099227905273, 0.056784767150878906, 0.05580582427978516, 0.05592211151123047, 0.056032062530517575, 0.05675158309936523, 0.05593705749511719, 0.05612294387817383, 0.05623494338989258, 0.056825855255126956, 0.05736243057250977, 0.05607823944091797, 0.05596921539306641, 0.055685279846191406, 0.05505148696899414, 0.05561849594116211, 0.055478046417236325, 0.05587289428710938, 0.05688764953613281, 0.0558639030456543, 0.055721504211425785, 0.05552694320678711, 0.055837375640869144, 0.055803905487060546, 0.056309120178222656, 0.05600739288330078, 0.05568502426147461, 0.05572528076171875, 0.055691295623779294, 0.05572275161743164, 0.05565030288696289, 0.056406017303466796, 0.056256511688232425, 0.05600255966186524, 0.055697025299072264, 0.05595379257202148, 0.055651424407958984, 0.05575772857666016, 0.05603123092651367, 0.05630323028564453, 0.059773311614990235, 0.05638883209228516, 0.05937395095825195, 0.05608607864379883, 0.05569235229492187, 0.05571152114868164, 0.05580291366577148, 0.055800800323486326, 0.055588417053222657, 0.0555852165222168, 0.056569854736328126, 0.055784671783447266, 0.05579017639160156, 0.05597100830078125, 0.05598515319824219, 0.05563187026977539, 0.055820289611816405, 0.055586814880371094, 0.056026718139648435, 0.05568569564819336, 0.05629526519775391, 0.05621964645385742, 0.05596713638305664, 0.055726688385009764, 0.05560550308227539, 0.055725791931152346, 0.05550211334228516, 0.05569612884521485, 0.055769088745117185, 0.05589737701416016, 0.05567510223388672, 0.0558001594543457, 0.056811935424804685, 0.055690273284912106, 0.05609545516967773, 0.05633772659301758, 0.05574111938476563, 0.05534527969360352, 0.05623260879516601, 0.05604774475097656, 0.05571158218383789, 0.05553363037109375, 0.05576697540283203, 0.055516799926757815, 0.05548915100097656, 0.05593667221069336, 0.055842464447021484, 0.05792550277709961, 0.056115455627441406, 0.05641247940063476, 0.05574825668334961, 0.05572438430786133, 0.055826335906982424, 0.05614396667480469, 0.055785472869873044, 0.055861248016357425, 0.05607833480834961, 0.0563870735168457, 0.05585948944091797, 0.05572201538085937, 0.05595155334472656, 0.05570150375366211, 0.055795360565185546, 0.05586723327636719, 0.05568972778320312, 0.05571142578125, 0.05611347198486328, 0.05616572952270508, 0.056208030700683594, 0.05645507049560547, 0.05623110580444336, 0.055941600799560544, 0.05560329437255859, 0.05581654357910156, 0.05608652877807617, 0.05618179321289062, 0.05656675338745117, 0.05586054229736328, 0.055726593017578124, 0.056291519165039064, 0.05566873550415039, 0.05663948822021484, 0.055827903747558597, 0.05587529754638672, 0.056132671356201175, 0.05595334243774414, 0.05581187057495117, 0.055835968017578126, 0.05673855972290039, 0.056095008850097654, 0.05589987182617188, 0.0561580810546875, 0.05596995162963867, 0.055989952087402345, 0.055925216674804684, 0.05579872131347656, 0.055586814880371094, 0.055583614349365235, 0.05584431838989258, 0.05583107376098633]",tokens/s,17.817408391006857,, 
4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,900.878336,584.974336,0.0,182.452224,179.733504,s,1,7.7556279296875,7.7556279296875,0.0,7.7556279296875,7.7556279296875,7.7556279296875,7.7556279296875,[7.7556279296875],,kWh,2.1330402537508535e-05,2.3457023701911522e-06,6.58278304400961e-06,3.02588879517093e-05,,MB,1316.655104,681.443328,0.0,266.338304,224.295424,s,10,0.26677497673034667,0.026677497673034668,0.0002502789886672474,0.02667860794067383,0.02709178295135498,0.027117443752288816,0.02713797239303589,"[0.026726879119873048, 0.02668025588989258, 0.02643212890625, 0.02708608055114746, 0.027143104553222656, 0.026715328216552734, 0.02644742393493652, 0.026376447677612304, 0.026490367889404298, 0.026676959991455078]",tokens/s,9596.102420758978,kWh,7.708575346306464e-07,8.50115068008032e-08,5.075253253983983e-07,1.363394366829848e-06,tokens/kWh,187766655.2160171,MB,1355.341824,694.02624,0.0,278.921216,224.297984,s,10,11.850684204101562,1.1850684204101563,0.010149397751049907,1.1846315307617188,1.1937120483398438,1.200467596435547,1.2058720349121095,"[1.20722314453125, 1.1884498291015626, 1.1922108154296875, 1.1907896728515626, 1.1864822998046876, 1.182372802734375, 1.1714342041015624, 1.1713831787109374, 1.18278076171875, 1.1775574951171874]",tokens/s,53.161487484575346,kWh,3.426520432744976e-05,3.7790001923389995e-06,1.2748040834601944e-05,5.07922453543907e-05,tokens/kWh,1240346.8198822995,,s,630,11.845569683074952,0.01880249156043643,0.00044349340531640874,0.018703136444091797,0.019063356018066406,0.01935838384628296,0.020638778514862077,"[0.019151424407958983, 0.018990240097045898, 0.018894912719726563, 0.01883590316772461, 0.018712127685546875, 0.018815231323242188, 0.018682367324829103, 0.01865116882324219, 0.018734207153320314, 0.01866851234436035, 0.01869526481628418, 0.01880531120300293, 0.01955673599243164, 0.019005407333374025, 0.018812063217163087, 0.018884576797485352, 0.018993120193481445, 0.01874947166442871, 0.018703136444091797, 0.018778112411499022, 0.018931711196899414, 0.018822463989257812, 0.018766592025756836, 0.01891913604736328, 0.018907487869262694, 0.018926687240600586, 0.01875606346130371, 0.018997631072998046, 0.01879852867126465, 0.018808799743652342, 0.018745471954345703, 0.01878620719909668, 0.01889206314086914, 0.018952287673950196, 0.019044992446899413, 0.01884774398803711, 0.019775711059570312, 0.025347808837890624, 0.021063072204589844, 0.019607263565063475, 0.021778911590576173, 0.01981180763244629, 0.019038816452026368, 0.019040191650390625, 0.01926803207397461, 0.018995168685913087, 0.019100896835327147, 0.019077152252197267, 0.019153984069824218, 0.019019519805908203, 0.018960384368896483, 0.019058687210083008, 
0.02165555191040039, 0.01889023971557617, 0.018741952896118165, 0.01877791976928711, 0.01879612731933594, 0.01899305534362793, 0.01876019287109375, 0.018747392654418944, 0.018671871185302735, 0.01872640037536621, 0.01875551986694336, 0.018448383331298827, 0.01876780891418457, 0.018759103775024415, 0.018892704010009767, 0.018952287673950196, 0.018860671997070314, 0.018796735763549805, 0.018845024108886718, 0.018856416702270506, 0.018788095474243163, 0.018688255310058594, 0.018905088424682616, 0.018832864761352538, 0.018784799575805665, 0.018744512557983397, 0.018886655807495118, 0.0189116153717041, 0.018784832000732422, 0.018881568908691405, 0.01913225555419922, 0.018856128692626952, 0.01882521629333496, 0.018715456008911134, 0.018876415252685547, 0.01923855972290039, 0.018716255187988282, 0.01872768020629883, 0.019152992248535155, 0.018703712463378906, 0.018747743606567384, 0.019251487731933595, 0.01879033660888672, 0.01877984046936035, 0.018952735900878905, 0.01900111961364746, 0.018786304473876952, 0.01884569549560547, 0.018769920349121092, 0.018800640106201173, 0.01894419288635254, 0.019067935943603516, 0.019339616775512696, 0.019116704940795898, 0.01902288055419922, 0.018889471054077147, 0.019038047790527344, 0.019314815521240234, 0.018807872772216797, 0.018877151489257813, 0.018741504669189453, 0.018724863052368163, 0.018728416442871095, 0.018688896179199218, 0.018806047439575195, 0.01887068748474121, 0.018612192153930663, 0.01861222457885742, 0.019133951187133787, 0.01905411148071289, 0.018600608825683592, 0.018612543106079103, 0.01868191909790039, 0.01862444877624512, 0.01841152000427246, 0.018660575866699218, 0.018678464889526368, 0.0186778564453125, 0.0186429443359375, 0.018671615600585938, 0.018692096710205077, 0.01866067123413086, 0.01867215919494629, 0.018769920349121092, 0.018669343948364257, 0.018742816925048828, 0.01869500732421875, 0.01879859161376953, 0.018722816467285155, 0.018667423248291015, 0.018964223861694336, 0.01882761573791504, 0.019021055221557618, 0.019138496398925783, 0.01891164779663086, 0.01893212890625, 0.018888832092285156, 0.018808704376220703, 0.018732959747314454, 0.018686048507690428, 0.018694175720214843, 0.0195664005279541, 0.020873376846313477, 0.019119327545166015, 0.018994047164916993, 0.018831264495849608, 0.018894752502441405, 0.018819520950317383, 0.02029545593261719, 0.018624704360961915, 0.01871628761291504, 0.0186911678314209, 0.01870742416381836, 0.018666559219360352, 0.018567552566528322, 0.0185677433013916, 0.018562623977661133, 0.01854934310913086, 0.0185567684173584, 0.018677759170532226, 0.018630016326904298, 0.018573951721191407, 0.018738176345825194, 0.018724159240722658, 0.018808223724365233, 0.019005727767944337, 0.018916576385498048, 0.019030719757080077, 0.019355743408203126, 0.0194150390625, 0.019458047866821288, 0.01945599937438965, 0.01943779182434082, 0.019480607986450196, 0.019431167602539063, 0.019283008575439454, 0.019186336517333983, 0.018967199325561523, 0.018976959228515625, 0.019037727355957032, 0.019101984024047853, 0.0196177921295166, 0.019167232513427734, 0.019111936569213867, 0.018865503311157227, 0.019315296173095704, 0.018791967391967774, 0.019010080337524413, 0.01871049690246582, 0.018810911178588866, 0.01860403251647949, 0.018646848678588866, 0.019765439987182616, 0.021403743743896485, 0.019025888442993164, 0.01874336051940918, 0.01867865562438965, 0.01858892822265625, 0.01865247917175293, 0.019726783752441406, 0.018896160125732423, 0.018662111282348633, 0.01864614486694336, 0.018553728103637694, 
0.018553951263427734, 0.018645023345947264, 0.018633152008056642, 0.018678207397460938, 0.018667520523071288, 0.01857155227661133, 0.018827999114990234, 0.01871993637084961, 0.018719839096069335, 0.01879750442504883, 0.018872447967529297, 0.018951839447021484, 0.01899958419799805, 0.01886089515686035, 0.018832447052001954, 0.018808511734008788, 0.018887136459350588, 0.01884435272216797, 0.01870537567138672, 0.018792448043823243, 0.01880678367614746, 0.018771392822265625, 0.018868799209594726, 0.01882521629333496, 0.0188723201751709, 0.01884569549560547, 0.0189333438873291, 0.018968992233276367, 0.018759679794311524, 0.018615936279296873, 0.018659551620483397, 0.01861020851135254, 0.018601951599121095, 0.018569183349609376, 0.018921791076660158, 0.01917145538330078, 0.01857926368713379, 0.0189051513671875, 0.018725088119506836, 0.01874073600769043, 0.01880086326599121, 0.01890278434753418, 0.018875808715820314, 0.01880729675292969, 0.018925888061523437, 0.018819360733032225, 0.018831104278564454, 0.01869824028015137, 0.018838848114013672, 0.018795455932617187, 0.018814720153808594, 0.01876393508911133, 0.018828351974487303, 0.019360544204711914, 0.018744415283203125, 0.018707359313964844, 0.018857599258422852, 0.018837087631225585, 0.018950464248657227, 0.01880339241027832, 0.018763647079467774, 0.01880035209655762, 0.01877180862426758, 0.01891481590270996, 0.018702175140380858, 0.018752511978149415, 0.01883545684814453, 0.01875488090515137, 0.018755552291870117, 0.018875104904174805, 0.018694143295288086, 0.018683904647827147, 0.01865727996826172, 0.01864499282836914, 0.018771583557128907, 0.01883785629272461, 0.018950048446655272, 0.01886534309387207, 0.018799552917480467, 0.018852991104125978, 0.018677663803100587, 0.018789344787597657, 0.01930988883972168, 0.01895244789123535, 0.018983423233032228, 0.019060800552368164, 0.01888844871520996, 0.01952720069885254, 0.018891040802001952, 0.01886240005493164, 0.01871017646789551, 0.01867123222351074, 0.01870476722717285, 0.018788639068603515, 0.018694463729858397, 0.018745407104492188, 0.018710367202758788, 0.018743135452270507, 0.018681760787963866, 0.01854755210876465, 0.01876313591003418, 0.018694623947143555, 0.018797536849975587, 0.018731168746948242, 0.01914556884765625, 0.01864054489135742, 0.018642175674438478, 0.018762687683105468, 0.018587648391723634, 0.018660863876342772, 0.018651136398315428, 0.018645503997802734, 0.018581504821777343, 0.01879859161376953, 0.01871993637084961, 0.018830144882202148, 0.01884774398803711, 0.018757728576660155, 0.019031967163085937, 0.018837440490722657, 0.019062847137451173, 0.01904025650024414, 0.019050111770629884, 0.018913663864135743, 0.018865888595581054, 0.018761600494384766, 0.018620832443237305, 0.01999667167663574, 0.0187906551361084, 0.018735040664672853, 0.01872467231750488, 0.019826784133911132, 0.01867350387573242, 0.018604095458984376, 0.018564191818237305, 0.01864588737487793, 0.018726367950439454, 0.01858208084106445, 0.018685951232910156, 0.019347679138183593, 0.01875312042236328, 0.018714815139770507, 0.018785472869873046, 0.01861471939086914, 0.018547231674194337, 0.018570720672607424, 0.01854627227783203, 0.018598527908325196, 0.01867100715637207, 0.018671808242797853, 0.018614847183227538, 0.018604095458984376, 0.018614208221435547, 0.018509824752807616, 0.01865727996826172, 0.0185380802154541, 0.018567583084106446, 0.018598016738891603, 0.019027008056640624, 0.018549568176269533, 0.018571264266967775, 0.018648319244384766, 0.01833795166015625, 0.018616159439086913, 
0.018728736877441407, 0.01862883186340332, 0.018646560668945312, 0.01864374351501465, 0.018625280380249024, 0.01862505531311035, 0.0186778564453125, 0.01855264091491699, 0.018592159271240236, 0.01851171112060547, 0.018679264068603516, 0.01859459114074707, 0.018619392395019533, 0.01858857536315918, 0.01861846351623535, 0.018582815170288085, 0.018613183975219726, 0.018786079406738283, 0.018581504821777343, 0.018570432662963866, 0.018477888107299806, 0.018538272857666016, 0.019082912445068358, 0.018557823181152344, 0.01858937644958496, 0.0185262393951416, 0.018570207595825197, 0.018568191528320312, 0.019167232513427734, 0.018585567474365235, 0.01857561683654785, 0.018574623107910155, 0.018605728149414063, 0.018547487258911134, 0.01850579261779785, 0.018487295150756835, 0.018490495681762694, 0.01850457572937012, 0.018542591094970702, 0.01845452880859375, 0.018511871337890624, 0.01851116752624512, 0.01847395133972168, 0.018502527236938477, 0.018545503616333007, 0.018499584197998048, 0.018569215774536133, 0.01860799980163574, 0.018632831573486327, 0.018564735412597656, 0.018467199325561522, 0.018646528244018554, 0.01891321563720703, 0.018452863693237304, 0.018581695556640625, 0.018515680313110353, 0.018495359420776368, 0.018579328536987304, 0.018510080337524413, 0.018442527770996094, 0.018559263229370116, 0.018436288833618163, 0.018628671646118165, 0.01862246322631836, 0.018593791961669923, 0.018552576065063477, 0.018512128829956054, 0.01862451171875, 0.01861203193664551, 0.018546880722045897, 0.018636608123779298, 0.018860223770141602, 0.01862451171875, 0.01855219268798828, 0.018723743438720703, 0.018663135528564453, 0.018513824462890623, 0.018713727951049804, 0.018545631408691407, 0.01863065528869629, 0.018925535202026368, 0.018520320892333984, 0.018703136444091797, 0.0186790714263916, 0.01852387237548828, 0.018561376571655273, 0.018502656936645507, 0.018557600021362305, 0.018552255630493165, 0.01866991996765137, 0.01857302474975586, 0.01852672004699707, 0.01847228813171387, 0.018576128005981445, 0.018589599609375, 0.018591808319091796, 0.01846463966369629, 0.018503904342651367, 0.01852422332763672, 0.018599679946899414, 0.01904025650024414, 0.018490463256835937, 0.018586559295654295, 0.018540544509887694, 0.018486751556396484, 0.018567968368530273, 0.01844723129272461, 0.018486143112182617, 0.018661376953125, 0.018579456329345705, 0.01858780860900879, 0.018542367935180663, 0.01861244773864746, 0.018628639221191408, 0.018556896209716796, 0.01850886344909668, 0.018596639633178712, 0.018567455291748046, 0.01849728012084961, 0.01851388740539551, 0.018489343643188477, 0.018529792785644532, 0.018633440017700197, 0.018542367935180663, 0.01869740867614746, 0.018660287857055664, 0.01902387237548828, 0.020239551544189452, 0.01892639923095703, 0.01869603157043457, 0.01861347198486328, 0.018666431427001952, 0.01866707229614258, 0.018569440841674806, 0.018634431838989256, 0.018618335723876955, 0.01945043182373047, 0.018597888946533202, 0.01859174346923828, 0.018610015869140625, 0.018662975311279296, 0.0185916805267334, 0.018588064193725586, 0.01858995246887207, 0.018692096710205077, 0.01853558349609375, 0.018578271865844726, 0.018591840744018553, 0.018554271697998045, 0.018536928176879883, 0.018541696548461915, 0.018506464004516603, 0.018589344024658203, 0.018577983856201172, 0.018601951599121095, 0.018763776779174804, 0.01943142318725586, 0.01863270378112793, 0.018610176086425782, 0.018503679275512695, 0.018544639587402344, 0.018560192108154298, 0.0186561279296875, 0.01861625671386719, 
0.01863884735107422, 0.018518016815185546, 0.018567455291748046, 0.018607839584350586, 0.018550912857055665, 0.018697568893432617, 0.018506271362304687, 0.018530303955078126, 0.018621696472167968, 0.018593568801879883, 0.018573951721191407, 0.018622079849243165, 0.018917728424072265, 0.019640703201293946, 0.020077856063842773, 0.01868435287475586, 0.019263776779174804, 0.02029363250732422, 0.0187064323425293, 0.018808832168579103, 0.018741247177124023, 0.018782527923583984, 0.019007455825805663, 0.020779008865356444, 0.01866729545593262, 0.01862063980102539, 0.018563072204589845, 0.01862860870361328, 0.018620288848876954, 0.018606208801269532, 0.01863270378112793, 0.01865875244140625, 0.01870697593688965, 0.018622495651245116, 0.018693439483642577, 0.01857209587097168, 0.01865020751953125, 0.018631296157836916, 0.01858550453186035, 0.018673280715942382, 0.018684032440185547, 0.01864463996887207, 0.01860492706298828, 0.01870844841003418, 0.018626848220825196, 0.018773727416992188, 0.018687999725341797, 0.01863680076599121, 0.018601984024047852, 0.019099647521972657, 0.01852012825012207, 0.018646528244018554, 0.01880131149291992, 0.018652671813964843, 0.018585887908935547, 0.018768032073974608, 0.018601823806762695, 0.018650976181030274, 0.018634271621704102, 0.0186213436126709, 0.018736928939819337, 0.018736864089965822, 0.018716896057128906, 0.01860201644897461, 0.01871219253540039, 0.018640960693359375, 0.018616607666015625, 0.01880508804321289, 0.018714048385620116, 0.01856537628173828, 0.018567136764526367, 0.018587039947509765, 0.018556896209716796, 0.018616960525512694, 0.018638879776000976, 0.018601343154907225, 0.018569791793823242, 0.018558879852294922, 0.01859187126159668, 0.018511104583740234, 0.01858585548400879, 0.018565631866455077, 0.01893939208984375, 0.01856972885131836, 0.018663488388061523, 0.018630464553833007]",tokens/s,53.184440837839084,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,876.474368,572.391424,0.0,169.869312,150.669312,s,1,7.59078466796875,7.59078466796875,0.0,7.59078466796875,7.59078466796875,7.59078466796875,7.59078466796875,[7.59078466796875],,kWh,1.460183888334541e-05,1.603265505189464e-06,4.730559339971574e-06,2.0935663728506447e-05,,MB,1295.13472,633.208832,0.0,209.7152,193.680384,s,10,0.16802544021606444,0.016802544021606445,0.00013902293019306107,0.016764927864074707,0.016980772018432618,0.01706070613861084,0.017124653434753417,"[0.016963008880615235, 0.016808223724365235, 0.016803936004638673, 0.01672015953063965, 0.01669718360900879, 0.016670976638793945, 0.01669145584106445, 0.016802431106567383, 0.017140640258789062, 
0.01672742462158203]",tokens/s,15235.788084876242,kWh,5.014886350372158e-07,5.53052150741272e-08,1.9431907489006882e-07,7.511129250014119e-07,tokens/kWh,340827579.2877866,MB,1334.075392,637.403136,0.0,211.812352,193.682944,s,10,10.053950988769532,1.0053950988769533,0.0032120955344396864,1.0041283874511717,1.0090252136230469,1.0103114654541017,1.0113404669189454,"[1.0085194702148437, 1.0115977172851562, 1.0072411499023437, 1.0040411987304687, 1.0016893310546875, 1.0020423583984375, 1.004215576171875, 1.0030364379882812, 1.002828369140625, 1.0087393798828126]",tokens/s,62.66193267738452,kWh,2.9254223513717752e-05,3.2262458015114492e-06,1.0107105832909215e-05,4.25875751481384e-05,tokens/kWh,1479304.6981627427,,s,630,10.049367769241345,0.015951377411494178,0.00022637587456587172,0.015909104347229003,0.01603966007232666,0.016172957134246827,0.016641813220977783,"[0.015998432159423828, 0.01618409538269043, 0.016045663833618166, 0.015980704307556153, 0.015955519676208497, 0.015972800254821777, 0.016001279830932618, 0.015996543884277344, 0.016068735122680665, 0.01598464012145996, 0.01591500759124756, 0.015935487747192383, 0.01594777584075928, 0.015958016395568847, 0.015933024406433106, 0.015933856010437012, 0.01595417594909668, 0.015947520256042482, 0.015935359954833986, 0.01617228889465332, 0.015989248275756835, 0.015945376396179198, 0.016022239685058594, 0.016130048751831053, 0.01601535987854004, 0.015959327697753906, 0.01595907211303711, 0.015945407867431642, 0.016183296203613282, 0.0160830078125, 0.015991071701049804, 0.015963839530944823, 0.01606857681274414, 0.016154975891113282, 0.01603753662109375, 0.016049503326416015, 0.015944704055786133, 0.016123519897460938, 0.015957663536071778, 0.016031103134155275, 0.016374975204467773, 0.016213823318481445, 0.016031648635864256, 0.01597433567047119, 0.015967583656311034, 0.015934271812438963, 0.01596947193145752, 0.01601430320739746, 0.016031103134155275, 0.016013792037963867, 0.01606800079345703, 0.01596275234222412, 0.01593545627593994, 0.01590681552886963, 0.015935744285583496, 0.01590841579437256, 0.015899968147277833, 0.01590384006500244, 0.015929280281066896, 0.01595580768585205, 0.015912896156311036, 0.015894911766052247, 0.015918496131896973, 0.01594262409210205, 0.01587382411956787, 0.01590019226074219, 0.015878623962402343, 0.015925248146057128, 0.015919103622436523, 0.016082271575927735, 0.015891103744506835, 0.015887871742248535, 0.01586457633972168, 0.015865568161010743, 0.015829024314880372, 0.01589568042755127, 0.01581510353088379, 0.01585913562774658, 0.01588278388977051, 0.015870047569274903, 0.01590726375579834, 0.016036895751953124, 0.015915712356567382, 0.01585529613494873, 0.015922783851623535, 0.01591801643371582, 0.015900671958923338, 0.015882240295410157, 0.015874048233032227, 0.015824895858764648, 0.015896575927734375, 0.015887935638427733, 0.01590112018585205, 0.015986175537109376, 0.015974623680114746, 0.015953984260559082, 0.016109792709350586, 0.01600726318359375, 0.015965824127197267, 0.01590924835205078, 0.01594313621520996, 0.015918911933898924, 0.016163455963134767, 0.0162890567779541, 0.01593446445465088, 0.015946592330932617, 0.016460927963256836, 0.015963168144226075, 0.01585641574859619, 0.015921216011047363, 0.015893728256225585, 0.015866656303405762, 0.017776512145996095, 0.020121599197387697, 0.01644905662536621, 0.01613667106628418, 0.016000864028930663, 0.015938847541809083, 0.015952768325805663, 0.015988287925720216, 0.016246816635131837, 0.01588809585571289, 0.015929023742675782, 0.01593887996673584, 
0.015913887977600096, 0.015889408111572266, 0.01590112018585205, 0.01591820812225342, 0.015899200439453125, 0.015892671585083007, 0.01591644763946533, 0.015915552139282228, 0.016062559127807616, 0.015906240463256834, 0.015899200439453125, 0.015859392166137694, 0.01592966365814209, 0.015971487998962402, 0.01587235164642334, 0.015880703926086426, 0.01588636779785156, 0.015884256362915038, 0.015915295600891114, 0.015950624465942382, 0.015954879760742186, 0.01596396827697754, 0.015943872451782228, 0.01593958377838135, 0.01595779228210449, 0.01602707290649414, 0.015946687698364256, 0.015948800086975096, 0.015894783973693847, 0.01589718437194824, 0.01589452838897705, 0.01601945686340332, 0.0159267520904541, 0.016334623336791993, 0.015966976165771484, 0.015949824333190917, 0.016401536941528322, 0.016075103759765626, 0.015995200157165528, 0.01592751979827881, 0.016545791625976563, 0.016009183883666993, 0.016045536041259766, 0.01596406364440918, 0.01594156837463379, 0.016452320098876955, 0.016109792709350586, 0.015940928459167482, 0.015954400062561037, 0.01599078369140625, 0.015954943656921388, 0.01601446342468262, 0.016002464294433593, 0.015924927711486816, 0.015917856216430663, 0.015943167686462402, 0.0159619197845459, 0.015984992027282714, 0.015968928337097166, 0.015944543838500976, 0.015961983680725097, 0.01595900821685791, 0.01593958377838135, 0.015935808181762694, 0.015990464210510254, 0.015936287879943847, 0.015867903709411622, 0.015837183952331545, 0.01588633632659912, 0.016000383377075194, 0.01605081558227539, 0.015908864021301268, 0.015911231994628905, 0.0159202880859375, 0.015905471801757814, 0.01592713642120361, 0.01601126480102539, 0.015892479896545412, 0.015906208038330077, 0.015901280403137208, 0.01592908763885498, 0.01587337589263916, 0.01588111972808838, 0.015963232040405274, 0.01595689582824707, 0.01594179153442383, 0.015963583946228026, 0.015927935600280763, 0.015918880462646483, 0.015918975830078126, 0.015923328399658203, 0.015960063934326172, 0.01590272045135498, 0.01597644805908203, 0.01600102424621582, 0.01589574432373047, 0.01592198371887207, 0.015946975708007814, 0.015886303901672363, 0.015973312377929687, 0.015971296310424803, 0.01591926383972168, 0.016246912002563476, 0.016113983154296876, 0.015954336166381835, 0.01597225570678711, 0.015910911560058593, 0.015854687690734864, 0.016153440475463868, 0.01590073585510254, 0.015880191802978515, 0.015893664360046387, 0.01594825553894043, 0.015851807594299317, 0.015857760429382323, 0.015908320426940917, 0.01587846374511719, 0.01585599994659424, 0.01592713642120361, 0.015882559776306152, 0.016009183883666993, 0.015880031585693358, 0.01586367988586426, 0.015871999740600586, 0.015905887603759765, 0.01587401580810547, 0.015889344215393068, 0.015884287834167482, 0.015815679550170898, 0.01583737564086914, 0.01586467170715332, 0.015867072105407713, 0.015914175987243654, 0.015891039848327636, 0.015863936424255373, 0.01589235210418701, 0.015834367752075196, 0.01584607982635498, 0.01588041591644287, 0.015860896110534668, 0.01586246395111084, 0.01584931182861328, 0.01587756824493408, 0.015891136169433592, 0.015796256065368653, 0.01593139171600342, 0.015906463623046874, 0.015886783599853516, 0.01589647960662842, 0.015851519584655763, 0.01588633632659912, 0.015851519584655763, 0.015929344177246094, 0.01595302391052246, 0.01591107177734375, 0.01589647960662842, 0.015895359992980956, 0.01577340793609619, 0.015852928161621094, 0.0160283203125, 0.015880512237548827, 0.015894463539123535, 0.015887680053710936, 0.015878944396972655, 
0.015976351737976076, 0.016336000442504883, 0.015926143646240235, 0.015974592208862305, 0.015937408447265624, 0.015855551719665528, 0.016091136932373046, 0.015876095771789552, 0.015925248146057128, 0.015925024032592775, 0.015957632064819337, 0.015841312408447265, 0.0158437442779541, 0.01590233612060547, 0.015835519790649413, 0.015892640113830566, 0.015913984298706055, 0.015833760261535646, 0.015821151733398438, 0.01584332847595215, 0.015822784423828125, 0.015853631973266603, 0.01590681552886963, 0.015852928161621094, 0.015856255531311036, 0.015968223571777344, 0.01581059169769287, 0.015781760215759277, 0.015881343841552733, 0.015867679595947266, 0.015859199523925782, 0.015808768272399902, 0.015912608146667482, 0.015923487663269044, 0.01594371223449707, 0.015898783683776854, 0.015920991897583007, 0.015893792152404784, 0.015883551597595216, 0.015890144348144532, 0.015912960052490235, 0.015865856170654297, 0.015888383865356445, 0.015998687744140625, 0.01593990421295166, 0.01592428779602051, 0.015876511573791503, 0.01585334396362305, 0.015893343925476075, 0.01586508846282959, 0.015942272186279298, 0.01589414405822754, 0.01589081573486328, 0.015921152114868165, 0.015921024322509764, 0.015910304069519043, 0.015922975540161134, 0.015870911598205566, 0.015895711898803712, 0.01585852813720703, 0.015857215881347655, 0.015919903755187988, 0.015883935928344726, 0.015859711647033693, 0.015999232292175293, 0.015953215599060058, 0.01589200019836426, 0.015870528221130373, 0.016091487884521485, 0.015847423553466796, 0.015865856170654297, 0.015896032333374024, 0.015913503646850586, 0.01585955238342285, 0.01593769645690918, 0.015931136131286622, 0.015910240173339845, 0.015923744201660157, 0.015978879928588867, 0.015898015975952147, 0.015878751754760743, 0.015879424095153808, 0.01584748840332031, 0.015891136169433592, 0.01593958377838135, 0.015865535736083985, 0.015898528099060057, 0.015841695785522462, 0.015876095771789552, 0.015857664108276368, 0.015850111961364745, 0.015861663818359375, 0.015840383529663087, 0.01584233570098877, 0.015873536109924317, 0.015849120140075685, 0.015868895530700682, 0.01582265567779541, 0.015863807678222656, 0.016039007186889647, 0.0158503999710083, 0.015931136131286622, 0.01588601589202881, 0.015864383697509764, 0.015869888305664062, 0.01582703971862793, 0.01663692855834961, 0.015907103538513184, 0.0158786563873291, 0.015888447761535644, 0.015865983963012694, 0.015874048233032227, 0.015844544410705565, 0.015887167930603026, 0.015853599548339845, 0.016550943374633788, 0.01588111972808838, 0.015849504470825195, 0.015841279983520508, 0.015964159965515135, 0.015865856170654297, 0.01582806396484375, 0.015870335578918458, 0.01587459182739258, 0.015839232444763183, 0.015855615615844726, 0.01589360046386719, 0.015884448051452636, 0.015878911972045898, 0.015937888145446777, 0.01607868766784668, 0.01586678409576416, 0.015823840141296387, 0.015831135749816896, 0.015834976196289062, 0.015834367752075196, 0.01585580825805664, 0.0158471040725708, 0.015837120056152343, 0.015807071685791017, 0.01586630439758301, 0.015824799537658692, 0.015831040382385253, 0.01584665584564209, 0.016791776657104494, 0.01659753608703613, 0.016643808364868163, 0.01592144012451172, 0.015871071815490724, 0.01593158435821533, 0.015874303817749024, 0.015887871742248535, 0.01587459182739258, 0.015882080078125, 0.015947680473327636, 0.015867775917053222, 0.015911711692810058, 0.015808768272399902, 0.015967167854309083, 0.01583353614807129, 0.015937567710876465, 0.015908512115478515, 0.015864319801330566, 
0.01588278388977051, 0.015880127906799317, 0.015887776374816896, 0.015900992393493654, 0.015883999824523924, 0.015902624130249024, 0.015873791694641114, 0.01588665580749512, 0.015866175651550293, 0.01587548828125, 0.01590944004058838, 0.015994815826416015, 0.015902815818786623, 0.01590227222442627, 0.015980992317199707, 0.01599078369140625, 0.016006208419799803, 0.01587705612182617, 0.016046016693115235, 0.015896896362304687, 0.015916128158569336, 0.01588924789428711, 0.01586975955963135, 0.01589583969116211, 0.015855968475341795, 0.015954303741455077, 0.015975775718688964, 0.01596063995361328, 0.016077056884765625, 0.015930848121643068, 0.015929727554321288, 0.015908960342407227, 0.015815839767456055, 0.01585536003112793, 0.01583558368682861, 0.015829471588134764, 0.01584547233581543, 0.015831040382385253, 0.015831040382385253, 0.015955231666564942, 0.015894463539123535, 0.015883040428161622, 0.015873184204101564, 0.015932191848754884, 0.01587622356414795, 0.015945183753967286, 0.015871456146240234, 0.015909695625305177, 0.01597366428375244, 0.015910976409912108, 0.016337215423583986, 0.015941856384277343, 0.015997247695922853, 0.015839232444763183, 0.01585993576049805, 0.015886112213134764, 0.01582694435119629, 0.01580031967163086, 0.015964159965515135, 0.01589840030670166, 0.015882304191589355, 0.0161814079284668, 0.015912960052490235, 0.015885791778564452, 0.015860480308532716, 0.015971327781677248, 0.01589020824432373, 0.015924223899841307, 0.015857664108276368, 0.015903008460998534, 0.01589628791809082, 0.016005119323730468, 0.015839232444763183, 0.015849663734436036, 0.01579520034790039, 0.015886752128601075, 0.015978464126586912, 0.016173503875732423, 0.015986880302429198, 0.016252735137939452, 0.015908864021301268, 0.015888671875, 0.015890144348144532, 0.015812800407409667, 0.01592300796508789, 0.01583676815032959, 0.01588060760498047, 0.015880319595336916, 0.015953791618347167, 0.015906880378723145, 0.016118047714233398, 0.015904704093933106, 0.015907999992370605, 0.015901503562927247, 0.01593126392364502, 0.015921024322509764, 0.015914048194885254, 0.01592742443084717, 0.01598752021789551, 0.015928704261779784, 0.015878527641296387, 0.015915167808532714, 0.015859840393066406, 0.01584323215484619, 0.015869471549987794, 0.015862112045288087, 0.015906880378723145, 0.01588441562652588, 0.015824864387512207, 0.015871456146240234, 0.015825471878051757, 0.015862879753112794, 0.0158503999710083, 0.015883520126342775, 0.015952799797058107, 0.015959263801574707, 0.015895999908447266, 0.015872032165527343, 0.015843456268310546, 0.015845919609069826, 0.01586742401123047, 0.015857343673706056, 0.015878239631652833, 0.016085695266723633, 0.016257152557373047, 0.01600502395629883, 0.0158535680770874, 0.01592460823059082, 0.015948415756225586, 0.015896384239196776, 0.015886207580566407, 0.0159235200881958, 0.01588825607299805, 0.015859007835388184, 0.015885120391845704, 0.01592848014831543, 0.015920991897583007, 0.01662233543395996, 0.016771327972412108, 0.016887935638427734, 0.015970175743103027, 0.01626553535461426, 0.01646883201599121, 0.01686000061035156, 0.016017375946044923, 0.01614236831665039, 0.015947360038757324, 0.015954336166381835, 0.01593564796447754, 0.015904607772827147, 0.015940959930419923, 0.016166624069213868, 0.01594991970062256, 0.015967071533203123, 0.01590892791748047, 0.01587603187561035, 0.015964159965515135, 0.015930848121643068, 0.01596668815612793, 0.01590041637420654, 0.01590662384033203, 0.01590950393676758, 0.01590873622894287, 0.015953920364379884, 
0.01590998363494873, 0.01592105579376221, 0.015873023986816406, 0.015861311912536622, 0.015931008338928222, 0.01593017578125, 0.016006336212158204, 0.015892319679260254, 0.015899840354919433, 0.015900447845458986, 0.015891712188720705, 0.015987104415893554, 0.01593337631225586, 0.016133567810058595, 0.01589142417907715]",tokens/s,62.69051093226746,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1068.724224,1620.967424,0.0,1218.445312,1206.173696,s,1,9.013009765625,9.013009765625,0.0,9.013009765625,9.013009765625,9.013009765625,9.013009765625,[9.013009765625],,kWh,5.871187393750005e-05,6.469290850468602e-06,1.9600571235983688e-05,8.478173602395234e-05,,MB,1389.395968,1918.763008,0.0,1503.657984,1463.228416,s,10,1.8191019897460936,0.1819101989746094,0.00048335684475992445,0.18189046478271487,0.1824812774658203,0.1825545425415039,0.18261315460205077,"[0.18168630981445313, 0.18224217224121095, 0.18170877075195313, 0.18207215881347658, 0.18217864990234375, 0.18157958984375, 0.18089173889160157, 0.18164979553222657, 0.1826278076171875, 0.18246499633789062]",tokens/s,1407.287779591357,kWh,5.499645672067445e-06,6.061535600429272e-07,3.6578784407039397e-06,9.763677672814314e-06,tokens/kWh,26219628.359178487,MB,1409.585152,1918.763008,0.0,1503.657984,1463.230976,s,10,19.005864379882812,1.9005864379882813,0.008079249480737941,1.9010186767578126,1.9096440673828126,1.9129290893554687,1.9155571069335937,"[1.9039832763671876, 1.901081298828125, 1.9089140625, 1.8956964111328125, 1.9009560546875, 1.916214111328125, 1.891734619140625, 1.894821044921875, 1.90500146484375, 1.8874620361328125]",tokens/s,33.14766365831999,kWh,5.601272070667822e-05,6.178361998942703e-06,2.889992538329759e-05,9.109100808891848e-05,tokens/kWh,691616.0148156727,,s,630,19.002603410720834,0.03016286255669972,0.0004757813832224128,0.030072416305541992,0.03052075538635254,0.030827867984771728,0.03179479892730713,"[0.030644256591796874, 0.03059903907775879, 0.030250463485717773, 0.03037251281738281, 0.030352928161621093, 0.030471744537353514, 0.03016387176513672, 0.030273536682128906, 0.030091264724731445, 0.03016294479370117, 0.030250431060791016, 0.030058624267578125, 0.030126527786254884, 0.03015884780883789, 0.03028793525695801, 0.030465984344482423, 0.03027699279785156, 0.03026598358154297, 0.030193119049072265, 0.03009129524230957, 0.030197343826293944, 0.030152896881103515, 0.03047478485107422, 0.03020614433288574, 0.029947839736938476, 0.030016895294189452, 0.03013465690612793, 0.030168384552001954, 0.030067007064819337, 0.02997475242614746, 0.03031497573852539, 0.030263296127319338, 0.03011324882507324, 0.030519840240478515, 0.03081622314453125, 0.030219295501708984, 0.03013324737548828, 0.03043891143798828, 0.030310495376586914, 0.030134239196777345, 0.030303775787353517, 0.03018844795227051, 0.029997055053710937, 0.03017523193359375, 0.030262720108032225, 
0.030128351211547853, 0.030318431854248047, 0.030079488754272462, 0.030389375686645508, 0.03006947135925293, 0.03028153610229492, 0.030310752868652344, 0.029999103546142578, 0.030066688537597655, 0.030015296936035156, 0.030092735290527344, 0.030165760040283204, 0.030074880599975585, 0.029988895416259764, 0.030128095626831056, 0.030181407928466797, 0.030200992584228516, 0.030073440551757813, 0.030504959106445313, 0.030199712753295898, 0.030139551162719727, 0.030180288314819337, 0.030216192245483397, 0.030511104583740234, 0.029859647750854493, 0.029911231994628907, 0.029936800003051756, 0.029999807357788087, 0.029871904373168945, 0.029865760803222657, 0.03162940788269043, 0.030876256942749022, 0.03264886474609375, 0.030933248519897462, 0.03011199951171875, 0.030365440368652345, 0.030539775848388673, 0.03052899169921875, 0.03010188865661621, 0.0300743350982666, 0.03013497543334961, 0.029840768814086913, 0.029949567794799806, 0.029875200271606447, 0.029830495834350587, 0.029899423599243163, 0.029859039306640626, 0.02985379219055176, 0.030061439514160158, 0.029957952499389647, 0.030041599273681642, 0.02988489532470703, 0.02996227264404297, 0.02995199966430664, 0.029973695755004883, 0.03012076759338379, 0.02999091148376465, 0.030166143417358397, 0.03014956855773926, 0.029992895126342775, 0.030688480377197267, 0.03003267288208008, 0.030093311309814453, 0.031268863677978515, 0.030130271911621095, 0.03032646369934082, 0.030076480865478514, 0.029966175079345705, 0.02993440055847168, 0.029887903213500978, 0.030042720794677735, 0.02997248077392578, 0.029970432281494142, 0.030008447647094726, 0.03002672004699707, 0.030009151458740235, 0.029988960266113283, 0.02997052764892578, 0.029986656188964844, 0.03001759910583496, 0.02995347213745117, 0.030638080596923828, 0.030749919891357422, 0.03038275146484375, 0.030268928527832032, 0.030268320083618162, 0.030141664505004884, 0.030166559219360352, 0.03001206398010254, 0.030091583251953127, 0.030906368255615234, 0.029999103546142578, 0.03014656066894531, 0.030103551864624024, 0.03021126365661621, 0.030128671646118165, 0.030103071212768555, 0.030147296905517578, 0.030124063491821288, 0.03148569679260254, 0.031367424011230466, 0.030503040313720704, 0.030879615783691406, 0.030441471099853516, 0.030229951858520506, 0.03017580795288086, 0.03009712028503418, 0.030187807083129882, 0.03033497619628906, 0.030074880599975585, 0.030201087951660155, 0.030300928115844727, 0.03012531280517578, 0.030225151062011717, 0.03028553581237793, 0.03038447952270508, 0.030221664428710937, 0.03004444885253906, 0.030236543655395506, 0.030271167755126952, 0.03040127944946289, 0.030007295608520508, 0.030289920806884765, 0.03140812873840332, 0.03014406394958496, 0.03074608039855957, 0.030323455810546875, 0.030083295822143554, 0.030142463684082032, 0.030108959197998046, 0.030237567901611327, 0.030011232376098634, 0.030113792419433592, 0.0302891845703125, 0.030353471755981444, 0.030151296615600585, 0.030294271469116212, 0.03014838409423828, 0.03001158332824707, 0.030369152069091798, 0.030072320938110353, 0.030199935913085937, 0.030044736862182616, 0.03007251167297363, 0.030438304901123047, 0.03030745506286621, 0.030030719757080077, 0.030126079559326172, 0.030021055221557617, 0.030206527709960938, 0.03096998405456543, 0.03006835174560547, 0.029879615783691405, 0.029903743743896486, 0.029757503509521485, 0.02985487937927246, 0.02986390495300293, 0.029851871490478514, 0.02974787139892578, 0.029876224517822264, 0.029836511611938475, 0.030106399536132814, 0.02993971252441406, 0.03000912094116211, 
0.030019712448120118, 0.030168767929077148, 0.029881919860839844, 0.02987504005432129, 0.02975334358215332, 0.02981427192687988, 0.029932031631469725, 0.030156320571899414, 0.02989695930480957, 0.029937887191772462, 0.03000934410095215, 0.030027231216430663, 0.03012838363647461, 0.029913375854492188, 0.029954048156738283, 0.030029823303222656, 0.029929471969604493, 0.0299683837890625, 0.030100543975830077, 0.030161855697631836, 0.03038198471069336, 0.03041289520263672, 0.03039641571044922, 0.03074790382385254, 0.030613439559936523, 0.030192127227783205, 0.030619968414306642, 0.030095359802246095, 0.03016499137878418, 0.02998067283630371, 0.030119455337524415, 0.030091487884521484, 0.030089120864868164, 0.030106111526489256, 0.030231807708740236, 0.030313056945800783, 0.03005174446105957, 0.03007753562927246, 0.030018783569335936, 0.029997856140136718, 0.030064640045166017, 0.03019980812072754, 0.030045984268188476, 0.03145011138916016, 0.030742847442626953, 0.03039507293701172, 0.03019481658935547, 0.030649215698242187, 0.030052064895629883, 0.030026016235351564, 0.030117887496948242, 0.030088319778442382, 0.030090112686157227, 0.030070783615112305, 0.030003200531005858, 0.030023679733276368, 0.02998214340209961, 0.031410751342773435, 0.030072544097900392, 0.030023967742919922, 0.0299451847076416, 0.02998918342590332, 0.02994825553894043, 0.02999295997619629, 0.030047935485839845, 0.03003219223022461, 0.02994175910949707, 0.03017932891845703, 0.02997222328186035, 0.030060800552368164, 0.02993561553955078, 0.029990367889404297, 0.03001785659790039, 0.030023487091064453, 0.030021663665771484, 0.03009929656982422, 0.029980447769165038, 0.03006559944152832, 0.029974336624145507, 0.029958303451538087, 0.0300214729309082, 0.02997599983215332, 0.031796831130981446, 0.03016531181335449, 0.030032543182373046, 0.029870080947875976, 0.029913087844848633, 0.030078847885131835, 0.02997056007385254, 0.030023551940917968, 0.030207904815673828, 0.02991542434692383, 0.030083232879638672, 0.030178943634033204, 0.030658048629760744, 0.03000752067565918, 0.030013759613037108, 0.030146112442016603, 0.030132736206054687, 0.030036031723022463, 0.030215839385986328, 0.02995030403137207, 0.030387807846069335, 0.030245279312133787, 0.030271295547485352, 0.03079187202453613, 0.03139993667602539, 0.032045055389404296, 0.03018547248840332, 0.030076927185058593, 0.030029760360717774, 0.03016499137878418, 0.029964000701904296, 0.02996873664855957, 0.030082624435424806, 0.029946592330932616, 0.02991231918334961, 0.03016569519042969, 0.029948863983154297, 0.029903039932250977, 0.030163616180419923, 0.03041043281555176, 0.030284095764160156, 0.030209856033325197, 0.030310016632080078, 0.030507583618164063, 0.030758815765380858, 0.030734432220458983, 0.030369792938232422, 0.030266815185546875, 0.03029971122741699, 0.03023155212402344, 0.03021004867553711, 0.030190912246704102, 0.030243520736694337, 0.03041689682006836, 0.03018320083618164, 0.03032111930847168, 0.03104332733154297, 0.030212095260620117, 0.030789823532104493, 0.03037932777404785, 0.03062416076660156, 0.030248735427856447, 0.03350969696044922, 0.035278526306152344, 0.029978879928588865, 0.029978111267089845, 0.029972063064575196, 0.029944799423217774, 0.030035776138305666, 0.030067968368530273, 0.029997152328491213, 0.030126911163330078, 0.030025760650634767, 0.03003593635559082, 0.030056480407714845, 0.02997977638244629, 0.03013532829284668, 0.03023347282409668, 0.03046268844604492, 0.030455263137817382, 0.0301760311126709, 0.030168384552001954, 
0.03016364860534668, 0.030451711654663087, 0.030656511306762696, 0.030249216079711913, 0.0302609920501709, 0.03522009658813476, 0.030914623260498045, 0.030799615859985353, 0.030603391647338867, 0.03032851219177246, 0.030536127090454102, 0.03003536033630371, 0.029895263671875, 0.029884416580200194, 0.03018547248840332, 0.02998201560974121, 0.02997318458557129, 0.029917280197143556, 0.02987612724304199, 0.029870080947875976, 0.02999443244934082, 0.029784639358520507, 0.029757312774658203, 0.030006879806518554, 0.029946495056152343, 0.029822879791259766, 0.029728607177734376, 0.02987433624267578, 0.02982054328918457, 0.029736448287963867, 0.03017919921875, 0.029932544708251952, 0.030044160842895507, 0.029863935470581054, 0.029997055053710937, 0.02987932777404785, 0.03039126396179199, 0.030007295608520508, 0.029803840637207032, 0.029737119674682618, 0.029710559844970702, 0.029720895767211913, 0.02967094421386719, 0.029759552001953123, 0.029855520248413085, 0.029610624313354494, 0.029685760498046877, 0.030070688247680662, 0.029853696823120116, 0.030101600646972655, 0.029828800201416015, 0.029765663146972657, 0.02973257637023926, 0.029690431594848632, 0.029730815887451172, 0.029578367233276368, 0.02978201675415039, 0.030046304702758788, 0.029897504806518555, 0.029865760803222657, 0.029796575546264647, 0.029918527603149413, 0.0297192325592041, 0.02977791976928711, 0.029900800704956054, 0.029947519302368164, 0.02986134338378906, 0.030294944763183593, 0.0302696647644043, 0.030031871795654298, 0.02995199966430664, 0.029859840393066408, 0.0309552001953125, 0.031088096618652344, 0.03027337646484375, 0.029872896194458008, 0.029958015441894532, 0.029784448623657228, 0.02975129508972168, 0.02983526420593262, 0.02969599914550781, 0.029679231643676758, 0.029725343704223632, 0.02981449508666992, 0.030010496139526367, 0.029809343338012696, 0.02979654312133789, 0.03021414375305176, 0.030021631240844726, 0.029846784591674804, 0.029872896194458008, 0.02995964813232422, 0.029911775588989258, 0.029892416000366212, 0.030113311767578126, 0.029849504470825194, 0.029790784835815428, 0.030027072906494142, 0.030220544815063477, 0.030083520889282227, 0.029947359085083006, 0.029872159957885742, 0.02988697624206543, 0.029809856414794923, 0.029748031616210938, 0.030007295608520508, 0.029895807266235353, 0.029889408111572265, 0.029841407775878907, 0.030100736618041992, 0.031789823532104494, 0.029831167221069335, 0.029916736602783205, 0.02995039939880371, 0.029911327362060546, 0.030217952728271484, 0.029890111923217774, 0.029825504302978516, 0.030105567932128905, 0.02975948715209961, 0.03080601692199707, 0.03242393493652344, 0.030086687088012695, 0.029871999740600588, 0.02984329605102539, 0.02996284866333008, 0.029950111389160157, 0.030092416763305666, 0.030781408309936524, 0.030145408630371094, 0.030461984634399412, 0.030251327514648436, 0.03002217674255371, 0.029879968643188478, 0.029906719207763673, 0.02992972755432129, 0.02981920051574707, 0.030059743881225585, 0.02987481689453125, 0.02977939224243164, 0.02984956741333008, 0.02982307243347168, 0.03081648063659668, 0.029910943984985353, 0.02997302436828613, 0.03100841522216797, 0.030044511795043947, 0.030085119247436523, 0.030066783905029298, 0.030176736831665038, 0.030171167373657225, 0.030187936782836915, 0.030238719940185548, 0.030236223220825195, 0.030349599838256837, 0.03039411163330078, 0.030149023056030275, 0.030142463684082032, 0.030074880599975585, 0.030650367736816408, 0.030715904235839843, 0.031243520736694334, 0.03105254364013672, 0.031037439346313478, 
0.0308569278717041, 0.030568735122680664, 0.030432416915893556, 0.030191808700561523, 0.030128799438476562, 0.030117887496948242, 0.030150272369384765, 0.03005196762084961, 0.030054176330566406, 0.03005948829650879, 0.03005788803100586, 0.030046016693115234, 0.03004444885253906, 0.030398975372314452, 0.030097055435180663, 0.03014076805114746, 0.030111743927001954, 0.030133663177490236, 0.030087039947509765, 0.03011846351623535, 0.03033718490600586, 0.030238592147827148, 0.030185728073120116, 0.030402431488037108, 0.030328832626342773, 0.03015679931640625, 0.030281728744506835, 0.0302260799407959, 0.030204256057739257, 0.030208063125610352, 0.03147660827636719, 0.030053951263427733, 0.029978464126586914, 0.029914911270141602, 0.02990982437133789, 0.03061555290222168, 0.029918752670288085, 0.0298767032623291, 0.030045280456542967, 0.029913536071777345, 0.03019824028015137, 0.02994175910949707, 0.030064640045166017, 0.02987353515625, 0.029837120056152345, 0.029880256652832032, 0.02982387161254883, 0.029918272018432616, 0.030098239898681642, 0.030009056091308595, 0.029776224136352537, 0.030132287979125975, 0.029783615112304686, 0.03083718490600586, 0.03016908836364746, 0.030474239349365235, 0.029813888549804688, 0.029887359619140626, 0.02978835105895996, 0.029835071563720703, 0.02978396797180176, 0.029767776489257814, 0.029695871353149415, 0.0297445125579834, 0.029829183578491212, 0.029790912628173828, 0.02996428871154785, 0.02998886489868164, 0.029962240219116212, 0.029911039352416992, 0.029880128860473632, 0.02984582328796387, 0.029724544525146484, 0.029763328552246095, 0.03001785659790039, 0.029822912216186524, 0.029855648040771485, 0.029743200302124025, 0.029730815887451172, 0.02978175926208496, 0.02970585632324219, 0.02990729522705078, 0.029785503387451173, 0.029851615905761717, 0.030204416275024414, 0.02991315269470215, 0.029832767486572265, 0.030020383834838866, 0.029739007949829102, 0.03026259231567383, 0.029858495712280272, 0.029833120346069338, 0.02987001609802246]",tokens/s,33.153352010944396,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return 
forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1267.679232,8455.585792,0.0,8053.06368,7930.605568,s,1,19.472814453125,19.472814453125,0.0,19.472814453125,19.472814453125,19.472814453125,19.472814453125,[19.472814453125],,kWh,0.00036346699567504096,4.008575619899076e-05,0.00012734926854601758,0.0005309020204200493,,MB,1359.72864,10221.387776,0.0,9806.282752,9135.58528,s,10,17.336366088867187,1.7336366088867188,0.00715857964548333,1.7358570556640625,1.7396066040039062,1.740523028564453,1.7412561682128906,"[1.7155333251953124, 1.7261685791015624, 1.7329794921875, 1.735396240234375, 1.73631787109375, 1.741439453125, 1.7353553466796876, 1.7367783203125, 1.7369945068359376, 1.7394029541015625]",tokens/s,147.66647098228637,kWh,5.0387990989581035e-05,5.557416725181193e-06,3.338919337800006e-05,8.933460109276228e-05,tokens/kWh,2865630.9746564776,MB,1379.86048,10221.387776,0.0,9806.282752,9135.58784,s,10,84.8021435546875,8.48021435546875,0.009905659618896043,8.482072265625,8.490750878906251,8.491805126953125,8.492648525390624,"[8.462275390625, 8.46676171875, 8.4721220703125, 8.47878125, 8.4782978515625, 8.48536328125, 8.485837890625, 8.489328125, 8.4905166015625, 8.492859375]",tokens/s,7.429057493030508,kWh,0.00024802750348833496,2.7359373356772738e-05,0.00016489388191499865,0.0004402807587601063,tokens/kWh,143090.51382898726,,s,630,84.79812712097169,0.13460020177932014,0.0015743293898621002,0.13445733642578125,0.13553261108398437,0.13607331619262697,0.14411522109985353,"[0.1446451873779297, 0.13289773559570311, 0.1330380859375, 0.13287532043457032, 0.1331735382080078, 0.13261276245117187, 0.13361094665527343, 0.1363850860595703, 0.1333186492919922, 0.13296832275390624, 0.13349696350097656, 0.13287014770507813, 0.1330831298828125, 0.13364837646484376, 0.13496319580078125, 0.13458432006835938, 0.13344358825683594, 0.13339340209960937, 0.13336268615722657, 0.13415213012695312, 0.13395321655273437, 0.13435093688964844, 0.13527194213867189, 0.1335181121826172, 0.134193115234375, 0.1336443176269531, 0.13340585327148438, 0.1340649871826172, 0.1340331268310547, 0.13425897216796875, 0.13391651916503905, 0.1352889862060547, 0.13390777587890626, 0.13381292724609375, 0.13462118530273437, 0.1339473876953125, 0.1343098907470703, 0.134434814453125, 0.1346314239501953, 0.1345003204345703, 0.13403068542480467, 0.1344349365234375, 0.13433914184570311, 0.13409030151367188, 0.13458393859863282, 0.13489353942871093, 0.13470396423339845, 0.13377690124511718, 0.1345479736328125, 0.13423820495605468, 0.13445733642578125, 0.13459455871582032, 0.13469075012207032, 0.13605462646484376, 0.13422320556640624, 0.134806396484375, 0.1343488006591797, 0.13465533447265626, 0.13489753723144532, 0.13473052978515626, 0.13486483764648438, 0.13460076904296875, 0.13470870971679688, 0.14285130310058594, 
0.13233641052246095, 0.13295411682128908, 0.13257673645019533, 0.13195840454101562, 0.13386381530761718, 0.13451858520507812, 0.13782704162597656, 0.13394125366210938, 0.13406002807617187, 0.13298074340820312, 0.13333094787597657, 0.1330043182373047, 0.13341110229492187, 0.13575852966308594, 0.1345876159667969, 0.13466499328613282, 0.13314866638183595, 0.13381222534179688, 0.13343901062011718, 0.13340106201171875, 0.13425050354003906, 0.1351365509033203, 0.1343474884033203, 0.1335683135986328, 0.13404588317871094, 0.1337548828125, 0.13321830749511718, 0.1344932098388672, 0.13455357360839842, 0.13485772705078125, 0.134582275390625, 0.13408460998535157, 0.13386341857910156, 0.13374208068847657, 0.1344189453125, 0.13466624450683592, 0.13498495483398437, 0.13414682006835937, 0.13401023864746095, 0.13414463806152344, 0.13404570007324218, 0.13433445739746094, 0.13428070068359374, 0.13460914611816407, 0.13421311950683593, 0.13516876220703125, 0.13442822265625, 0.13471583557128905, 0.13461660766601563, 0.13473951721191407, 0.13441116333007813, 0.13473965454101564, 0.13481607055664063, 0.13484576416015626, 0.13464851379394532, 0.1349591064453125, 0.13474815368652343, 0.1345098876953125, 0.1355701141357422, 0.13510861206054686, 0.13485781860351562, 0.13466307067871094, 0.14344601440429688, 0.1324564514160156, 0.13363813781738282, 0.132780029296875, 0.13315887451171876, 0.13255477905273438, 0.13442218017578125, 0.13747622680664062, 0.1345781707763672, 0.13262460327148437, 0.133496826171875, 0.13340467834472655, 0.13310975646972656, 0.1339656982421875, 0.13538726806640625, 0.13455360412597656, 0.13350262451171874, 0.13474032592773438, 0.1330257873535156, 0.13386341857910156, 0.13391258239746093, 0.1343488006591797, 0.13524140930175782, 0.1338924102783203, 0.13372416687011718, 0.13452076721191406, 0.1336381072998047, 0.13406195068359375, 0.13426844787597655, 0.13483282470703126, 0.13448733520507813, 0.13385574340820314, 0.13433584594726564, 0.13338287353515624, 0.1342976989746094, 0.13440943908691405, 0.13464662170410155, 0.1348485107421875, 0.13511065673828124, 0.13410499572753906, 0.13427290344238282, 0.13409295654296874, 0.13459619140625, 0.13419874572753906, 0.135795654296875, 0.13498361206054688, 0.13414617919921876, 0.1340885467529297, 0.13416188049316408, 0.1348451232910156, 0.1348032989501953, 0.13486000061035155, 0.13604051208496093, 0.134237060546875, 0.13513113403320312, 0.134434814453125, 0.13465533447265626, 0.13525794982910155, 0.13474400329589845, 0.136186279296875, 0.13480598449707032, 0.1346826171875, 0.13454873657226563, 0.14405885314941405, 0.13243597412109376, 0.1329603271484375, 0.13309542846679687, 0.13305445861816406, 0.1331480255126953, 0.134337158203125, 0.13784640502929688, 0.13433279418945313, 0.13349478149414062, 0.13295616149902345, 0.13384848022460938, 0.13366534423828125, 0.13345794677734374, 0.13504634094238283, 0.13529376220703124, 0.13432217407226563, 0.13364224243164063, 0.1336231689453125, 0.13361555480957032, 0.1339823303222656, 0.1343780517578125, 0.135278076171875, 0.1344148406982422, 0.13379994201660156, 0.13365452575683595, 0.13381222534179688, 0.13383065795898438, 0.13391667175292968, 0.13514073181152345, 0.13491468811035157, 0.1343846435546875, 0.13443292236328125, 0.13423907470703125, 0.1339404754638672, 0.13440896606445313, 0.13515968322753907, 0.13467047119140624, 0.13446144104003907, 0.1350328369140625, 0.1345781707763672, 0.1341317138671875, 0.13451029968261718, 0.13465411376953124, 0.13558592224121094, 0.1346129913330078, 0.1343585205078125, 
0.13464627075195312, 0.13438975524902344, 0.13493218994140624, 0.13503721618652345, 0.1352244415283203, 0.1351832275390625, 0.1352683563232422, 0.1346334686279297, 0.13463682556152343, 0.13439248657226563, 0.1352265625, 0.13528871154785158, 0.1352236785888672, 0.13563127136230468, 0.13502272033691406, 0.1351046142578125, 0.14471577453613282, 0.1327104034423828, 0.1337139129638672, 0.13267062377929686, 0.13297938537597656, 0.13370384216308595, 0.13428694152832032, 0.13744073486328126, 0.13463139343261718, 0.13381526184082032, 0.13320191955566407, 0.13400679016113282, 0.13372621154785155, 0.13360675048828125, 0.13632374572753905, 0.13445465087890626, 0.13379414367675782, 0.13417692565917969, 0.13383897399902345, 0.13350912475585938, 0.13435289001464842, 0.13421568298339845, 0.1352906951904297, 0.13411106872558592, 0.13407574462890626, 0.1338559112548828, 0.13366297912597655, 0.13383894348144532, 0.13438088989257813, 0.1349698944091797, 0.13433612060546876, 0.13414451599121094, 0.13403340148925783, 0.13377127075195314, 0.13397401428222655, 0.13420098876953124, 0.13505775451660157, 0.13489561462402344, 0.13476864624023438, 0.13502201843261719, 0.13412818908691407, 0.13397811889648437, 0.13435427856445312, 0.1349638671875, 0.13496730041503907, 0.13462118530273437, 0.1349181365966797, 0.13493247985839843, 0.1343662109375, 0.13528147888183595, 0.13469033813476564, 0.13485737609863283, 0.1352683563232422, 0.13451878356933594, 0.1344996795654297, 0.13426141357421875, 0.13547314453125, 0.1349181365966797, 0.13490380859375, 0.13537388610839843, 0.13508819580078124, 0.13475724792480467, 0.13458128356933594, 0.14694578552246093, 0.1329145965576172, 0.133342041015625, 0.13230892944335937, 0.13303404235839844, 0.13280613708496095, 0.13442489624023438, 0.1380435791015625, 0.13421568298339845, 0.13407232666015625, 0.13312409973144532, 0.13294950866699218, 0.13348095703125, 0.13445733642578125, 0.13543218994140624, 0.13544569396972655, 0.13508076477050782, 0.13315072631835936, 0.13369549560546876, 0.13345753479003905, 0.133857666015625, 0.13495706176757813, 0.13499801635742187, 0.1352395782470703, 0.13439952087402343, 0.13379437255859375, 0.13414402770996095, 0.13346595764160157, 0.13427110290527344, 0.1347973175048828, 0.1349959716796875, 0.134719482421875, 0.13403546142578124, 0.13401283264160158, 0.13415843200683594, 0.13419725036621094, 0.13496115112304688, 0.1351331787109375, 0.13466213989257814, 0.13447935485839843, 0.13445785522460937, 0.13399859619140625, 0.13541075134277344, 0.13430006408691406, 0.1344147186279297, 0.13520501708984375, 0.13561395263671874, 0.13481216430664061, 0.1342705535888672, 0.13443522644042968, 0.13467033386230468, 0.13436245727539062, 0.13587933349609374, 0.13525759887695313, 0.1352619171142578, 0.1342594299316406, 0.1342136993408203, 0.1345575714111328, 0.13568159484863282, 0.13566621398925782, 0.1352274169921875, 0.13612442016601561, 0.13510450744628907, 0.14488925170898437, 0.1332266845703125, 0.1331531219482422, 0.1331826629638672, 0.1336117401123047, 0.13311247253417968, 0.134619140625, 0.1375001220703125, 0.13432476806640625, 0.1334599609375, 0.13366476440429687, 0.1331994171142578, 0.1340952911376953, 0.13456729125976563, 0.13553318786621094, 0.134656005859375, 0.13436927795410156, 0.1341744384765625, 0.13357904052734376, 0.1338817901611328, 0.134191162109375, 0.13539532470703125, 0.13473997497558593, 0.1348784637451172, 0.1342758331298828, 0.1335562286376953, 0.13373167419433593, 0.13389875793457032, 0.13481901550292968, 0.134566650390625, 0.13507810974121093, 
0.1347071990966797, 0.1337487335205078, 0.1338299560546875, 0.13381814575195314, 0.13547196960449218, 0.1344012451171875, 0.13511561584472656, 0.13523548889160156, 0.13436703491210938, 0.13425827026367188, 0.13415040588378907, 0.1351561584472656, 0.1346519012451172, 0.13506883239746093, 0.13571527099609376, 0.13457801818847656, 0.13433013916015624, 0.13460765075683595, 0.1346553955078125, 0.13494064331054687, 0.1347139892578125, 0.1363291473388672, 0.13459461975097656, 0.13478656005859374, 0.13472000122070313, 0.1347870788574219, 0.13437919616699218, 0.1351164093017578, 0.13642396545410157, 0.13531765747070312, 0.13516505432128906, 0.13438861083984374, 0.14413824462890626, 0.13233740234375, 0.13348069763183593, 0.13254360961914063, 0.13334002685546875, 0.1328594512939453, 0.1355184020996094, 0.13795968627929686, 0.1347256317138672, 0.13324082946777344, 0.13287423706054688, 0.1336320037841797, 0.13301536560058594, 0.13435427856445312, 0.13631983947753906, 0.13581304931640625, 0.13457618713378905, 0.13298445129394532, 0.134142333984375, 0.13341494750976562, 0.13386749267578124, 0.1351331787109375, 0.13505282592773438, 0.13462576293945314, 0.13366470336914063, 0.13510047912597656, 0.13320191955566407, 0.13430105590820313, 0.13443341064453124, 0.1345366973876953, 0.13544053649902343, 0.1340985870361328, 0.13520147705078125, 0.13408038330078126, 0.13439193725585938, 0.13384608459472655, 0.13517007446289062, 0.13448284912109376, 0.1362186279296875, 0.1345269775390625, 0.1341513214111328, 0.13425340270996095, 0.13577626037597657, 0.134819580078125, 0.13539488220214843, 0.135998046875, 0.13494451904296875, 0.1346051483154297, 0.1348607940673828, 0.13465394592285157, 0.1348335723876953, 0.13514607238769533, 0.13586572265625, 0.13476722717285156, 0.13455148315429688, 0.13462106323242187, 0.13457994079589844, 0.13525244140625, 0.13608140563964843, 0.13553254699707032, 0.1351772766113281, 0.13435539245605468, 0.13606553649902345, 0.14620672607421875, 0.13244371032714844, 0.13319801330566405, 0.1329072570800781, 0.13374668884277344, 0.13330021667480468, 0.13452493286132813, 0.13788131713867188, 0.13413404846191407, 0.13341439819335937, 0.13297509765625, 0.1332527313232422, 0.13361190795898437, 0.134582275390625, 0.13655772399902344, 0.13507980346679688, 0.13379478454589844, 0.1338653106689453, 0.13355952453613282, 0.1333931884765625, 0.13364973449707032, 0.13582156372070311, 0.13582806396484376, 0.13440205383300782, 0.1336432647705078, 0.13365679931640626, 0.13364694213867187, 0.1339107208251953, 0.13480531311035157, 0.1357129669189453, 0.1344593963623047, 0.13511474609375, 0.13409893798828126, 0.13403053283691407, 0.13405471801757812, 0.13462118530273437, 0.13542092895507812, 0.13503794860839843, 0.13549363708496093, 0.13479676818847655, 0.13466012573242186, 0.13453701782226563, 0.13476693725585936, 0.13537849426269533, 0.13508677673339844, 0.13539897155761718, 0.13490406799316407, 0.1344351043701172, 0.1351527099609375, 0.13607968139648438, 0.13454605102539063, 0.134444580078125, 0.1358135986328125, 0.13471087646484375, 0.134816162109375, 0.13443072509765625, 0.13516184997558595, 0.13511836242675782, 0.13501283264160155, 0.1356083221435547, 0.13550387573242187, 0.1350100555419922, 0.1349586181640625, 0.14521688842773436, 0.13259759521484374, 0.13361990356445314, 0.1328605194091797, 0.13296231079101561, 0.13359455871582032, 0.13501625061035155, 0.13834317016601563, 0.1351471405029297, 0.13290264892578124, 0.13369203186035156, 0.13334713745117188, 0.1332635498046875, 0.13361766052246093, 
0.13605628967285155, 0.13594064331054687, 0.1343488006591797, 0.13393257141113282, 0.13344790649414062, 0.1334888916015625, 0.1340948486328125, 0.13518643188476562, 0.13620428466796874, 0.1354130859375, 0.1338006134033203, 0.13372210693359374, 0.13365452575683595, 0.13390028381347657, 0.1350061492919922, 0.13510252380371093, 0.13494454956054688, 0.13468019104003906, 0.13391523742675782, 0.13459986877441407, 0.13426124572753906, 0.13409925842285156, 0.1355345916748047, 0.13539433288574218, 0.13486524963378907, 0.13424703979492186, 0.1354854431152344, 0.13435699462890624, 0.1341251220703125, 0.13453152465820312, 0.1357243194580078, 0.13464649963378905, 0.13576393127441405, 0.1347440948486328, 0.13473178100585936, 0.13487513732910156, 0.13431365966796874, 0.13592169189453124, 0.13491795349121094, 0.13570506286621092, 0.13498777770996093, 0.13486213684082032, 0.13463623046875, 0.13639625549316406, 0.13495890808105468, 0.13451670837402344, 0.13587644958496095, 0.13536349487304689, 0.13505075073242187]",tokens/s,7.429409367747613,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,917.696512,664.666112,0.0,262.144,258.163712,s,1,7.6300888671875,7.6300888671875,0.0,7.6300888671875,7.6300888671875,7.6300888671875,7.6300888671875,[7.6300888671875],,kWh,1.91309995333692e-05,2.1028023985737865e-06,6.643616425994203e-06,2.787741835793719e-05,,MB,1319.268352,765.329408,0.0,350.224384,317.820928,s,10,0.23583388710021977,0.023583388710021976,0.00035628649426394925,0.023497424125671386,0.023711647987365723,0.024158624172210694,0.02451620512008667,"[0.023349599838256838, 0.023612319946289064, 0.023518495559692383, 0.02361190414428711, 0.024605600357055665, 0.02357606315612793, 0.023476352691650392, 0.023384384155273438, 0.023307615280151368, 0.023391551971435547]",tokens/s,10855.098185750145,kWh,6.837934715165661e-07,7.541062417628593e-08,4.523943249649445e-07,1.2115984206577964e-06,tokens/kWh,211291130.48943514,MB,1358.286848,790.495232,0.0,373.293056,317.823488,s,10,11.557419067382813,1.1557419067382813,0.00930108353397948,1.1524498291015626,1.1684163696289063,1.1725587097167969,1.1758725817871094,"[1.167495849609375, 1.151405029296875, 1.152125, 1.1767010498046875, 1.15904296875, 1.152774658203125, 1.1570340576171876, 1.146061767578125, 1.146626708984375, 1.1481519775390625]",tokens/s,54.510440118761224,kWh,3.353997128140862e-05,3.699005921674041e-06,1.2515893827433532e-05,4.97548710305162e-05,tokens/kWh,1266207.6836931233,,s,630,11.552553209304802,0.01833738604651557,0.0004008423388117604,0.018266992568969728,0.018514899826049803,0.018706135654449462,0.020546100502014164,"[0.01823910331726074, 0.018311328887939453, 0.01820460891723633, 0.018280927658081054, 0.018437952041625977, 0.018329599380493163, 0.018288639068603514, 0.018239711761474608, 0.018315040588378906, 0.018173952102661133, 0.018460512161254883, 0.01830928039550781, 0.0182620792388916, 
0.01834115219116211, 0.018518688201904297, 0.018380800247192384, 0.01825382423400879, 0.018337791442871093, 0.018249727249145507, 0.01862611198425293, 0.018384576797485352, 0.018371679306030272, 0.018575071334838867, 0.01851795196533203, 0.018839199066162108, 0.019063135147094727, 0.01912335968017578, 0.020586431503295897, 0.019098527908325197, 0.01902592086791992, 0.018974720001220705, 0.018946048736572265, 0.018739519119262697, 0.018931392669677735, 0.018902912139892578, 0.018901119232177733, 0.018783584594726562, 0.018511871337890624, 0.018339487075805665, 0.018357343673706054, 0.0183417911529541, 0.018329599380493163, 0.01838934326171875, 0.01841862487792969, 0.01839302444458008, 0.018461408615112303, 0.018378816604614257, 0.018418752670288085, 0.018332128524780274, 0.018332096099853517, 0.018328863143920897, 0.019087167739868165, 0.01841980743408203, 0.018397567749023437, 0.018630624771118164, 0.018291040420532225, 0.01832566452026367, 0.01839923286437988, 0.01846272087097168, 0.018337087631225588, 0.018330623626708984, 0.0183540153503418, 0.018294559478759766, 0.017964544296264647, 0.018251327514648436, 0.018264543533325194, 0.018264448165893555, 0.018736223220825195, 0.018353151321411132, 0.018221023559570313, 0.018249536514282228, 0.018204896926879884, 0.018439903259277343, 0.018284832000732422, 0.018184192657470705, 0.018229375839233397, 0.018300800323486327, 0.01846272087097168, 0.018306528091430664, 0.018412063598632813, 0.018327104568481446, 0.01825993537902832, 0.018267808914184572, 0.01826633644104004, 0.01834659194946289, 0.01822105598449707, 0.01817388725280762, 0.01815135955810547, 0.018287872314453123, 0.01826291275024414, 0.01827840042114258, 0.018173952102661133, 0.01817945671081543, 0.01816815948486328, 0.01827222442626953, 0.018179807662963867, 0.018145055770874025, 0.018236032485961916, 0.01815513610839844, 0.018159936904907227, 0.018286239624023436, 0.018305280685424804, 0.018241920471191407, 0.018242847442626952, 0.018204736709594726, 0.018622623443603516, 0.018337791442871093, 0.01822105598449707, 0.0182379207611084, 0.018218143463134766, 0.018258783340454103, 0.018197824478149414, 0.018262720108032225, 0.01820483207702637, 0.018204639434814453, 0.018216255187988282, 0.01839606475830078, 0.01837251281738281, 0.018258943557739257, 0.018270015716552734, 0.01832803153991699, 0.018473440170288086, 0.01822233581542969, 0.0182873592376709, 0.018207807540893555, 0.01820502471923828, 0.017932287216186525, 0.018233600616455077, 0.01824246406555176, 0.01824140739440918, 0.01828553581237793, 0.01832713508605957, 0.01827471923828125, 0.01845884895324707, 0.018300703048706055, 0.01832979202270508, 0.01839027214050293, 0.018414560317993163, 0.018367488861083983, 0.018270816802978516, 0.01826799964904785, 0.0182969913482666, 0.018364416122436524, 0.01827996826171875, 0.01838947105407715, 0.018356224060058594, 0.01828659248352051, 0.018311168670654295, 0.018237728118896485, 0.018368255615234374, 0.018289791107177735, 0.01827833557128906, 0.018479808807373047, 0.018248096466064453, 0.018243232727050782, 0.018280672073364257, 0.01824118423461914, 0.01818780708312988, 0.018217504501342772, 0.018565536499023438, 0.0182413444519043, 0.01848944091796875, 0.018277759552001952, 0.018227071762084962, 0.018182559967041014, 0.01818227195739746, 0.018372800827026366, 0.018182239532470702, 0.018206560134887695, 0.018182144165039063, 0.018260128021240236, 0.01830076789855957, 0.01823744010925293, 0.018190336227416993, 0.018217216491699217, 0.018160768508911133, 0.018084096908569335, 
0.018312768936157228, 0.018262720108032225, 0.018222976684570312, 0.018555135726928712, 0.018250879287719728, 0.01825267219543457, 0.01816713523864746, 0.018326271057128907, 0.018250751495361327, 0.018273536682128905, 0.01820022392272949, 0.01830441665649414, 0.01846883201599121, 0.018574975967407228, 0.01851456069946289, 0.018407199859619142, 0.018225151062011717, 0.018267648696899414, 0.018301631927490233, 0.018345087051391602, 0.018299840927124025, 0.01834163284301758, 0.018739200592041014, 0.02111305618286133, 0.019760927200317382, 0.018555135726928712, 0.018549631118774414, 0.018432735443115234, 0.018396383285522462, 0.018453439712524413, 0.018354175567626953, 0.018382848739624022, 0.018404544830322264, 0.01873209571838379, 0.021286527633666993, 0.023779455184936522, 0.018687999725341797, 0.02068191909790039, 0.019323648452758788, 0.01870649528503418, 0.018561023712158203, 0.018519744873046876, 0.01849171257019043, 0.018384992599487306, 0.018460575103759765, 0.01860812759399414, 0.018468095779418946, 0.01834060859680176, 0.018370847702026367, 0.01827734375, 0.018346975326538086, 0.01830409622192383, 0.018348703384399413, 0.018280448913574218, 0.018324607849121093, 0.01857219123840332, 0.0183701114654541, 0.018367263793945314, 0.018499456405639648, 0.01833260726928711, 0.018346879959106447, 0.01846268844604492, 0.018392416000366212, 0.0183191032409668, 0.018346912384033204, 0.018505727767944336, 0.018487295150756835, 0.01859561538696289, 0.01845430374145508, 0.0183175048828125, 0.018384288787841797, 0.018278303146362303, 0.01835215950012207, 0.018354143142700195, 0.018307231903076173, 0.020877504348754884, 0.01861164855957031, 0.01861894416809082, 0.01861631965637207, 0.018479103088378905, 0.018232704162597656, 0.01822960090637207, 0.018354719161987304, 0.01823308753967285, 0.01822105598449707, 0.018225151062011717, 0.01825382423400879, 0.01818828773498535, 0.018239391326904296, 0.018563167572021484, 0.01822256088256836, 0.018209152221679688, 0.018233503341674805, 0.018332767486572265, 0.018344863891601563, 0.01828188705444336, 0.018327680587768555, 0.018280927658081054, 0.018342079162597655, 0.018286399841308594, 0.01830463981628418, 0.018196863174438478, 0.018304479598999022, 0.01830147171020508, 0.018306079864501952, 0.018326496124267577, 0.01829248046875, 0.018233600616455077, 0.01858889579772949, 0.018324256896972656, 0.01832713508605957, 0.01835612869262695, 0.018284639358520507, 0.01831977653503418, 0.0183723201751709, 0.01832374382019043, 0.018387136459350587, 0.018370431900024416, 0.018429407119750975, 0.018526687622070312, 0.01865727996826172, 0.01860540771484375, 0.018508447647094726, 0.018386943817138672, 0.01830297660827637, 0.018269279479980468, 0.018318048477172853, 0.01827203178405762, 0.01828691291809082, 0.01829248046875, 0.01827849578857422, 0.018378175735473633, 0.018450368881225587, 0.018502880096435546, 0.01827737617492676, 0.01834832000732422, 0.018336128234863282, 0.01837273597717285, 0.01803878402709961, 0.01835036849975586, 0.018705696105957032, 0.018350528717041015, 0.01841574478149414, 0.018273344039916994, 0.018207551956176758, 0.018257919311523436, 0.018241695404052734, 0.01825980758666992, 0.018520383834838866, 0.018222784042358397, 0.018227039337158205, 0.01812291145324707, 0.018294912338256836, 0.018222976684570312, 0.019139616012573243, 0.019427839279174804, 0.01832374382019043, 0.01831324768066406, 0.018262176513671874, 0.018257919311523436, 0.018339872360229492, 0.018251264572143554, 0.018165279388427734, 0.018164319992065428, 0.01827014350891113, 
0.018145248413085936, 0.018151872634887694, 0.018276351928710938, 0.018179872512817382, 0.018198112487792968, 0.01830726432800293, 0.01830499267578125, 0.018360000610351562, 0.018385696411132812, 0.01833366394042969, 0.018298912048339843, 0.01823251152038574, 0.018436447143554687, 0.01822768020629883, 0.018315200805664063, 0.018249792098999025, 0.018279808044433594, 0.01818604850769043, 0.01825436782836914, 0.018065696716308595, 0.018233503341674805, 0.018402175903320314, 0.018270944595336912, 0.018306367874145506, 0.018278367996215822, 0.018236383438110352, 0.018096128463745118, 0.01828201675415039, 0.018143199920654298, 0.018248191833496095, 0.018067455291748045, 0.018117919921875, 0.018361055374145507, 0.018128896713256838, 0.01818524742126465, 0.01812396812438965, 0.018089664459228515, 0.01846028709411621, 0.01845529556274414, 0.018358272552490236, 0.018455968856811524, 0.01837936019897461, 0.018559104919433595, 0.018198400497436523, 0.018184255599975586, 0.018214399337768555, 0.01823174476623535, 0.01821900749206543, 0.018188064575195312, 0.01818377685546875, 0.021211008071899413, 0.01833830451965332, 0.01826633644104004, 0.018251167297363282, 0.018183040618896484, 0.018236223220825194, 0.01815443229675293, 0.0182574405670166, 0.01829692840576172, 0.018145439147949218, 0.018211231231689454, 0.01839904022216797, 0.018394784927368166, 0.018329343795776366, 0.018319711685180665, 0.018480863571166992, 0.01825027275085449, 0.018415712356567384, 0.01823529624938965, 0.018333696365356447, 0.018200735092163085, 0.01847609519958496, 0.018211360931396484, 0.01837900733947754, 0.018200832366943358, 0.018391008377075194, 0.018216480255126954, 0.018275903701782226, 0.018378591537475585, 0.018185056686401368, 0.018327295303344728, 0.018245887756347657, 0.018231296539306642, 0.0184586238861084, 0.018435199737548827, 0.018161951065063478, 0.018276960372924804, 0.018155872344970705, 0.01881785583496094, 0.02044735908508301, 0.01833033561706543, 0.018139232635498048, 0.018096031188964842, 0.018136512756347655, 0.018067968368530272, 0.018133056640625, 0.01808793640136719, 0.018085920333862304, 0.01811414337158203, 0.01785651206970215, 0.018192384719848635, 0.01825008010864258, 0.018054912567138672, 0.018340896606445313, 0.018104768753051757, 0.018106912612915037, 0.018112255096435548, 0.018131328582763673, 0.018121728897094725, 0.018152223587036134, 0.01813475227355957, 0.018088224411010743, 0.01812460708618164, 0.018120895385742186, 0.01827020835876465, 0.018298879623413086, 0.018335744857788085, 0.018149375915527344, 0.01820876884460449, 0.018164928436279298, 0.01816044807434082, 0.01818726348876953, 0.01814409637451172, 0.018140607833862305, 0.018065887451171873, 0.018123008728027343, 0.018042495727539062, 0.018167743682861327, 0.01812236785888672, 0.018180000305175782, 0.018097055435180663, 0.01815331268310547, 0.01817215919494629, 0.01823091125488281, 0.018237503051757812, 0.018180160522460936, 0.018198944091796874, 0.018161407470703127, 0.01823251152038574, 0.01869500732421875, 0.018422847747802736, 0.018423807144165038, 0.018467039108276368, 0.01835807991027832, 0.018299776077270506, 0.01817190361022949, 0.018114559173583983, 0.018157567977905274, 0.018176000595092775, 0.018139135360717772, 0.018259552001953124, 0.01818067169189453, 0.01818422317504883, 0.018124351501464842, 0.018153568267822266, 0.018141183853149414, 0.018133152008056642, 0.018102176666259767, 0.018161279678344727, 0.018106208801269532, 0.01816547203063965, 0.018129663467407228, 0.017913408279418945, 0.018094432830810546, 
0.01808188819885254, 0.018114559173583983, 0.01816499137878418, 0.01808038330078125, 0.018182592391967775, 0.018290367126464844, 0.01822435188293457, 0.01820751953125, 0.0181507511138916, 0.018176191329956053, 0.018088415145874025, 0.018136543273925783, 0.018080192565917967, 0.01820038414001465, 0.01815113639831543, 0.018057151794433592, 0.018381439208984374, 0.01812499237060547, 0.01824300765991211, 0.018504064559936525, 0.0181146240234375, 0.018205888748168947, 0.018133216857910157, 0.018209312438964845, 0.01825699234008789, 0.018240352630615235, 0.018438207626342774, 0.018176000595092775, 0.018036735534667968, 0.01803398323059082, 0.018145984649658203, 0.018084896087646483, 0.018043872833251953, 0.018054719924926757, 0.018093952178955076, 0.0181909122467041, 0.018081792831420897, 0.01804697608947754, 0.018294431686401366, 0.0181910400390625, 0.018230783462524415, 0.018200895309448243, 0.018153696060180663, 0.018116064071655273, 0.018128223419189453, 0.01820140838623047, 0.018208511352539064, 0.018143552780151367, 0.018314815521240233, 0.01811903953552246, 0.018192256927490234, 0.018286624908447267, 0.018139263153076172, 0.018243295669555664, 0.01814963150024414, 0.01817190361022949, 0.019664896011352538, 0.018245887756347657, 0.018124223709106446, 0.018198623657226562, 0.018182336807250978, 0.017911903381347655, 0.018233728408813477, 0.018308895111083984, 0.018236448287963867, 0.018201568603515624, 0.018109823226928713, 0.018198911666870116, 0.018084096908569335, 0.018116735458374025, 0.018142080307006835, 0.018186656951904297, 0.01877631950378418, 0.01831056022644043, 0.01812371253967285, 0.01822105598449707, 0.018431999206542968, 0.0186014404296875, 0.018151679992675782, 0.018211103439331053, 0.018199712753295898, 0.018131744384765624, 0.018405055999755858, 0.01821939277648926, 0.0181615047454834, 0.01815510368347168, 0.018565568923950195, 0.018139263153076172, 0.018128896713256838, 0.018167871475219727, 0.01815135955810547, 0.01818227195739746, 0.01817091178894043, 0.018193248748779298, 0.018284767150878907, 0.018093856811523437, 0.018146848678588866, 0.01820047950744629, 0.018121408462524413, 0.018261760711669923, 0.01820684814453125, 0.018135007858276368, 0.018422143936157227, 0.018110111236572267, 0.018277503967285155, 0.018131839752197267, 0.018247007369995117, 0.018137760162353515, 0.018135040283203126, 0.018173952102661133, 0.018141183853149414, 0.018192159652709962, 0.0182391357421875, 0.01829350471496582, 0.018286399841308594, 0.018323455810546875, 0.018192384719848635, 0.018305023193359374, 0.01816124725341797, 0.018200416564941407, 0.018121152877807616, 0.018141311645507814, 0.018314336776733397, 0.018148256301879884]",tokens/s,54.53339955124181,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in 
execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,23940.571136,13042.057216,0.0,12639.535104,12621.66016,s,1,48.33330859375,48.33330859375,0.0,48.33330859375,48.33330859375,48.33330859375,48.33330859375,[48.33330859375],,kWh,0.0012075479103625486,0.00013319451576492312,0.00041542311011599775,0.0017561655362434694,,MB,1335.742464,13809.614848,0.0,13386.121216,13251.628032,s,10,1.3758823699951173,0.1375882369995117,0.0002159531632196734,0.13761296081542967,0.13781410217285156,0.1378843460083008,0.13794054107666015,"[0.13727696228027345, 0.13724581909179687, 0.13779849243164063, 0.13751177978515625, 0.1376413116455078, 0.13742413330078124, 0.13758460998535157, 0.13775567626953125, 0.13768899536132811, 0.13795458984375]",tokens/s,1860.6241753131014,kWh,4.104360200694743e-06,4.526342201115148e-07,2.7116032495280253e-06,7.268597670334284e-06,tokens/kWh,35219998.63121142,MB,1355.771904,13916.5696,0.0,13493.075968,13391.3472,s,10,54.289676757812494,5.42896767578125,0.01503988719202925,5.4294775390625,5.44617490234375,5.44817265625,5.4497708593750005,"[5.4400126953125, 5.44573095703125, 5.42680419921875, 5.4227490234375, 5.45017041015625, 5.42190771484375, 5.43749755859375, 5.43215087890625, 5.3961259765625, 
5.41652734375]",tokens/s,11.60441611782742,kWh,0.00015677295109681237,1.7292627356846905e-05,9.963525640567216e-05,0.00027370083485933145,tokens/kWh,230178.32602658612,,s,630,54.28643206024167,0.08616893977816142,0.0009453966655985928,0.08600691223144531,0.08676414566040039,0.0874577579498291,0.0905246614074707,"[0.0865955810546875, 0.0861957778930664, 0.08597574615478516, 0.08633113861083984, 0.08605843353271485, 0.08573932647705078, 0.08540032196044922, 0.08720588684082031, 0.08608767700195312, 0.09068953704833985, 0.08617766571044921, 0.08588076782226563, 0.08605081939697265, 0.08681452941894531, 0.08636863708496094, 0.08664678192138672, 0.08640473937988281, 0.08620275115966797, 0.08626345825195313, 0.08650582122802734, 0.08604467010498047, 0.08738969421386719, 0.08632905578613281, 0.08598166656494141, 0.08646482849121094, 0.08582758331298829, 0.0859210205078125, 0.0865218276977539, 0.08566194915771484, 0.08593052673339843, 0.085823486328125, 0.08591974639892579, 0.08602214050292968, 0.0859156494140625, 0.08655449676513671, 0.0886165771484375, 0.08692617797851562, 0.0866725082397461, 0.08562262725830078, 0.08561260986328124, 0.08571353912353516, 0.08587673950195313, 0.08583974456787109, 0.08598931121826171, 0.0858148193359375, 0.08649974060058593, 0.08623471832275391, 0.08649318695068359, 0.08912531280517579, 0.08902678680419922, 0.0861756134033203, 0.08570063781738281, 0.08586454772949219, 0.08562844848632813, 0.08590589141845703, 0.08634345245361329, 0.08585648345947265, 0.08568831634521484, 0.08613069152832031, 0.08616687774658204, 0.0862624282836914, 0.08593795013427734, 0.08605709075927734, 0.08580095672607421, 0.08582963562011718, 0.08621670532226562, 0.08672255706787109, 0.08634726715087891, 0.08648140716552734, 0.08671641540527343, 0.08586239624023438, 0.08545279693603515, 0.08617958068847656, 0.08873600006103516, 0.0860239715576172, 0.08582540893554688, 0.0860709457397461, 0.0858958740234375, 0.08666067504882813, 0.08648534393310547, 0.08689401245117187, 0.08664335632324219, 0.08640675354003906, 0.08624114990234374, 0.08629283142089844, 0.08632511901855469, 0.08704176330566406, 0.08586847686767578, 0.08662611389160156, 0.0861397476196289, 0.08648499298095703, 0.08623506927490235, 0.08734521484375, 0.08949289703369141, 0.08641187286376953, 0.0860917739868164, 0.08629580688476562, 0.08602105712890624, 0.0861304931640625, 0.08652166748046874, 0.08632675170898438, 0.08633363342285157, 0.08664476776123047, 0.08723027038574219, 0.08836576080322266, 0.08676057434082031, 0.086364990234375, 0.08621878051757813, 0.08618412780761718, 0.08577008056640625, 0.08610518646240234, 0.08643260955810547, 0.08590751647949219, 0.0860811538696289, 0.08624166107177735, 0.08627609252929687, 0.08697555541992187, 0.08590601348876953, 0.08608598327636718, 0.08605458831787109, 0.08626822662353516, 0.08730009460449219, 0.08631900787353515, 0.08627728271484375, 0.08613145446777344, 0.08602848052978515, 0.08624259185791015, 0.0863050537109375, 0.08562937927246093, 0.08591155242919922, 0.08564921569824219, 0.08566976165771484, 0.0856839370727539, 0.08612249755859375, 0.08561952209472656, 0.08616486358642578, 0.08564899444580078, 0.0863524169921875, 0.08577462768554688, 0.08582550048828125, 0.08557472229003907, 0.08564131164550781, 0.08569519805908203, 0.08570076751708984, 0.08583773040771485, 0.08585782623291016, 0.0861292495727539, 0.08582044982910156, 0.09052665710449219, 0.08688992309570312, 0.08607596588134765, 0.08588444519042969, 0.08618345642089843, 0.08577545928955078, 0.08570044708251953, 
0.08717731475830078, 0.08595651245117188, 0.08613887786865235, 0.08584146881103516, 0.08581983947753906, 0.08928982543945313, 0.08961526489257812, 0.08690796661376952, 0.08592598724365234, 0.08570146942138672, 0.08570256042480469, 0.08566178894042968, 0.08593987274169922, 0.08589126586914063, 0.0862732162475586, 0.08578352355957031, 0.08627200317382812, 0.08609382629394531, 0.08625270080566406, 0.08569340515136718, 0.08575526428222656, 0.08544921875, 0.08556339263916016, 0.08580300903320312, 0.08595478057861328, 0.08588617706298828, 0.08551507568359375, 0.08553651428222656, 0.08584575653076172, 0.08600717163085937, 0.08592912292480469, 0.08592524719238281, 0.08755999755859376, 0.08591561889648437, 0.08601190185546875, 0.08757360076904297, 0.08585702514648437, 0.08606511688232422, 0.08650771331787109, 0.08629248046875, 0.08703794860839843, 0.08618943786621094, 0.08616182708740235, 0.08625788879394532, 0.08569241333007813, 0.08568441772460937, 0.08564435577392578, 0.08608803558349609, 0.08556790161132813, 0.08585955047607421, 0.08616201782226562, 0.08658553314208985, 0.08570214080810547, 0.0857089614868164, 0.08588848114013672, 0.08585004425048828, 0.08562783813476563, 0.08546099090576172, 0.08532364654541015, 0.0858477783203125, 0.0856386260986328, 0.08590745544433594, 0.08606610870361328, 0.08598937225341798, 0.08617574310302735, 0.08599910736083985, 0.08603020477294922, 0.08586115264892578, 0.08571190643310547, 0.08562297821044922, 0.08578316497802735, 0.08577145385742188, 0.0857915496826172, 0.08660582733154297, 0.08579443359375, 0.08602867126464844, 0.0855367660522461, 0.08571084594726562, 0.08583302307128907, 0.0871119384765625, 0.08589888000488281, 0.08569436645507812, 0.08604354858398437, 0.08601830291748047, 0.08589024353027344, 0.08686386871337891, 0.0867474594116211, 0.08628249359130859, 0.08599913787841797, 0.08616393280029297, 0.08610201263427734, 0.0860979232788086, 0.08636812591552734, 0.08632537841796875, 0.0867962875366211, 0.08649292755126953, 0.08699314880371094, 0.08662774658203125, 0.08603497314453125, 0.08563922882080079, 0.08603218841552734, 0.08584825897216797, 0.08606301116943359, 0.08627823638916016, 0.08657046508789062, 0.08574620819091797, 0.08802713775634766, 0.09630713653564453, 0.08685072326660156, 0.0862053451538086, 0.08647065734863281, 0.08652799987792968, 0.08623104095458985, 0.08635596466064453, 0.08647679901123047, 0.08652329254150391, 0.08692182159423828, 0.08631321716308593, 0.08885363006591797, 0.08828173065185548, 0.08661309051513671, 0.08634051513671875, 0.08608521270751954, 0.08581366729736328, 0.08626697540283203, 0.08646749114990235, 0.08601296234130859, 0.08657599639892578, 0.08611545562744141, 0.0862260513305664, 0.08624428558349609, 0.08600665283203125, 0.08588697814941407, 0.08578460693359374, 0.08583168029785156, 0.0865947494506836, 0.08610428619384766, 0.08669833374023438, 0.08626611328125, 0.08616448211669922, 0.08615219116210937, 0.0861343994140625, 0.08592169952392578, 0.0860796127319336, 0.08688655853271485, 0.08817648315429688, 0.08617120361328125, 0.08573007965087891, 0.08608563232421874, 0.08596685028076172, 0.08603382110595703, 0.08621920013427735, 0.08612016296386718, 0.086392578125, 0.08575046539306641, 0.08601190185546875, 0.08578636932373047, 0.08667158508300782, 0.0862041244506836, 0.08606505584716796, 0.08602301025390625, 0.08640873718261718, 0.08601580810546874, 0.0863545913696289, 0.08603404998779297, 0.08600975799560547, 0.08606563568115234, 0.08597408294677734, 0.08566265869140625, 0.08554086303710938, 0.08567362976074219, 
0.08588697814941407, 0.0856817626953125, 0.08580134582519532, 0.08613689422607422, 0.08623955535888672, 0.08612249755859375, 0.08692326354980469, 0.0855873260498047, 0.08589516448974609, 0.0853592300415039, 0.0856651840209961, 0.08589987182617187, 0.08616448211669922, 0.08572621154785157, 0.08569187164306641, 0.08595254516601562, 0.08575145721435547, 0.08542499542236329, 0.0857343978881836, 0.0855203857421875, 0.08591871643066407, 0.08645836639404297, 0.08559410858154297, 0.08579071807861328, 0.08601805114746094, 0.08593730926513672, 0.08594025421142579, 0.08741766357421875, 0.08633958435058593, 0.08619827270507813, 0.08609382629394531, 0.08581097412109374, 0.08572950744628906, 0.08574531555175781, 0.08581510162353516, 0.08594429016113281, 0.08614729309082031, 0.0859241943359375, 0.08582553863525391, 0.08566349029541015, 0.08587606048583984, 0.08608656311035157, 0.08709529876708984, 0.08617369842529297, 0.08577433776855468, 0.08584601593017578, 0.08598937225341798, 0.08633958435058593, 0.08644758605957031, 0.0864156494140625, 0.08977171325683594, 0.08652652740478516, 0.08573587036132813, 0.0858419189453125, 0.08555513763427734, 0.08598729705810547, 0.08601609802246094, 0.08666726684570313, 0.08616748809814453, 0.08615939331054688, 0.09140022277832031, 0.08877581024169921, 0.08663948822021485, 0.08596479797363281, 0.08593612670898437, 0.0859279327392578, 0.08586975860595703, 0.08594255828857422, 0.08601187133789062, 0.09014534759521485, 0.08694697570800781, 0.08644694519042968, 0.08667340850830078, 0.08663859558105469, 0.08619213104248047, 0.08608972930908203, 0.08576322937011718, 0.08554911804199218, 0.09205635070800781, 0.08598300933837891, 0.08575929260253906, 0.08584220886230469, 0.08650559997558593, 0.08656531524658204, 0.08609510040283203, 0.08594879913330078, 0.0881395492553711, 0.08584464263916015, 0.08527667236328125, 0.08475590515136719, 0.085279296875, 0.08556102752685547, 0.0853957748413086, 0.08562687683105469, 0.08575385284423828, 0.08616140747070312, 0.08568422698974609, 0.08594815826416016, 0.08624972534179688, 0.08643574523925782, 0.08611644744873047, 0.0864086685180664, 0.08599606323242187, 0.08624537658691406, 0.08631283569335937, 0.08641085052490234, 0.08668787384033202, 0.08599593353271484, 0.08781619262695313, 0.08538240051269531, 0.08546380615234375, 0.08526643371582031, 0.08500224304199219, 0.08499394989013671, 0.08515555572509766, 0.08633737945556641, 0.0861247329711914, 0.08622211456298828, 0.08600662231445312, 0.08602828979492187, 0.08582745361328124, 0.08557170867919922, 0.0851630096435547, 0.08500940704345702, 0.08556364440917968, 0.08613657379150391, 0.08622489929199219, 0.08661811065673829, 0.08627200317382812, 0.08589266967773437, 0.08555359649658204, 0.08521727752685547, 0.08548265838623047, 0.08539440155029297, 0.08645619201660157, 0.08515174102783203, 0.08588428497314453, 0.08584595489501953, 0.08608019256591797, 0.08585587310791015, 0.08692976379394532, 0.08616963195800781, 0.0858071060180664, 0.0854302749633789, 0.08594226837158203, 0.08528076934814453, 0.08606428527832032, 0.08642438507080077, 0.0860191650390625, 0.08620127868652344, 0.08697650909423828, 0.08749056243896484, 0.08575955200195312, 0.086235107421875, 0.08657868957519531, 0.08590025329589844, 0.08587059020996093, 0.08669404602050781, 0.08668275451660157, 0.08636899566650391, 0.08649472045898438, 0.086185791015625, 0.08726726531982422, 0.08910310363769532, 0.0864353256225586, 0.08602265930175781, 0.08625711822509766, 0.08601216125488281, 0.08589955139160156, 0.08558153533935547, 
0.08568860626220703, 0.08536678314208984, 0.08646205139160157, 0.0858025894165039, 0.08837328338623048, 0.090519775390625, 0.08700780487060547, 0.08659712219238282, 0.08554291534423829, 0.08585590362548828, 0.08566429138183594, 0.08580076599121093, 0.08565257263183594, 0.08606585693359375, 0.08667510223388672, 0.08658592224121094, 0.08568812561035156, 0.08610221099853516, 0.08571459197998046, 0.08579065704345704, 0.08527910614013672, 0.08525417327880859, 0.08572313690185547, 0.08578781127929687, 0.08548953247070312, 0.08575830078125, 0.0862685775756836, 0.08539952087402344, 0.08537497711181641, 0.08829952239990234, 0.08625766754150391, 0.085870849609375, 0.0852559356689453, 0.08509439849853516, 0.08511228942871094, 0.08571753692626953, 0.08536988830566407, 0.08570979309082032, 0.08628633880615234, 0.08581938934326172, 0.08509235382080078, 0.08573542022705079, 0.08639692687988282, 0.0854466552734375, 0.08510578918457032, 0.0850481948852539, 0.08504319763183593, 0.08527625274658203, 0.08512745666503906, 0.08625369262695312, 0.08568217468261718, 0.08530316925048828, 0.08512115478515625, 0.08561049652099609, 0.08572918701171875, 0.08551570892333984, 0.08519132995605469, 0.08516575622558593, 0.085195068359375, 0.0852520980834961, 0.08539545440673828, 0.08598118591308594, 0.08642355346679688, 0.08571699523925781, 0.08553266906738281, 0.08582144165039063, 0.08556543731689453, 0.08517632293701172, 0.0852459487915039, 0.08496150207519532, 0.0854402847290039, 0.08671427154541016, 0.08672112274169921, 0.08591315460205078, 0.08597058868408203, 0.08561260986328124, 0.08592662048339844, 0.08760115051269532, 0.08549983978271485, 0.08535443115234374, 0.08549590301513672, 0.0855060806274414, 0.08562598419189453, 0.085897216796875, 0.08539043426513672, 0.08538697814941407, 0.08537094116210937, 0.08565055847167968, 0.08543679809570312, 0.0852833251953125, 0.08518450927734375, 0.08504873657226562, 0.08550665283203125, 0.08696553802490234, 0.08608214569091797, 0.08610771179199218, 0.08588678741455077, 0.08503103637695313, 0.0849188461303711, 0.08605068969726562, 0.08565779113769531, 0.08631852722167968, 0.08540013122558594, 0.08559407806396484, 0.08512515258789062, 0.08516182708740234, 0.08571087646484375, 0.09139558410644531, 0.08593436431884766, 0.08590121459960938, 0.08552022552490235, 0.08525885009765626, 0.08536883544921875, 0.0855367660522461, 0.08547942352294922, 0.0858603515625, 0.08564940643310547, 0.08612012481689453, 0.09058860778808593, 0.08724301147460937, 0.08628192138671875, 0.08631394958496094, 0.08634550476074218, 0.08613088226318359, 0.08628633880615234, 0.08621206665039062, 0.08572073364257812, 0.08573145294189453, 0.08588365173339843, 0.08588460540771484, 0.08641120147705078, 0.08596281433105468, 0.08551455688476563, 0.08557341003417969]",tokens/s,11.605109713250053,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,3982.00832,2147.352576,0.0,1744.830464,1736.37632,s,1,12.3091767578125,12.3091767578125,0.0,12.3091767578125,12.3091767578125,12.3091767578125,12.3091767578125,[12.3091767578125],,kWh,0.00015406139816670172,1.69861646581767e-05,5.240309747800431e-05,0.00022345066030288272,,MB,3992.383488,2390.622208,0.0,1967.128576,1910.987776,s,10,0.5557610855102539,0.055576108551025384,0.00026359540344872754,0.05544390296936035,0.055858928680419924,0.05600717544555664,0.05612577285766602,"[0.05581097412109375, 0.05537564849853516, 0.05615542221069336, 0.055643009185791015, 0.055825984954833985, 0.05526598358154297, 0.055436382293701174, 0.05545142364501953, 0.05536377716064453, 0.055432479858398435]",tokens/s,4606.2958827885905,kWh,1.6413118260063013e-06,1.8100751081553854e-07,8.913396643821938e-07,2.713659001204034e-06,tokens/kWh,94337571.4805782,MB,3996.594176,2411.593728,0.0,1988.100096,1972.65152,s,10,34.73163012695313,3.473163012695312,0.005784500419796451,3.4731156005859374,3.48041025390625,3.4810094482421876,3.4814888037109375,"[3.477383544921875, 3.477878173828125, 3.481608642578125, 3.480277099609375, 3.468337890625, 3.468698486328125, 3.46395849609375, 3.475328369140625, 3.47090283203125, 3.467256591796875]",tokens/s,18.139085257363,kWh,0.00010167472765858427,1.1214640075094667e-05,4.086916596621533e-05,0.00015375853369989432,tokens/kWh,409733.35582767264,,s,630,34.72922929000854,0.05512576077779135,0.0006333710157300173,0.054964591979980465,0.055644130325317384,0.05614704647064209,0.05815664363861084,"[0.05488844680786133, 0.054849536895751956, 0.05642649459838867, 0.05499289703369141, 0.054902782440185545, 0.05485772705078125, 0.05504934310913086, 0.055526046752929686, 0.05486409759521484, 0.054607872009277345, 0.05502479934692383, 0.055046367645263675, 0.05492595291137695, 0.05489868927001953, 0.0547696647644043, 0.05494169616699219, 0.055177310943603515, 0.05478960037231445, 0.055103935241699216, 0.05492899322509766, 0.055056800842285154, 0.05491222381591797, 0.05568387222290039, 0.058759166717529294, 0.056435935974121096, 0.055294048309326174, 0.05513238525390625, 0.05543574523925781, 0.05509452819824219, 0.05625494384765625, 0.054935840606689455, 0.05482700729370117, 0.05469148635864258, 0.054726272583007815, 0.05544972610473633, 0.055035774230957034, 0.05482332611083984, 0.055023937225341796, 0.05489664077758789, 0.05499820709228516, 0.05488518524169922, 0.05470003128051758, 0.05587558364868164, 0.05552537536621094, 0.05578710556030273, 0.055470497131347656, 0.055482368469238284, 0.05551308822631836, 0.05525657653808594, 0.0548787841796875, 0.054930591583251955, 0.054964576721191406, 0.05526777648925781, 0.05499628829956055, 0.05478470230102539, 0.05519696044921875, 0.05562851333618164, 0.05496611022949219, 0.054892704010009764, 0.05491686248779297, 0.05494784164428711, 0.05517318344116211, 0.0550709114074707, 0.05466080093383789, 0.0557509765625, 0.055156097412109376, 0.055618175506591795, 0.05466726303100586, 0.054746654510498045, 0.05540911865234375, 0.05614591979980469, 0.05576704025268555, 0.05582233428955078, 0.05564412689208984, 0.05564115142822266, 0.0551492805480957, 0.05599462509155274, 0.055779327392578126, 0.055166976928710934, 0.05568716812133789, 0.05538777542114258, 0.05535782241821289, 0.05508822250366211, 0.05530716705322265, 0.0554967041015625, 0.055236576080322265, 0.0552509765625, 0.05564416122436523, 0.05575993728637695, 0.05555091094970703, 0.055363582611083983, 0.05492940902709961, 
0.0549826545715332, 0.054900096893310546, 0.05499692916870117, 0.05483795166015625, 0.05483340835571289, 0.05496806335449219, 0.05468569564819336, 0.05472051239013672, 0.054687744140625, 0.054724609375, 0.05508480072021484, 0.055099647521972654, 0.05500652694702148, 0.05508780670166016, 0.05476147079467773, 0.05480857467651367, 0.055556095123291016, 0.05496627044677734, 0.05534515380859375, 0.05489254379272461, 0.0556195182800293, 0.05617452621459961, 0.05475750350952149, 0.055177215576171876, 0.054720096588134766, 0.05498915100097656, 0.054908992767333985, 0.05476883316040039, 0.054682430267333985, 0.054812671661376954, 0.05489651107788086, 0.055347328186035154, 0.05561654281616211, 0.0550283203125, 0.05475449752807617, 0.054970462799072264, 0.054952671051025394, 0.05652835083007812, 0.056301822662353514, 0.05708406448364258, 0.055449695587158204, 0.055739776611328125, 0.055302814483642576, 0.05500723266601563, 0.054932479858398435, 0.05511065673828125, 0.05496124649047852, 0.0555140151977539, 0.05702624130249023, 0.05493382263183594, 0.05497446441650391, 0.05578931045532227, 0.055969150543212894, 0.05518131256103516, 0.054909824371337894, 0.05812335968017578, 0.055117919921875, 0.054815425872802734, 0.05488246536254883, 0.05514767837524414, 0.05483808135986328, 0.054980480194091796, 0.05479436874389648, 0.054749183654785157, 0.05463449478149414, 0.054717857360839846, 0.05507952117919922, 0.05545574569702148, 0.054801441192626955, 0.05492780685424805, 0.054833694458007814, 0.05489664077758789, 0.05485356903076172, 0.05482892990112305, 0.05482310485839844, 0.05474508666992187, 0.05486336135864258, 0.05470048141479492, 0.05508070373535156, 0.05850300979614258, 0.055508480072021485, 0.05522940826416016, 0.05470188903808594, 0.05472809600830078, 0.05490972900390625, 0.05566873550415039, 0.05558272171020508, 0.05530624008178711, 0.05497977447509766, 0.054876609802246096, 0.05486774444580078, 0.05522444915771484, 0.05486640167236328, 0.05510521697998047, 0.05525945663452148, 0.054980609893798826, 0.05500223922729492, 0.05468364715576172, 0.055231937408447264, 0.05490950393676758, 0.055801185607910156, 0.05547689437866211, 0.05600255966186524, 0.05546963119506836, 0.055482814788818356, 0.05507660675048828, 0.05498857498168945, 0.0549442253112793, 0.05496012878417969, 0.057956096649169925, 0.05891078567504883, 0.0552729606628418, 0.05581673431396485, 0.054751201629638674, 0.054825023651123045, 0.055122016906738285, 0.05496425628662109, 0.055027454376220704, 0.05466547012329102, 0.054747135162353515, 0.05533817672729492, 0.054995777130126954, 0.05521203231811524, 0.05517087936401367, 0.054808193206787106, 0.05486342239379883, 0.05521100616455078, 0.055021472930908207, 0.05533910369873047, 0.055586814880371094, 0.055315681457519535, 0.0553664321899414, 0.05525020980834961, 0.055206623077392575, 0.05518867111206055, 0.05509817504882813, 0.05504204940795898, 0.054736576080322265, 0.05512223815917969, 0.05468108749389648, 0.05505484771728516, 0.05557228851318359, 0.055301406860351565, 0.05502659225463867, 0.0548570556640625, 0.05562345504760742, 0.05482995223999024, 0.05499289703369141, 0.054970367431640625, 0.05501862335205078, 0.05474745559692383, 0.05679980850219726, 0.05486582565307617, 0.055038047790527345, 0.05462015914916992, 0.054749183654785157, 0.05504185485839844, 0.05481900787353516, 0.054854846954345705, 0.05562047958374024, 0.05447884750366211, 0.054615230560302735, 0.054899265289306644, 0.0551283187866211, 0.05517689514160156, 0.054732192993164064, 0.05501545715332031, 
0.054551326751708984, 0.05456291198730469, 0.05483724975585937, 0.054659072875976565, 0.05457468795776367, 0.054702495574951174, 0.05475532913208008, 0.054760574340820316, 0.05468044662475586, 0.0550645751953125, 0.056057857513427733, 0.055242752075195314, 0.054889888763427735, 0.0553682861328125, 0.055129249572753905, 0.05481308746337891, 0.05466896057128906, 0.05495068740844727, 0.05465087890625, 0.05530559921264649, 0.05518924713134766, 0.055196544647216794, 0.05458124923706055, 0.05475942230224609, 0.055158782958984375, 0.0550645751953125, 0.054788097381591794, 0.054970367431640625, 0.05495596694946289, 0.05508639907836914, 0.054830848693847654, 0.05468467330932617, 0.05551103973388672, 0.05491097640991211, 0.05482665634155273, 0.05497875213623047, 0.05484560012817383, 0.055011070251464844, 0.0547669448852539, 0.05505231857299805, 0.0546824951171875, 0.05465497589111328, 0.054495166778564454, 0.05508832168579102, 0.054547328948974606, 0.05488435363769531, 0.05594112014770508, 0.05519267272949219, 0.05501136016845703, 0.05493840026855469, 0.059363422393798826, 0.05611724853515625, 0.055347007751464845, 0.055427265167236325, 0.056147968292236325, 0.05481881713867188, 0.05443183898925781, 0.054581024169921874, 0.054499713897705075, 0.05454207992553711, 0.05478582382202148, 0.05468620681762695, 0.05508259201049805, 0.057345630645751954, 0.05499987030029297, 0.05523807907104492, 0.054785728454589844, 0.05510847854614258, 0.05532262420654297, 0.05470563125610352, 0.054860321044921875, 0.05456281661987305, 0.054724609375, 0.05478313446044922, 0.05466147232055664, 0.054550495147705075, 0.054843936920166016, 0.054543647766113285, 0.0551943359375, 0.054814273834228516, 0.05492371368408203, 0.05529110336303711, 0.054858528137207034, 0.05477568054199219, 0.05489827346801758, 0.0550610237121582, 0.05469785690307617, 0.05468320083618164, 0.0548419189453125, 0.0548164176940918, 0.05989411163330078, 0.05524889755249023, 0.055021568298339846, 0.05504819107055664, 0.05520304107666016, 0.056242977142333984, 0.05514854431152344, 0.05479219055175781, 0.054848800659179686, 0.055126720428466794, 0.054669281005859376, 0.05497452926635742, 0.054847488403320314, 0.05481881713867188, 0.055097217559814456, 0.05485737609863281, 0.055554527282714844, 0.05504204940795898, 0.054779903411865234, 0.05489049530029297, 0.05469993591308594, 0.05493932723999023, 0.05496460723876953, 0.055088607788085935, 0.05498108673095703, 0.056447071075439455, 0.054834175109863284, 0.05501235198974609, 0.054877281188964844, 0.05515935897827148, 0.05525049591064453, 0.05714342498779297, 0.05557299041748047, 0.05460089492797852, 0.05498348617553711, 0.05464678573608398, 0.054865760803222655, 0.054607425689697266, 0.054731361389160155, 0.054834209442138675, 0.0547174072265625, 0.05507059097290039, 0.05478617477416992, 0.05464883041381836, 0.05525884628295898, 0.05466255950927734, 0.05458768081665039, 0.054633056640625, 0.0546693115234375, 0.055137439727783205, 0.055421791076660155, 0.05529600143432617, 0.055197151184082034, 0.05471644973754883, 0.05481644821166992, 0.05485644912719727, 0.05471392059326172, 0.054663681030273435, 0.054865921020507816, 0.054796287536621094, 0.05472444915771484, 0.054878368377685546, 0.05504528045654297, 0.05471420669555664, 0.05501766586303711, 0.05490892791748047, 0.05485609436035156, 0.054720321655273435, 0.05489443206787109, 0.055009662628173826, 0.05506032180786133, 0.05479465484619141, 0.0547452163696289, 0.05575062561035156, 0.05508713531494141, 0.05523251342773437, 0.05458729553222656, 
0.054742496490478514, 0.05496895980834961, 0.0548367691040039, 0.054782432556152345, 0.05522214508056641, 0.054921600341796876, 0.05490047836303711, 0.05496627044677734, 0.05494524765014648, 0.0551646728515625, 0.0571195182800293, 0.05493920135498047, 0.05473724746704101, 0.054732894897460936, 0.054798206329345706, 0.054392833709716794, 0.054812191009521484, 0.054588897705078125, 0.054889152526855466, 0.05467372894287109, 0.054831104278564455, 0.05526323318481445, 0.054831104278564455, 0.054857345581054685, 0.05493183898925781, 0.05508003234863281, 0.05475612640380859, 0.055019649505615234, 0.05511564636230469, 0.054814849853515625, 0.05482700729370117, 0.05453740692138672, 0.05466195297241211, 0.05462015914916992, 0.054681598663330076, 0.055228416442871096, 0.055191551208496094, 0.05467136001586914, 0.05466479873657227, 0.054868385314941405, 0.05482636642456055, 0.0547644157409668, 0.05477715301513672, 0.055058464050292966, 0.05496192169189453, 0.05927750396728516, 0.054962337493896486, 0.055589183807373044, 0.05499871826171875, 0.054866241455078124, 0.05500233459472656, 0.054774463653564455, 0.05522608184814453, 0.054817150115966794, 0.05504000091552735, 0.055228416442871096, 0.05488230514526367, 0.055113311767578124, 0.055900577545166016, 0.055053951263427735, 0.054813056945800784, 0.056136833190917966, 0.058170238494873044, 0.05744025421142578, 0.05549884796142578, 0.0566926383972168, 0.05662047958374023, 0.05481449508666992, 0.054738975524902346, 0.05466598510742188, 0.05482086563110351, 0.05463628768920899, 0.05480444717407226, 0.0547658576965332, 0.05489868927001953, 0.05542092895507812, 0.055244800567626956, 0.0549826545715332, 0.054623329162597656, 0.05476444625854492, 0.05462742233276367, 0.054729217529296874, 0.05489827346801758, 0.054919296264648435, 0.05464678573608398, 0.05479494476318359, 0.054919166564941405, 0.05533260726928711, 0.05479008102416992, 0.05516729736328125, 0.055398399353027344, 0.054833152770996096, 0.05488351821899414, 0.0549159049987793, 0.05558220672607422, 0.054794750213623046, 0.0550563850402832, 0.05502361679077149, 0.054604991912841794, 0.05462691116333008, 0.05533718490600586, 0.055529022216796876, 0.057575870513916015, 0.05511577606201172, 0.0547327995300293, 0.05468947219848633, 0.05452540969848633, 0.054770526885986326, 0.05510335922241211, 0.05478412628173828, 0.0548616943359375, 0.054720638275146484, 0.05645926284790039, 0.055654399871826174, 0.05486083221435547, 0.05485052871704101, 0.05488230514526367, 0.05481811141967773, 0.05460857772827148, 0.055152641296386716, 0.0547817268371582, 0.05541852951049805, 0.05539692687988281, 0.05491507339477539, 0.05488835144042969, 0.05473494338989258, 0.05513420867919922, 0.05494563293457031, 0.05496847915649414, 0.05493251037597656, 0.05570864105224609, 0.05569664001464844, 0.05513894271850586, 0.05621977615356445, 0.05489385604858398, 0.057235488891601564, 0.05481123352050781, 0.05491465759277344, 0.05497087860107422, 0.05508915328979492, 0.054902721405029296, 0.054583297729492185, 0.05462214279174805, 0.05497183990478516, 0.05471903991699219, 0.05481683349609375, 0.055092929840087894, 0.055517505645751954, 0.05483932876586914, 0.05540563201904297, 0.05510851287841797, 0.055623680114746096, 0.05525708770751953, 0.05537177658081055, 0.05523865509033203, 0.055087104797363284, 0.05476681518554687, 0.05491088104248047, 0.05514329528808594, 0.054712158203125, 0.05455068969726563, 0.05488435363769531, 0.054598880767822267, 0.0547479362487793, 0.054831104278564455, 0.0550645751953125, 0.05488790512084961, 
0.055681472778320314, 0.05616227340698242, 0.05495001602172851, 0.054747135162353515, 0.05455257415771484, 0.054975902557373044, 0.054806785583496095, 0.05555379104614258, 0.05470640182495117, 0.055285633087158205, 0.05462847900390625, 0.05455295944213867, 0.05508505630493164, 0.05459308624267578, 0.05471686553955078, 0.054712318420410154, 0.05440630340576172, 0.0557616958618164, 0.05499289703369141, 0.055599166870117185, 0.0551195182800293, 0.05513046264648438, 0.05515008163452149, 0.05512857437133789, 0.05504819107055664, 0.054868160247802736, 0.055121280670166015, 0.05507683181762695, 0.05533500671386719, 0.05575443267822266, 0.05500128173828125, 0.05563238525390625, 0.05488844680786133, 0.05491097640991211, 0.05505843353271484, 0.054978431701660155, 0.05496047973632812]",tokens/s,18.14033921510744,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,3982.565376,2147.352576,0.0,1744.830464,1736.37632,s,1,12.2262646484375,12.2262646484375,0.0,12.2262646484375,12.2262646484375,12.2262646484375,12.2262646484375,[12.2262646484375],,kWh,0.00015197141169587668,1.675611825076008e-05,5.16805969000278e-05,0.00022040812684666455,,MB,3994.243072,2390.622208,0.0,1967.128576,1910.987776,s,10,0.5686233291625976,0.05686233291625977,0.0008340210458139839,0.05658724784851074,0.05741770248413086,0.058280578994750976,0.058970880203247075,"[0.056333953857421876, 0.05685260772705078, 0.056285118103027346, 0.0591434555053711, 0.056635967254638674, 0.05711731338500976, 0.056538528442382815, 0.0572259521484375, 0.05613353729248047, 0.05635689544677734]",tokens/s,4502.101599964374,kWh,1.6613558135421763e-06,1.8320816138952013e-07,8.962712346932373e-07,2.7408352096249335e-06,tokens/kWh,93402185.9836776,MB,3998.441472,2411.593728,0.0,1988.100096,1972.65152,s,10,35.25484790039062,3.5254847900390622,0.0077741697800735735,3.52396923828125,3.534960986328125,3.538515283203125,3.541358720703125,"[3.542069580078125, 3.534171142578125, 3.523362060546875, 3.518064453125, 3.5291337890625, 3.52355224609375, 3.5187548828125, 3.527177490234375, 3.52438623046875, 3.514176025390625]",tokens/s,17.86988279682862,kWh,0.00010301321887938117,1.1362452292211566e-05,4.121842912370724e-05,0.00015559410029529998,tokens/kWh,404899.6708771935,,s,630,35.25213562774656,0.055955770837692986,0.000593369088017771,0.055780366897583006,0.05651822204589844,0.05687230434417725,0.058799331550598154,"[0.05569510269165039, 0.05572259140014649, 0.05573427200317383, 0.05576499176025391, 0.055531520843505856, 0.05578342437744141, 0.05570150375366211, 0.055877376556396484, 0.05632640075683594, 0.05589971160888672, 0.055862751007080075, 0.056056800842285155, 0.057796607971191405, 0.058398719787597655, 0.05732966232299805, 0.05667839813232422, 0.056379169464111326, 0.056065601348876955, 0.05593750381469727, 0.05590240097045898, 0.05566668701171875, 0.05574620819091797, 0.0555687370300293, 0.05575065612792969, 0.05564825439453125, 0.057229312896728515, 0.05714684677124023, 0.056662559509277344, 0.056272415161132815, 0.05666249465942383, 0.05740073776245117, 0.05723564910888672, 0.05693891143798828, 0.05676793670654297, 0.05672582244873047, 0.056516193389892576, 0.05672208023071289, 0.056678272247314455, 0.05620240020751953, 0.056271102905273436, 0.05621833419799805, 0.056170398712158204, 0.05585929489135742, 0.05573427200317383, 0.05556601715087891, 0.05626876831054688, 0.05634288024902344, 0.056360126495361325, 0.05573510360717773, 0.0557213134765625, 0.05565225601196289, 0.05554800033569336, 0.058132415771484376, 0.05658047866821289, 
0.05584291076660156, 0.05592704010009766, 0.05584598541259766, 0.05567103958129883, 0.055626399993896486, 0.055709598541259765, 0.055567840576171874, 0.05581887817382813, 0.05567212677001953, 0.0557886734008789, 0.05589891052246094, 0.05644047927856445, 0.055921089172363284, 0.055654399871826174, 0.05568716812133789, 0.05565235137939453, 0.05566668701171875, 0.05559910583496094, 0.055567489624023435, 0.05594406509399414, 0.05567609786987305, 0.05560335922241211, 0.05561337661743164, 0.05565513610839844, 0.05557215881347656, 0.05570291137695312, 0.05605267333984375, 0.055904129028320315, 0.05605388641357422, 0.05583798217773438, 0.058081920623779294, 0.05907670211791992, 0.05570150375366211, 0.055602783203125, 0.05543158340454102, 0.05541068649291992, 0.055578529357910154, 0.05574460983276367, 0.055826431274414064, 0.055725791931152346, 0.05566287994384766, 0.05567049789428711, 0.05547446441650391, 0.05587353515625, 0.05651248168945312, 0.056446369171142576, 0.05634521484375, 0.057235454559326174, 0.057222175598144534, 0.05679536056518555, 0.056559391021728515, 0.05666096115112305, 0.05658185577392578, 0.05641244888305664, 0.056420352935791014, 0.056594398498535155, 0.0563917121887207, 0.056049663543701174, 0.05884668731689453, 0.056201183319091796, 0.05557100677490234, 0.05575676727294922, 0.05629750442504883, 0.056162017822265625, 0.05584304046630859, 0.055721118927001954, 0.05564444732666016, 0.05543999862670899, 0.05545779037475586, 0.05558265686035156, 0.057196510314941405, 0.055631969451904295, 0.055625663757324216, 0.056251262664794924, 0.056156158447265625, 0.05600665664672851, 0.05603737640380859, 0.05571100616455078, 0.05585359954833984, 0.056506561279296874, 0.056076065063476566, 0.05588809585571289, 0.05549465560913086, 0.05596303939819336, 0.055619392395019535, 0.05561423873901367, 0.05555814361572266, 0.05554380798339844, 0.05551705551147461, 0.05556563186645508, 0.055661376953125, 0.05594083023071289, 0.056024768829345706, 0.05584956741333008, 0.05603737640380859, 0.05600662231445312, 0.055796897888183594, 0.0563487663269043, 0.05725059127807617, 0.05603513717651367, 0.055724224090576174, 0.05551232147216797, 0.055644672393798826, 0.05602944183349609, 0.05631094360351562, 0.0560792007446289, 0.055593982696533206, 0.05571583938598633, 0.055720382690429685, 0.056092510223388674, 0.05611180877685547, 0.055776832580566406, 0.05573823928833008, 0.05566729736328125, 0.05573427200317383, 0.05630771255493164, 0.055998046875, 0.05604985427856445, 0.05615798568725586, 0.05564390563964844, 0.0555357437133789, 0.055521278381347655, 0.05586592102050781, 0.05564412689208984, 0.05578140640258789, 0.05590016174316406, 0.05568038558959961, 0.05555059051513672, 0.05576867294311524, 0.05586166381835937, 0.05583603286743164, 0.05571033477783203, 0.05575680160522461, 0.05608556747436524, 0.058991550445556644, 0.056469280242919924, 0.05624790573120117, 0.0561255989074707, 0.055736801147460935, 0.05564416122436523, 0.05543731307983398, 0.05523603057861328, 0.05542559814453125, 0.05553561782836914, 0.05570150375366211, 0.05572528076171875, 0.056008480072021485, 0.05576115036010742, 0.056207454681396485, 0.05579980850219726, 0.05599913787841797, 0.05641116714477539, 0.056032222747802736, 0.05573836898803711, 0.055525215148925784, 0.05551065444946289, 0.05554025650024414, 0.05570560073852539, 0.055638015747070314, 0.05537286376953125, 0.05575740814208984, 0.05572192001342773, 0.055658912658691405, 0.05561139297485351, 0.055860862731933594, 0.0558287353515625, 0.055644287109375, 0.05557452774047852, 
0.05601875305175781, 0.056012958526611326, 0.05568310546875, 0.05614707183837891, 0.05543814468383789, 0.0557564811706543, 0.05796486282348633, 0.056350048065185544, 0.0560852165222168, 0.05575587081909179, 0.05571180725097656, 0.05579616165161133, 0.055965343475341794, 0.05595008087158203, 0.056139774322509765, 0.05612307357788086, 0.05625273513793945, 0.055810047149658204, 0.055779327392578126, 0.05660467147827149, 0.05571990585327148, 0.05583055877685547, 0.05575382232666016, 0.055583648681640625, 0.055500030517578125, 0.05557049560546875, 0.05567558288574219, 0.05556224060058594, 0.05542092895507812, 0.05559257507324219, 0.055417537689208984, 0.055785472869873044, 0.05613116836547852, 0.0565579833984375, 0.05586511993408203, 0.055869182586669924, 0.05654985427856445, 0.056602622985839846, 0.05613471984863281, 0.05629433441162109, 0.05604719924926758, 0.05594294357299805, 0.055869697570800785, 0.05572441482543945, 0.05558857727050781, 0.05570111846923828, 0.055914432525634765, 0.05617939376831055, 0.056172576904296875, 0.056536479949951174, 0.0561585922241211, 0.05643491363525391, 0.05605567932128906, 0.05590233612060547, 0.05626675033569336, 0.056745918273925784, 0.056328094482421875, 0.055834785461425784, 0.05575433731079102, 0.05567939376831055, 0.05574041748046875, 0.055631393432617186, 0.05564054489135742, 0.05565785598754883, 0.055644798278808597, 0.055818241119384764, 0.05854572677612305, 0.056264480590820315, 0.05657180786132812, 0.05646128082275391, 0.05635971069335938, 0.0562606086730957, 0.05658009719848633, 0.05573222351074219, 0.05568467330932617, 0.05561388778686523, 0.05570912170410156, 0.055686817169189454, 0.05648271942138672, 0.05583993530273437, 0.05582851028442383, 0.05586819076538086, 0.05590796661376953, 0.05600396728515625, 0.05580492782592773, 0.05557987213134766, 0.05537823867797852, 0.05542895889282227, 0.05586111831665039, 0.05582720184326172, 0.0560863037109375, 0.05576726531982422, 0.05559296035766602, 0.05535129547119141, 0.05568262481689453, 0.05672732925415039, 0.05536403274536133, 0.05519494247436523, 0.05546649551391602, 0.0554172477722168, 0.055521278381347655, 0.05554732894897461, 0.055589439392089844, 0.055820289611816405, 0.05556224060058594, 0.055755775451660154, 0.05639420700073242, 0.0561646728515625, 0.05586867141723633, 0.05564105606079101, 0.055449600219726565, 0.05576201629638672, 0.055296031951904294, 0.05623625564575195, 0.05535398483276367, 0.055319583892822266, 0.056115646362304684, 0.05567545700073242, 0.05554915237426758, 0.05541353607177734, 0.055529376983642575, 0.05541267013549805, 0.05553782272338867, 0.05555379104614258, 0.05639705657958984, 0.059638431549072266, 0.056283489227294925, 0.05581929779052734, 0.05556067276000977, 0.05559676742553711, 0.05532134246826172, 0.05628931045532227, 0.05538611221313477, 0.05541459274291992, 0.05542438507080078, 0.05558508682250977, 0.05579417419433594, 0.055504894256591795, 0.055564064025878906, 0.05589385604858398, 0.05559481430053711, 0.055508865356445315, 0.05839724731445312, 0.058912574768066404, 0.05657136154174805, 0.056443744659423825, 0.056257953643798826, 0.05592473602294922, 0.05640252685546875, 0.05670016098022461, 0.056199295043945316, 0.05589238357543945, 0.05614748764038086, 0.05600713729858398, 0.05562543869018555, 0.05595596694946289, 0.05570383834838867, 0.056016895294189455, 0.05558867263793945, 0.05570358276367188, 0.05634064102172852, 0.056159358978271484, 0.055589473724365235, 0.055645503997802735, 0.05547721481323242, 0.05641737747192383, 0.05574339294433594, 
0.05528575897216797, 0.05534310531616211, 0.05549260711669922, 0.055563678741455076, 0.05565014266967774, 0.05572480010986328, 0.05535129547119141, 0.05563334274291992, 0.05560377502441406, 0.05575030517578125, 0.05588617706298828, 0.05616332626342774, 0.056458240509033204, 0.05540454483032226, 0.05724979019165039, 0.057270271301269535, 0.056436737060546874, 0.05574607849121094, 0.055498367309570314, 0.05546889495849609, 0.055390209197998044, 0.05569945526123047, 0.055581855773925784, 0.05587849426269531, 0.05573836898803711, 0.05607958221435547, 0.055593246459960936, 0.05571430587768555, 0.055820289611816405, 0.0562973747253418, 0.05666121673583984, 0.05576287841796875, 0.055710655212402344, 0.055375873565673826, 0.056213504791259764, 0.05578953552246094, 0.05543836975097656, 0.055585792541503906, 0.05565353775024414, 0.05570646286010742, 0.05575449752807617, 0.05575702285766602, 0.055676959991455076, 0.05587081527709961, 0.055581375122070314, 0.0559694709777832, 0.055717567443847656, 0.05595996856689453, 0.05648624038696289, 0.056333438873291015, 0.056232673645019535, 0.05605990219116211, 0.055648353576660155, 0.055438209533691406, 0.05543308639526367, 0.055545982360839845, 0.055656448364257816, 0.055572479248046876, 0.05567897415161133, 0.05549260711669922, 0.055818016052246094, 0.05579315185546875, 0.05545852661132813, 0.05585715103149414, 0.056856575012207033, 0.05706547164916992, 0.05639311981201172, 0.05613951873779297, 0.055849632263183596, 0.056078529357910155, 0.0561868782043457, 0.05557452774047852, 0.05528985595703125, 0.05605295944213867, 0.05618268966674805, 0.05613363265991211, 0.060273536682128905, 0.05601279830932617, 0.056145889282226566, 0.056212799072265625, 0.055943904876708986, 0.055430816650390624, 0.05577763366699219, 0.056008705139160155, 0.05582233428955078, 0.05597087860107422, 0.056185791015625, 0.0569571533203125, 0.055648033142089846, 0.05551103973388672, 0.05536342239379883, 0.05566252899169922, 0.05589424133300781, 0.05648793411254883, 0.05597174453735351, 0.055893184661865235, 0.05570038223266602, 0.05568511962890625, 0.05678435134887695, 0.05606851196289062, 0.056045536041259766, 0.056549537658691404, 0.05586886215209961, 0.05598060989379883, 0.05703705596923828, 0.056249855041503906, 0.05596390533447266, 0.05559616088867188, 0.0555445442199707, 0.055688705444335934, 0.05551980972290039, 0.055639423370361325, 0.0556080322265625, 0.055524383544921875, 0.05552339172363281, 0.055470718383789065, 0.055619487762451174, 0.05592457580566406, 0.056783008575439456, 0.056240127563476565, 0.055526718139648434, 0.055968448638916014, 0.05634457778930664, 0.05621488189697266, 0.055616161346435544, 0.05568511962890625, 0.05583871841430664, 0.05561548614501953, 0.055959423065185546, 0.05580796813964844, 0.05585321426391601, 0.05578956985473633, 0.0556767692565918, 0.056096927642822265, 0.05558272171020508, 0.05594521713256836, 0.05629033660888672, 0.055838817596435546, 0.055489120483398435, 0.056037662506103515, 0.055760894775390625, 0.05572198486328125, 0.05566463851928711, 0.05551824188232422, 0.05574959945678711, 0.05540227127075195, 0.05592291259765625, 0.05568102264404297, 0.05571500778198242, 0.05557299041748047, 0.05646982574462891, 0.05771468734741211, 0.05687635040283203, 0.05657788848876953, 0.05686735916137695, 0.056105281829833986, 0.056075424194335935, 0.055636737823486326, 0.05614767837524414, 0.05586326217651367, 0.055691680908203124, 0.05576230239868164, 0.05581887817382813, 0.0555601921081543, 0.05573839950561523, 0.055788543701171874, 
0.05564924621582031, 0.055613441467285155, 0.055565567016601564, 0.05638016128540039, 0.055918369293212894, 0.05609904098510742, 0.05644902420043945, 0.055844863891601565, 0.05568921661376953, 0.055943168640136716, 0.05611110305786133, 0.05625148773193359, 0.055760639190673825, 0.05583193588256836, 0.055765888214111325, 0.055752704620361325, 0.05565849685668945, 0.05568844985961914, 0.05559884643554688, 0.055562271118164065, 0.0556717758178711, 0.05566668701171875, 0.055545665740966796, 0.0559617919921875, 0.05918105697631836, 0.05599990463256836, 0.05565910339355469, 0.05532035064697265, 0.05590156936645508, 0.055512992858886716, 0.055661502838134765, 0.05556140899658203, 0.05578220748901367, 0.05561958312988281, 0.055715713500976566, 0.055537792205810545, 0.055556095123291016, 0.05554697418212891, 0.05574544143676758, 0.05570560073852539, 0.05585715103149414, 0.056164608001708985, 0.05556403350830078, 0.055363521575927735, 0.05608038330078125, 0.05560099029541016, 0.0560928955078125, 0.055982078552246094, 0.05567679977416992, 0.05567500686645508, 0.055465984344482425, 0.055483936309814456, 0.05562771224975586, 0.05559759902954101, 0.05543862533569336, 0.05566128158569336, 0.05868339157104492, 0.0558221435546875, 0.05566185760498047, 0.05594204711914062, 0.055588863372802735, 0.05566815948486328, 0.055527454376220704, 0.055389854431152345, 0.05550940704345703, 0.05577116775512695, 0.05583657455444336, 0.05567855834960937, 0.05575161743164062, 0.055383327484130856, 0.05546211242675781, 0.055452159881591793, 0.05536703872680664, 0.05546867370605469, 0.05607526397705078]",tokens/s,17.871257692090964,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1069.400064,903.74144,0.0,501.219328,495.906816,s,1,8.08823583984375,8.08823583984375,0.0,8.08823583984375,8.08823583984375,8.08823583984375,8.08823583984375,[8.08823583984375],,kWh,3.234773552508158e-05,3.5610747075418088e-06,1.0518063970010871e-05,4.6426874202634265e-05,,MB,1381.855232,1067.319296,0.0,652.214272,602.748928,s,10,0.49010515594482423,0.049010515594482426,0.0003562068416459281,0.04897918319702148,0.049173652267456056,0.049582410621643064,0.049909417304992675,"[0.04999116897583008, 0.04903401565551758, 0.04878956985473633, 0.04897811126708984, 0.04907535934448242, 0.04908281707763672, 0.04870041656494141, 0.04872454452514648, 0.048748897552490233, 0.04898025512695312]",tokens/s,5223.36884023355,kWh,1.5754504957883449e-06,1.7369169127723373e-07,1.0414061497310935e-06,2.790548336796672e-06,tokens/kWh,91738242.4895989,MB,1390.211072,1079.902208,0.0,664.797184,611.073536,s,10,19.09695935058594,1.9096959350585938,0.009505097681421363,1.9110812377929687,1.922445654296875,1.9224631225585938,1.9224770971679688,"[1.912946533203125, 1.9160135498046875, 1.9224805908203124, 1.917328369140625, 1.8971890869140624, 1.9092159423828126, 1.90106640625, 1.9018583984375, 1.9224417724609375, 
1.896418701171875]",tokens/s,32.989545007366324,kWh,5.5582876927960786e-05,6.129849419828915e-06,2.182747325647043e-05,8.354019960426015e-05,tokens/kWh,754127.9563424373,,s,630,19.089618968963613,0.03030098249041845,0.000621728367839276,0.03022464084625244,0.03062401599884033,0.0307905704498291,0.032240317230224615,"[0.029768255233764647, 0.0300731201171875, 0.02999091148376465, 0.030394367218017578, 0.02993971252441406, 0.029949951171875, 0.030279680252075194, 0.02999001693725586, 0.029975135803222655, 0.02998262405395508, 0.03005820846557617, 0.030464672088623048, 0.03017932891845703, 0.030173120498657228, 0.03160825538635254, 0.030773887634277342, 0.03085923194885254, 0.030484512329101564, 0.030310400009155275, 0.030183008193969726, 0.030279712677001955, 0.030159231185913085, 0.03018489646911621, 0.03019139289855957, 0.03113235282897949, 0.03013039970397949, 0.030547840118408203, 0.03021004867553711, 0.030111711502075197, 0.03010668754577637, 0.030124000549316406, 0.03034992027282715, 0.030300575256347655, 0.030294015884399415, 0.03036476707458496, 0.03033135986328125, 0.030415231704711915, 0.03049283218383789, 0.03033283233642578, 0.030495840072631834, 0.030392927169799806, 0.030390464782714843, 0.030470272064208985, 0.03057663917541504, 0.03089606475830078, 0.030527551651000975, 0.03052044868469238, 0.030350175857543946, 0.030578367233276366, 0.030774879455566406, 0.030800640106201174, 0.030705663681030275, 0.03047327995300293, 0.030331552505493162, 0.0302574405670166, 0.030315935134887697, 0.030257312774658204, 0.030499263763427733, 0.030369792938232422, 0.030373888015747072, 0.030228479385375977, 0.03019491195678711, 0.030157024383544923, 0.029865888595581053, 0.030116064071655273, 0.03041766357421875, 0.03014019203186035, 0.03032819175720215, 0.03013104057312012, 0.030170944213867186, 0.030249151229858398, 0.03025027275085449, 0.030208063125610352, 0.030321311950683594, 0.03017478370666504, 0.030638399124145507, 0.030355583190917967, 0.030457984924316405, 0.030291072845458983, 0.03023523139953613, 0.03059267234802246, 0.03076259231567383, 0.030432159423828126, 0.030308351516723633, 0.030287456512451173, 0.030085535049438478, 0.030099456787109374, 0.030199583053588868, 0.030116064071655273, 0.030451711654663087, 0.03092633628845215, 0.03052604866027832, 0.030246816635131835, 0.030135583877563477, 0.030153440475463866, 0.030216480255126955, 0.03021945571899414, 0.030172895431518555, 0.030073055267333983, 0.03006934356689453, 0.03017728042602539, 0.030136320114135744, 0.030183040618896484, 0.030283519744873047, 0.03005523109436035, 0.030170976638793947, 0.02990652847290039, 0.030099359512329102, 0.030361568450927735, 0.030331392288208008, 0.03020185661315918, 0.030159040451049803, 0.030055839538574217, 0.030173599243164064, 0.030709600448608397, 0.030410911560058592, 0.03046928024291992, 0.03080841636657715, 0.030222848892211916, 0.030305440902709962, 0.030335359573364258, 0.030521631240844727, 0.03447395324707031, 0.03135919952392578, 0.032446720123291015, 0.03063577651977539, 0.03036329650878906, 0.03042252731323242, 0.030376800537109373, 0.030273120880126955, 0.030355871200561522, 0.030338432312011717, 0.030397344589233398, 0.030582496643066406, 0.030631935119628906, 0.03063759994506836, 0.0302903995513916, 0.030365695953369142, 0.030588960647583006, 0.03083247947692871, 0.030619775772094727, 0.030561376571655273, 0.03054889678955078, 0.030533632278442382, 0.030491968154907227, 0.030470848083496094, 0.030424991607666017, 0.03062588882446289, 0.03100057601928711, 
0.030693376541137695, 0.030531583786010744, 0.030734527587890626, 0.030490432739257813, 0.03079167938232422, 0.030611455917358397, 0.0307957763671875, 0.030572223663330077, 0.030799840927124022, 0.030472415924072266, 0.030380064010620118, 0.030396320343017577, 0.03028326416015625, 0.030401472091674805, 0.030287616729736327, 0.030468095779418947, 0.03027574348449707, 0.030439264297485353, 0.030489919662475586, 0.03033363151550293, 0.030330591201782227, 0.030353408813476562, 0.030343488693237306, 0.030725088119506836, 0.030654880523681642, 0.03047235107421875, 0.030399999618530273, 0.03036809539794922, 0.030579296112060547, 0.030472192764282226, 0.030416608810424805, 0.03035696029663086, 0.030479360580444335, 0.030449472427368163, 0.030725727081298827, 0.03046403121948242, 0.030803936004638672, 0.030692863464355468, 0.030261375427246093, 0.030449792861938475, 0.029800960540771484, 0.03032304000854492, 0.03000713539123535, 0.030535680770874023, 0.032110496520996096, 0.030463199615478515, 0.03036454391479492, 0.030064640045166017, 0.030158432006835937, 0.03037385559082031, 0.03007049560546875, 0.02999350357055664, 0.029964479446411132, 0.03007276725769043, 0.030314815521240233, 0.030306047439575195, 0.030068735122680663, 0.030105600357055663, 0.030023679733276368, 0.030078304290771483, 0.030261920928955077, 0.030250112533569337, 0.030338207244873048, 0.030447328567504883, 0.030449663162231445, 0.030068735122680663, 0.03054204750061035, 0.030306079864501952, 0.030046207427978516, 0.030234943389892577, 0.02998851203918457, 0.030165023803710937, 0.030048255920410157, 0.030065759658813477, 0.02998681640625, 0.030004127502441406, 0.029980255126953126, 0.029956031799316406, 0.03003664016723633, 0.02999283218383789, 0.03011782455444336, 0.02998271942138672, 0.030293024063110352, 0.03012281608581543, 0.030006784439086914, 0.030323360443115236, 0.030262271881103517, 0.030127103805541993, 0.0301844482421875, 0.030022655487060547, 0.030058303833007814, 0.030023872375488283, 0.030075967788696287, 0.03132003211975098, 0.04146684646606445, 0.030494720458984374, 0.030789215087890624, 0.030682912826538088, 0.030372224807739257, 0.030311712265014647, 0.030663232803344726, 0.030490528106689452, 0.030239231109619142, 0.030231103897094727, 0.030255456924438477, 0.030265344619750976, 0.030054399490356445, 0.0301345272064209, 0.029968128204345704, 0.03006572723388672, 0.03005961608886719, 0.030144128799438476, 0.02999728012084961, 0.03010326385498047, 0.029964448928833008, 0.02992153549194336, 0.03009129524230957, 0.029936479568481444, 0.030077632904052735, 0.03009328079223633, 0.03012643241882324, 0.030188735961914064, 0.03011568069458008, 0.03008118438720703, 0.03010643196105957, 0.03009449577331543, 0.030178144454956056, 0.0305511360168457, 0.030511199951171877, 0.030179712295532228, 0.030214784622192382, 0.02997228813171387, 0.030165216445922852, 0.030252832412719727, 0.03022751998901367, 0.030075263977050782, 0.030050880432128908, 0.0301441593170166, 0.02998838424682617, 0.029920320510864257, 0.029892351150512694, 0.02996633529663086, 0.029919008255004882, 0.0299616641998291, 0.030025632858276367, 0.029976703643798827, 0.029917951583862304, 0.030025056838989258, 0.02999158477783203, 0.02988044738769531, 0.029962112426757812, 0.029913087844848633, 0.030257152557373046, 0.030265344619750976, 0.030205472946166993, 0.030019424438476563, 0.029917823791503907, 0.029997055053710937, 0.030013696670532226, 0.030207359313964843, 0.03041433525085449, 0.03020070457458496, 0.03022643280029297, 0.030040063858032227, 
0.030106943130493165, 0.03041462326049805, 0.029991680145263672, 0.03029216003417969, 0.03011564826965332, 0.03017728042602539, 0.030205120086669923, 0.029963071823120118, 0.02990224075317383, 0.03007753562927246, 0.029941024780273436, 0.029948640823364257, 0.029865503311157227, 0.029970720291137697, 0.030013824462890627, 0.029921087265014648, 0.029952064514160156, 0.030139455795288084, 0.030173696517944337, 0.03027574348449707, 0.030426464080810546, 0.030274431228637697, 0.0301711368560791, 0.031676416397094724, 0.03018342399597168, 0.030316064834594727, 0.030189855575561524, 0.030350591659545897, 0.030860223770141602, 0.03097769546508789, 0.030459552764892577, 0.03036435127258301, 0.03044534492492676, 0.03024835205078125, 0.030792160034179686, 0.03036716842651367, 0.030660831451416015, 0.03033977508544922, 0.030298112869262695, 0.03018956756591797, 0.030201120376586912, 0.03011043167114258, 0.03022038459777832, 0.030125152587890624, 0.030226848602294923, 0.030097183227539064, 0.030145151138305664, 0.030138368606567382, 0.030134048461914063, 0.030137792587280273, 0.030349184036254882, 0.030440319061279298, 0.030355072021484374, 0.03061596870422363, 0.030369951248168946, 0.03025494384765625, 0.030191200256347656, 0.030301984786987306, 0.03035785675048828, 0.030536096572875978, 0.030300031661987303, 0.030584735870361326, 0.030763328552246092, 0.030374975204467775, 0.030423072814941405, 0.029736127853393555, 0.03026419258117676, 0.030341119766235353, 0.030621696472167968, 0.03053152084350586, 0.030357471466064455, 0.030085216522216796, 0.03008412742614746, 0.030548095703125, 0.03019219207763672, 0.02992291259765625, 0.0301712646484375, 0.03016352081298828, 0.030248416900634765, 0.030374431610107423, 0.03040239906311035, 0.03004431915283203, 0.030186912536621095, 0.030079231262207032, 0.0306375675201416, 0.03003887939453125, 0.03015475273132324, 0.030148479461669923, 0.030091392517089845, 0.02997452735900879, 0.030085119247436523, 0.02981068801879883, 0.030288896560668944, 0.03029916763305664, 0.03098591995239258, 0.030679487228393556, 0.030260480880737305, 0.03024342346191406, 0.030113887786865235, 0.030426847457885743, 0.030380159378051757, 0.030093376159667968, 0.030265344619750976, 0.030299936294555664, 0.030013120651245118, 0.03004265594482422, 0.029896703720092774, 0.02983670425415039, 0.029809247970581054, 0.0298374080657959, 0.030077888488769532, 0.02981177520751953, 0.029896608352661135, 0.029833215713500977, 0.02986524772644043, 0.02987104034423828, 0.029894432067871093, 0.030618911743164064, 0.03009529685974121, 0.030036991119384765, 0.03009040069580078, 0.030106208801269532, 0.03011177635192871, 0.03000934410095215, 0.030107776641845704, 0.03005753517150879, 0.030286079406738282, 0.030576288223266603, 0.032955390930175785, 0.031744415283203126, 0.03067750358581543, 0.03377731323242188, 0.030656383514404296, 0.03004419136047363, 0.030082719802856445, 0.030218656539916993, 0.03008355140686035, 0.029921279907226563, 0.030056512832641602, 0.0299355525970459, 0.03003129577636719, 0.03008176040649414, 0.030025632858276367, 0.029961408615112303, 0.029938175201416017, 0.02992348861694336, 0.029845407485961914, 0.0298984317779541, 0.02985366439819336, 0.02999977684020996, 0.029945728302001953, 0.030742528915405274, 0.030259071350097658, 0.030148128509521484, 0.030210655212402345, 0.03021824073791504, 0.029995008468627928, 0.03025436782836914, 0.030491199493408203, 0.030464160919189454, 0.030015296936035156, 0.03021843147277832, 0.029931072235107423, 0.03034566307067871, 
0.02983888053894043, 0.029872608184814454, 0.02984102439880371, 0.02977609634399414, 0.029747360229492186, 0.030039968490600585, 0.03007279968261719, 0.029822656631469727, 0.02981865692138672, 0.02994451141357422, 0.029915103912353514, 0.029871423721313475, 0.029825183868408205, 0.029850143432617188, 0.029808256149291994, 0.02991756820678711, 0.029816543579101563, 0.029716096878051757, 0.030390687942504883, 0.030486015319824217, 0.029988639831542967, 0.030011680603027342, 0.02994041633605957, 0.030037343978881834, 0.030097728729248048, 0.029929536819458008, 0.030001440048217774, 0.030283424377441408, 0.03038038444519043, 0.03046601676940918, 0.030438432693481444, 0.030213119506835938, 0.030069759368896484, 0.030041088104248048, 0.030042112350463866, 0.030109695434570313, 0.030093311309814453, 0.03014041519165039, 0.031122976303100586, 0.03229334259033203, 0.030728479385375977, 0.030489696502685546, 0.030647968292236327, 0.03043984031677246, 0.030445375442504884, 0.030370367050170897, 0.03028937530517578, 0.030357919692993163, 0.0302728328704834, 0.03037696075439453, 0.03041689682006836, 0.030215648651123046, 0.030328351974487303, 0.03036073684692383, 0.030345087051391603, 0.030279712677001955, 0.030384063720703125, 0.030340383529663086, 0.030378047943115234, 0.030745248794555664, 0.03062380790710449, 0.030283424377441408, 0.0305435848236084, 0.030457984924316405, 0.03059548759460449, 0.030297887802124022, 0.03028188705444336, 0.03041084861755371, 0.0301627197265625, 0.03027699279785156, 0.030401376724243163, 0.030332223892211914, 0.030679744720458986, 0.03062950325012207, 0.0304019832611084, 0.03128211212158203, 0.0305513916015625, 0.03042108726501465, 0.030261823654174805, 0.030341215133666992, 0.030453664779663086, 0.030283103942871092, 0.030575263977050782, 0.030310400009155275, 0.0302523193359375, 0.030410560607910156, 0.03030108833312988, 0.030572511672973632, 0.030363679885864258, 0.03477667236328125, 0.029952096939086913, 0.030451711654663087, 0.03043436813354492, 0.030812255859375, 0.030379776000976563, 0.030318368911743165, 0.030148832321166993, 0.030201536178588867, 0.030091167449951172, 0.030214303970336913, 0.03008742332458496, 0.030014720916748047, 0.030069568634033202, 0.030029760360717774, 0.03040460777282715, 0.03043436813354492, 0.030251968383789064, 0.030301727294921876, 0.030126560211181642, 0.030048255920410157, 0.030084224700927736, 0.029883264541625976, 0.029970432281494142, 0.02994380760192871, 0.03030191993713379, 0.03001372718811035, 0.030011392593383788, 0.030019584655761718, 0.029859840393066408, 0.030032127380371094, 0.030023679733276368, 0.02991798400878906, 0.029875167846679686, 0.02993324851989746, 0.03025542449951172, 0.030449663162231445, 0.02998476791381836, 0.030296064376831053, 0.03026697540283203, 0.029915552139282226, 0.02993971252441406, 0.029957408905029296, 0.029929952621459963, 0.02988876724243164, 0.03002566337585449, 0.02992902374267578, 0.029960704803466798, 0.029920927047729494, 0.029884960174560545, 0.03012588882446289, 0.030078975677490235, 0.030066719055175783, 0.029941247940063476, 0.02989104080200195, 0.029875200271606447, 0.02996486473083496, 0.029929664611816405, 0.030200063705444338, 0.029955808639526366, 0.030011743545532227, 0.02997228813171387, 0.030047903060913084, 0.0301746883392334]",tokens/s,33.00223021864761,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,1929.732096,1065.222144,0.0,662.700032,622.833664,s,1,8.8538447265625,8.8538447265625,0.0,8.8538447265625,8.8538447265625,8.8538447265625,8.8538447265625,[8.8538447265625],,kWh,5.428605320829168e-05,5.980901350611859e-06,1.736779167199609e-05,7.763474623089963e-05,,MB,1986.322432,1188.954112,0.0,765.46048,733.871104,s,10,0.6210003852844238,0.06210003852844238,0.00027209658479981176,0.062149072647094725,0.062435297012329104,0.06245847988128662,0.062477026176452635,"[0.06189369583129883, 0.06184771347045898, 0.061527744293212894, 0.06221353530883789, 0.06248166275024414, 0.06221311950683594, 0.062430145263671875, 0.062254623413085936, 0.062053119659423825, 0.06208502578735352]",tokens/s,4122.380695186681,kWh,1.8376646032762545e-06,2.0266298682417727e-07,8.013301309307629e-07,2.8416577210311946e-06,tokens/kWh,90088260.13961367,MB,1986.94912,1201.537024,0.0,775.94624,748.240384,s,10,38.209515869140624,3.8209515869140622,0.02542276615628889,3.822981201171875,3.8526896484375,3.855232763671875,3.8572672558593752,"[3.77501708984375, 3.7844375, 3.8304482421875, 3.8169599609375, 3.840097900390625, 3.85212451171875, 3.85777587890625, 3.82496484375, 3.82099755859375, 3.8066923828125]",tokens/s,16.488039318729253,kWh,0.00011268361034463734,1.2429188304850034e-05,3.968300781206678e-05,0.00016479580646155414,tokens/kWh,382291.2812693296,,s,630,38.20391799926764,0.060641139681377104,0.0008353711782173449,0.06051820755004883,0.06138549842834473,0.06178992519378662,0.06363191326141358,"[0.05950041580200195, 0.06007139205932617, 0.05979212951660156, 0.05991443252563477, 0.05989577484130859, 0.05988336181640625, 0.059694976806640626, 0.06007596969604492, 0.06013132858276367, 0.05963999938964844, 0.05967043304443359, 0.05993072128295898, 0.06106220626831055, 0.06009254455566406, 0.059964576721191404, 0.05999785614013672, 0.059850975036621096, 0.05988534545898438, 0.05962319946289062, 0.060063968658447264, 0.06046105575561524, 0.05988556671142578, 0.0598364143371582, 0.05966995239257813, 0.05988764953613281, 0.060142112731933595, 0.05968838500976562, 0.05993926239013672, 0.05986892700195313, 0.06080995178222656, 0.0600266227722168, 0.059912094116210936, 0.059807743072509766, 0.05982608032226563, 0.05998355102539062, 0.05985526275634766, 0.059774974822998046, 0.05992179107666016, 0.059980255126953125, 0.059814048767089845, 0.05965820693969726, 0.06025235366821289, 0.059719615936279294, 0.059821983337402344, 0.05955564880371094, 0.05996563339233398, 0.06011695861816406, 0.061249568939208986, 0.06034028625488281, 0.059721664428710936, 0.05972195053100586, 0.05961500930786133, 0.05949276733398438, 0.05983395385742187, 0.059695262908935544, 0.059561214447021484, 0.05947452926635742, 0.05950259017944336, 0.05986643218994141, 0.059554496765136716, 0.05964992141723633, 0.05986931228637695, 0.06033414459228516, 0.060250400543212894, 
0.060087711334228515, 0.05993913650512695, 0.059666431427001954, 0.05976166534423828, 0.05983974456787109, 0.0596228141784668, 0.059676673889160155, 0.05933808135986328, 0.05934912109375, 0.05957926559448242, 0.06002604675292969, 0.05973619079589844, 0.0608590087890625, 0.060126304626464844, 0.059632606506347656, 0.05965414428710938, 0.060575008392333984, 0.05972566223144531, 0.059835262298583984, 0.05950259017944336, 0.059753822326660155, 0.05971011352539062, 0.0595682258605957, 0.06443011474609375, 0.05952902221679687, 0.05971152114868164, 0.059895839691162106, 0.05974774551391602, 0.059517406463623045, 0.05950064086914063, 0.0597088623046875, 0.05981779098510742, 0.05974095916748047, 0.060477054595947266, 0.060469825744628905, 0.059832321166992185, 0.059735870361328124, 0.06546451568603516, 0.0617790412902832, 0.06025878524780273, 0.05984899139404297, 0.05966435241699219, 0.06000761413574219, 0.06023187255859375, 0.06211199951171875, 0.059743968963623044, 0.05958127975463867, 0.059696384429931644, 0.05962771224975586, 0.05978572845458984, 0.060116001129150394, 0.060359519958496095, 0.05971510314941406, 0.05994156646728516, 0.05996908950805664, 0.059805408477783206, 0.05965670394897461, 0.05961318588256836, 0.0596357421875, 0.05970723342895508, 0.05997580718994141, 0.059701248168945314, 0.0590964469909668, 0.060330078125, 0.059959487915039064, 0.06087027359008789, 0.06066803359985352, 0.06099824142456055, 0.060368896484375, 0.06054912185668945, 0.06014976119995117, 0.0600513916015625, 0.06011846542358398, 0.06023263931274414, 0.06428771209716796, 0.060969470977783206, 0.06072691345214844, 0.060414016723632814, 0.060537376403808595, 0.060655166625976566, 0.060682369232177735, 0.06068870544433594, 0.06085868835449219, 0.06209049606323242, 0.060435134887695315, 0.0603583984375, 0.06039068984985352, 0.060343360900878905, 0.06046796798706055, 0.060322559356689454, 0.060408287048339844, 0.06040707015991211, 0.06925526428222656, 0.060690689086914065, 0.06048979187011719, 0.060352512359619144, 0.060418048858642576, 0.06062099075317383, 0.06095443344116211, 0.061375873565673825, 0.06175398254394531, 0.061868030548095705, 0.06129663848876953, 0.06131916809082031, 0.06078073501586914, 0.06059926223754883, 0.06030543899536133, 0.06014585494995117, 0.06056537628173828, 0.060590686798095705, 0.060292896270751954, 0.06036640167236328, 0.06035948944091797, 0.060469249725341796, 0.060407455444335935, 0.06046262359619141, 0.06136124801635742, 0.06067987060546875, 0.06082988739013672, 0.06068204879760742, 0.060631072998046875, 0.06076416015625, 0.06032179260253906, 0.060276737213134764, 0.06020505523681641, 0.05990399932861328, 0.06011862564086914, 0.06042051315307617, 0.06077584075927735, 0.06069705581665039, 0.06058918380737305, 0.06069548797607422, 0.060448833465576175, 0.0603504638671875, 0.06117987060546875, 0.06043033599853516, 0.06025423812866211, 0.0602534065246582, 0.06102265548706055, 0.06035491180419922, 0.0612287368774414, 0.060956127166748045, 0.06096368026733399, 0.06048796844482422, 0.06092156982421875, 0.06212396621704101, 0.06076422500610352, 0.06042828750610352, 0.06081740951538086, 0.06025830459594726, 0.061134239196777344, 0.06076278305053711, 0.0604015998840332, 0.06023510360717774, 0.06025283050537109, 0.060055809020996095, 0.06036608123779297, 0.060297569274902346, 0.06070083236694336, 0.06016614532470703, 0.06054905700683594, 0.0604420166015625, 0.06054291152954101, 0.06045766448974609, 0.061007904052734374, 0.06062080001831055, 0.060284191131591794, 0.060969696044921876, 
0.061159423828125, 0.060194816589355465, 0.06040396881103516, 0.060251903533935544, 0.06046870422363281, 0.06045779037475586, 0.06066556930541992, 0.06115532684326172, 0.06040902328491211, 0.060515262603759765, 0.06074915313720703, 0.060383777618408206, 0.06047737503051758, 0.06071507263183594, 0.06057107162475586, 0.06032457733154297, 0.06091350555419922, 0.060393470764160156, 0.060356510162353515, 0.06015804672241211, 0.06041481781005859, 0.06117465591430664, 0.06104975891113281, 0.06070470428466797, 0.060878273010253905, 0.06060825729370117, 0.060647518157958984, 0.06141535949707031, 0.061188255310058594, 0.061456993103027345, 0.06070684814453125, 0.06266873550415039, 0.06291286468505859, 0.06200700759887695, 0.061311134338378905, 0.06096249771118164, 0.060676254272460935, 0.061117633819580075, 0.0605601921081543, 0.06307567977905273, 0.06058665466308594, 0.0608851203918457, 0.06070259094238281, 0.06077590560913086, 0.06092854309082031, 0.06059193420410156, 0.060260704040527344, 0.061058494567871095, 0.06172835159301758, 0.06104556655883789, 0.0611146240234375, 0.0612525749206543, 0.06060268783569336, 0.060309982299804686, 0.06071091079711914, 0.06085254287719727, 0.060520160675048826, 0.06048934555053711, 0.06043222427368164, 0.0602149772644043, 0.06104966354370117, 0.06012723159790039, 0.06019071960449219, 0.06021529769897461, 0.06051772689819336, 0.06147139358520508, 0.060649120330810546, 0.06077679824829101, 0.0630786247253418, 0.06079414367675781, 0.06158927917480469, 0.06012112045288086, 0.06041820907592774, 0.06054147338867188, 0.06076403045654297, 0.06070899200439453, 0.06077417755126953, 0.06127990341186523, 0.06085065460205078, 0.06090966415405273, 0.06096694564819336, 0.060663585662841794, 0.06046915054321289, 0.06025558471679687, 0.06033235168457031, 0.06258748626708985, 0.060747039794921874, 0.060759777069091796, 0.060569793701171874, 0.06042486572265625, 0.06103148651123047, 0.061389759063720704, 0.061472766876220705, 0.061298591613769535, 0.06140326309204101, 0.06170825576782227, 0.06547660827636718, 0.06145552062988281, 0.06129238510131836, 0.061217601776123044, 0.06150572967529297, 0.06120000076293945, 0.06159193420410156, 0.06112051010131836, 0.06101401519775391, 0.061400318145751955, 0.06129705429077149, 0.0615316162109375, 0.06233996963500977, 0.06058003234863281, 0.06093190383911133, 0.06084985733032226, 0.06057984161376953, 0.06096736145019531, 0.060841182708740234, 0.06077711868286133, 0.06076006317138672, 0.061099807739257814, 0.06103411102294922, 0.060704544067382814, 0.060904254913330076, 0.060967105865478516, 0.0611162223815918, 0.061439262390136716, 0.06125363159179688, 0.06177865600585938, 0.06095478439331055, 0.060890975952148436, 0.06103244781494141, 0.06106732940673828, 0.06056313705444336, 0.06043862533569336, 0.06061686325073242, 0.06042595291137695, 0.06059030532836914, 0.06084979248046875, 0.060268672943115234, 0.06069180679321289, 0.06074057769775391, 0.06178406524658203, 0.06092915344238281, 0.0614901123046875, 0.060854209899902346, 0.061378623962402346, 0.061333633422851565, 0.061611774444580075, 0.0602828483581543, 0.06074524688720703, 0.06131558227539063, 0.06093580627441406, 0.061430145263671875, 0.060856510162353515, 0.06074310302734375, 0.060950592041015626, 0.06110870361328125, 0.06087776184082031, 0.06122751998901367, 0.06179471969604492, 0.060631038665771485, 0.06071920013427735, 0.060895233154296874, 0.060908897399902344, 0.06070534515380859, 0.060674049377441405, 0.06071295928955078, 0.06074163055419922, 0.06133760070800781, 
0.061472991943359374, 0.06103017425537109, 0.061306526184082034, 0.06193612670898437, 0.06128214263916015, 0.06132643127441406, 0.06141952133178711, 0.06097603225708008, 0.060985343933105465, 0.06106316757202149, 0.06123929595947265, 0.061288478851318356, 0.06094649505615234, 0.060712863922119144, 0.06087478256225586, 0.060641246795654295, 0.06107353591918945, 0.06122646331787109, 0.0609747200012207, 0.06129500961303711, 0.06128678512573242, 0.061196609497070314, 0.06149292755126953, 0.060959007263183596, 0.06056313705444336, 0.06223875045776367, 0.06348339080810547, 0.06255996704101563, 0.06125344085693359, 0.061172126770019535, 0.061332321166992186, 0.060907230377197266, 0.061385025024414064, 0.061257408142089846, 0.06613811492919922, 0.06144204711914063, 0.061396766662597656, 0.0611940803527832, 0.06121011352539062, 0.06093449783325195, 0.06061724853515625, 0.06050643157958984, 0.059827808380126954, 0.06015564727783203, 0.06027536010742188, 0.06003472137451172, 0.06008063888549805, 0.05997868728637695, 0.06026655960083008, 0.060425056457519534, 0.06023168182373047, 0.06059542465209961, 0.060846561431884764, 0.06084403228759765, 0.06056537628173828, 0.06118880081176758, 0.06125241470336914, 0.061397953033447264, 0.061279712677001955, 0.06148764801025391, 0.06096691131591797, 0.06131232070922851, 0.06369257736206055, 0.06114944076538086, 0.06078531265258789, 0.06052969741821289, 0.060695518493652345, 0.06118787384033203, 0.06104921722412109, 0.06069868850708008, 0.06084790420532227, 0.060491519927978514, 0.06037324905395508, 0.06032524871826172, 0.06046815872192383, 0.06026435089111328, 0.05999785614013672, 0.06033216094970703, 0.06026031875610351, 0.06033615875244141, 0.060353569030761715, 0.06249887847900391, 0.06054083251953125, 0.06045183944702148, 0.06066339111328125, 0.061868736267089844, 0.060727008819580076, 0.06042214584350586, 0.06052044677734375, 0.06095872116088867, 0.060467201232910155, 0.06052249526977539, 0.06099148941040039, 0.06116556930541992, 0.06062723159790039, 0.06033071899414062, 0.06027299118041992, 0.06028559875488281, 0.06017843246459961, 0.06025945663452149, 0.060982143402099606, 0.060737216949462894, 0.0605676155090332, 0.060724830627441405, 0.06077718353271484, 0.06018867111206055, 0.060639041900634766, 0.06047897720336914, 0.060076416015625, 0.06016748809814453, 0.06012198257446289, 0.05982854461669922, 0.06042639923095703, 0.06018620681762695, 0.06039344024658203, 0.06035014343261719, 0.060733089447021486, 0.061032737731933594, 0.060719295501708986, 0.0605186882019043, 0.0603054084777832, 0.06098470306396484, 0.060538654327392576, 0.0600709114074707, 0.06031753540039062, 0.06009151840209961, 0.060341121673583985, 0.0607457275390625, 0.06043270492553711, 0.06045888137817383, 0.060206558227539064, 0.06013167953491211, 0.06046003341674805, 0.061925537109375, 0.061956798553466794, 0.060672126770019534, 0.060819393157958986, 0.06052463912963867, 0.060622142791748046, 0.06068703842163086, 0.061120223999023435, 0.06098076629638672, 0.06111616134643555, 0.061326625823974606, 0.060988449096679685, 0.060854976654052734, 0.06028902435302735, 0.060468830108642575, 0.06116806411743164, 0.061274078369140624, 0.06130588912963867, 0.06211491012573242, 0.06158937454223633, 0.06108979034423828, 0.061087745666503906, 0.06140108871459961, 0.06127584075927734, 0.06119222259521485, 0.06039334487915039, 0.059916030883789065, 0.06009632110595703, 0.06003535842895508, 0.060464927673339844, 0.059967361450195315, 0.060148288726806644, 0.060109375, 0.06013520050048828, 
0.06037513732910156, 0.05951356887817383, 0.06008940887451172, 0.06016460800170898, 0.060467647552490233, 0.06229196929931641, 0.06024979019165039, 0.059908096313476565, 0.06025247955322265, 0.060020030975341795, 0.06016889572143555, 0.05997158432006836, 0.060098209381103516, 0.06001913452148438, 0.06020518493652344, 0.060192543029785155, 0.060280414581298826, 0.06029660797119141, 0.060289825439453125, 0.05997795104980469, 0.062058494567871096, 0.06294876861572266, 0.060832286834716795, 0.06049971389770508, 0.060246337890625, 0.060165153503417966, 0.06040879821777344, 0.06055049514770508, 0.060384929656982424, 0.06032806396484375, 0.0602789421081543, 0.060017696380615236, 0.060432064056396485, 0.060157054901123046, 0.060715232849121094, 0.06054761505126953, 0.06037263870239258, 0.060557697296142576, 0.06032144165039063, 0.06015436935424805, 0.06007295989990234, 0.06009689712524414, 0.060061599731445314, 0.06014838409423828, 0.06046739196777344, 0.06039532852172851, 0.060423999786376956, 0.06042214584350586, 0.06048316955566406, 0.060211807250976565, 0.060631038665771485, 0.060544513702392576, 0.06047795104980469, 0.0603770866394043, 0.060281150817871096, 0.06017331314086914, 0.060168094635009765, 0.060044158935546876, 0.06231849670410156, 0.06043670272827149, 0.06038460922241211, 0.060350849151611326, 0.06032595062255859, 0.06042214584350586]",tokens/s,16.490455246293795,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in 
decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports 
Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward 
output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,1135.828992,717.094912,0.0,314.5728,299.62752,s,1,8.04080224609375,8.04080224609375,0.0,8.04080224609375,8.04080224609375,8.04080224609375,8.04080224609375,[8.04080224609375],,kWh,2.5225621108393172e-05,2.773670688345568e-06,8.583062421990695e-06,3.658235421872943e-05,,MB,1384.206336,826.146816,0.0,402.653184,359.612416,s,10,0.3155899543762207,0.03155899543762208,0.0003328186298239102,0.031408672332763676,0.03182567749023437,0.032156647491455075,0.032421423492431635,"[0.03248761749267578, 0.031503040313720705, 0.03142707252502441, 0.031377439498901365, 0.03158409690856934, 0.03132758331298828, 0.031351327896118164, 0.03139027214050293, 0.03138937568664551, 0.031752128601074216]",tokens/s,8111.791787099077,kWh,9.238278342431859e-07,1.018816951787367e-07,3.808028714494225e-07,1.4065124008713452e-06,tokens/kWh,182010481.98466367,MB,1422.876672,851.31264,0.0,425.721856,360.53248,s,10,19.227000244140626,1.9227000244140626,0.012302689566814492,1.9184745483398438,1.9325067749023437,1.9438855895996092,1.9529886413574218,"[1.955264404296875, 1.91815966796875, 1.9299781494140624, 1.9243385009765626, 1.91556640625, 1.9187894287109375, 1.915211669921875, 1.9137991943359376, 1.92601416015625, 1.909878662109375]",tokens/s,32.76642180269336,kWh,5.6099851865340643e-05,6.187504682553337e-06,1.9540740843549125e-05,8.18280973914431e-05,tokens/kWh,769906.6947460031,,s,630,19.222075931549092,0.03051123163737948,0.0005538295083693536,0.030357359886169433,0.030919724082946777,0.0313039719581604,0.03190252988815308,"[0.031178976058959963, 0.03168329620361328, 0.03186879920959473, 0.03171939277648926, 0.031347871780395505, 0.031255104064941405, 0.031071935653686523, 0.031406848907470704, 0.03130572891235352, 0.031133216857910158, 0.031195615768432616, 0.03145840072631836, 0.031886240005493165, 0.03131427192687988, 0.03164944076538086, 0.03163750457763672, 0.031583744049072264, 0.03161071968078613, 0.0318002872467041, 0.03153068733215332, 0.031809759140014646, 0.03128809547424317, 0.031425535202026365, 0.031307775497436525, 0.0312542724609375, 0.0312968635559082, 0.031301824569702145, 0.031271839141845705, 0.03127884864807129, 0.031148288726806642, 0.03129120063781738, 0.03174531173706055, 0.031189632415771485, 0.031121503829956054, 0.030907487869262694, 0.030632863998413085, 0.03048031997680664, 0.03047126388549805, 0.03050748825073242, 0.030463520050048827, 0.030481279373168944, 0.030541568756103515, 0.03093948745727539, 0.030657663345336914, 0.030429344177246093, 0.03045248031616211, 0.030503904342651367, 0.030647552490234375, 0.03039206314086914, 0.03047158432006836, 0.030593631744384765, 0.030530656814575195, 0.030417823791503908, 0.030371200561523436, 0.030628511428833008, 0.030486495971679687, 0.030699520111083983, 0.03080726432800293, 
0.030569568634033203, 0.03053536033630371, 0.030515199661254884, 0.030514335632324217, 0.030747488021850587, 0.03010304069519043, 0.030439487457275392, 0.03035318374633789, 0.030354080200195314, 0.030257343292236328, 0.03031449508666992, 0.030345216751098632, 0.0302902717590332, 0.030313600540161134, 0.03028022384643555, 0.030236671447753907, 0.030875648498535156, 0.03041001510620117, 0.030718944549560548, 0.030381919860839844, 0.030498559951782227, 0.031213983535766602, 0.030323776245117187, 0.03033718490600586, 0.030534175872802733, 0.030354944229125977, 0.030394239425659178, 0.03038060760498047, 0.030521600723266602, 0.030337087631225584, 0.030301952362060548, 0.030331968307495117, 0.030580831527709962, 0.03020502471923828, 0.03043712043762207, 0.030597312927246094, 0.03054316711425781, 0.030466560363769532, 0.03056550407409668, 0.030360448837280275, 0.030287647247314455, 0.030559648513793947, 0.03031942367553711, 0.03074662399291992, 0.030517536163330077, 0.03037980842590332, 0.03032444763183594, 0.030320064544677734, 0.030408992767333984, 0.030975744247436522, 0.03042585563659668, 0.030470239639282228, 0.030338048934936523, 0.03069011116027832, 0.030490720748901367, 0.03029132843017578, 0.03055241584777832, 0.030390880584716798, 0.030355136871337892, 0.03031884765625, 0.03032156753540039, 0.03073427200317383, 0.030515743255615235, 0.030318368911743165, 0.03047279930114746, 0.03037001609802246, 0.03036297607421875, 0.030431583404541017, 0.031909183502197264, 0.030479103088378905, 0.030291967391967774, 0.03050489616394043, 0.030597152709960936, 0.03053926467895508, 0.030394912719726563, 0.03059712028503418, 0.03101046371459961, 0.030798175811767577, 0.030381792068481444, 0.03048854446411133, 0.03033900833129883, 0.03127731132507324, 0.030500064849853514, 0.03036476707458496, 0.030459808349609374, 0.030724000930786134, 0.030539295196533204, 0.03038265609741211, 0.03098320007324219, 0.030422208786010742, 0.030293535232543946, 0.03028326416015625, 0.030323551177978515, 0.030286720275878906, 0.030241119384765626, 0.03032124710083008, 0.030755903244018553, 0.03071843147277832, 0.03031292724609375, 0.030509376525878908, 0.03037151908874512, 0.035595680236816404, 0.03470214462280274, 0.03047999954223633, 0.03034569549560547, 0.030293312072753906, 0.030278112411499022, 0.030289823532104493, 0.030281824111938478, 0.03018332862854004, 0.03021788787841797, 0.030326847076416016, 0.030459808349609374, 0.030247039794921875, 0.030738687515258788, 0.030344512939453124, 0.030284479141235353, 0.030361024856567383, 0.030282623291015626, 0.03033692741394043, 0.030263296127319338, 0.030474016189575195, 0.03049395179748535, 0.03024086380004883, 0.030337152481079103, 0.030296607971191405, 0.030678016662597656, 0.03070857620239258, 0.030636512756347656, 0.03052128028869629, 0.030379615783691406, 0.03023232078552246, 0.031857471466064456, 0.031018688201904298, 0.03725270462036133, 0.03183657646179199, 0.030635616302490235, 0.030767520904541015, 0.030554304122924803, 0.03049388885498047, 0.030343807220458985, 0.03027667236328125, 0.03023587226867676, 0.030176191329956054, 0.0302968635559082, 0.030356607437133788, 0.030894975662231445, 0.030487615585327147, 0.030413759231567382, 0.03073811149597168, 0.030283935546875, 0.03024483108520508, 0.03044406318664551, 0.030357152938842773, 0.030279680252075194, 0.030268863677978517, 0.030204479217529296, 0.030465471267700196, 0.030241632461547853, 0.030503679275512695, 0.030538688659667967, 0.03026688003540039, 0.03022287940979004, 0.030343103408813476, 
0.030294143676757812, 0.030443456649780272, 0.030732288360595703, 0.030296480178833008, 0.030340703964233398, 0.03029408073425293, 0.03030419158935547, 0.03020128059387207, 0.03019830322265625, 0.03015273666381836, 0.03019990348815918, 0.030205503463745117, 0.030305984497070313, 0.030243776321411134, 0.030236543655395506, 0.03068886375427246, 0.03034137535095215, 0.030362655639648437, 0.0302728328704834, 0.03020150375366211, 0.03033497619628906, 0.030617664337158203, 0.03031785583496094, 0.03035366439819336, 0.03035366439819336, 0.03030985641479492, 0.03034796714782715, 0.03036288070678711, 0.03047500801086426, 0.030504320144653322, 0.030346271514892578, 0.03057468795776367, 0.030351615905761718, 0.030445375442504884, 0.030414880752563475, 0.030274335861206054, 0.03029520034790039, 0.03028054428100586, 0.03040870475769043, 0.030296287536621093, 0.030269216537475586, 0.031042943954467772, 0.03143948745727539, 0.030560480117797852, 0.030491552352905273, 0.030591232299804687, 0.030396448135375977, 0.030722143173217774, 0.030408256530761717, 0.030360511779785156, 0.03045577621459961, 0.030222496032714843, 0.03030726432800293, 0.03047724723815918, 0.030372928619384766, 0.030269472122192383, 0.03034204864501953, 0.030339359283447265, 0.03032444763183594, 0.030255104064941408, 0.03027078437805176, 0.030404863357543947, 0.03035795211791992, 0.030408031463623048, 0.03054863929748535, 0.03032678413391113, 0.03030124855041504, 0.03047420883178711, 0.03018047904968262, 0.03047612762451172, 0.03031449508666992, 0.030523391723632814, 0.030479423522949217, 0.030386783599853515, 0.030226816177368165, 0.030128288269042968, 0.0302827205657959, 0.030134336471557617, 0.03104207992553711, 0.03071820831298828, 0.030486495971679687, 0.03043916893005371, 0.03046224021911621, 0.03037401580810547, 0.030238592147827148, 0.030204256057739257, 0.030344863891601563, 0.03017308807373047, 0.03016326332092285, 0.030192480087280274, 0.03014956855773926, 0.03020697593688965, 0.03031318473815918, 0.030364479064941406, 0.030289920806884765, 0.030119487762451172, 0.030267839431762696, 0.030466047286987305, 0.030894079208374024, 0.030537727355957032, 0.03049203109741211, 0.030368383407592774, 0.03150643157958984, 0.030485792160034178, 0.03028860855102539, 0.03028976058959961, 0.030240928649902344, 0.030961280822753907, 0.030316320419311524, 0.030286272048950194, 0.030263456344604492, 0.030209728240966797, 0.030267711639404296, 0.0307488956451416, 0.030841888427734374, 0.030487295150756835, 0.030225824356079102, 0.03025161552429199, 0.030437376022338865, 0.030274784088134766, 0.03039516830444336, 0.030191936492919923, 0.03073356819152832, 0.030321088790893555, 0.030365152359008787, 0.03029462432861328, 0.03039151954650879, 0.03020675277709961, 0.03030575942993164, 0.03025494384765625, 0.030357376098632812, 0.03032713508605957, 0.03073206329345703, 0.03082464027404785, 0.030930784225463866, 0.030868064880371093, 0.03086726379394531, 0.030625823974609376, 0.030658720016479492, 0.030660608291625976, 0.030387392044067384, 0.030217023849487306, 0.030241024017333983, 0.03034511947631836, 0.030748512268066405, 0.030284832000732422, 0.030744863510131837, 0.030388927459716795, 0.03054182434082031, 0.03023468780517578, 0.030412736892700194, 0.030217632293701172, 0.03029462432861328, 0.030357503890991212, 0.03017932891845703, 0.030223840713500975, 0.03018956756591797, 0.030278783798217773, 0.030268287658691405, 0.030309375762939454, 0.030314720153808594, 0.030235424041748046, 0.03061759948730469, 0.030198944091796874, 0.03022729682922363, 
0.030424863815307616, 0.030267072677612306, 0.03028646469116211, 0.030201759338378906, 0.03023052787780762, 0.030256992340087892, 0.030316320419311524, 0.030707616806030274, 0.030200288772583007, 0.030216512680053712, 0.030265024185180664, 0.03018342399597168, 0.030249280929565428, 0.03058857536315918, 0.030457664489746093, 0.03187084770202637, 0.030455711364746094, 0.030931072235107424, 0.030326271057128908, 0.030222400665283203, 0.03044633674621582, 0.03024403190612793, 0.030237503051757812, 0.030338495254516602, 0.030198463439941405, 0.030299840927124025, 0.030344671249389648, 0.03043152046203613, 0.030236352920532228, 0.03117647933959961, 0.030372831344604494, 0.030265695571899415, 0.030281375885009766, 0.03035273551940918, 0.030296575546264647, 0.030404767990112304, 0.03020627212524414, 0.030387903213500978, 0.03021174430847168, 0.030322528839111327, 0.0306080322265625, 0.03063382339477539, 0.030556415557861327, 0.03052694320678711, 0.030308832168579103, 0.03037164878845215, 0.030609439849853516, 0.030595104217529298, 0.030340320587158204, 0.030339807510375977, 0.030451711654663087, 0.030533344268798827, 0.03025574493408203, 0.030252544403076172, 0.030029855728149413, 0.030375904083251953, 0.030224384307861327, 0.030287872314453124, 0.030242816925048828, 0.03028713607788086, 0.030356128692626952, 0.03027769660949707, 0.030426464080810546, 0.030237344741821288, 0.03030841636657715, 0.030400543212890624, 0.030219743728637696, 0.030443967819213866, 0.03038572883605957, 0.03028348731994629, 0.03038105583190918, 0.030612319946289063, 0.030303359985351563, 0.030294784545898436, 0.030351808547973633, 0.030369535446166992, 0.0302476806640625, 0.030193536758422852, 0.030359743118286132, 0.030785535812377928, 0.030500288009643554, 0.030313024520874022, 0.030529855728149414, 0.030193344116210937, 0.030246912002563478, 0.03031865692138672, 0.030404895782470704, 0.030338720321655275, 0.03040460777282715, 0.030281728744506835, 0.030409791946411132, 0.03034422492980957, 0.030416799545288087, 0.030302207946777345, 0.030399551391601564, 0.03072643280029297, 0.030562976837158203, 0.03035055923461914, 0.030323488235473633, 0.030285823822021486, 0.03037593650817871, 0.030328832626342773, 0.030240768432617186, 0.030312448501586913, 0.030438880920410156, 0.030308895111083985, 0.03035539245605469, 0.03034499168395996, 0.030566688537597655, 0.03042099189758301, 0.030306304931640625, 0.030323776245117187, 0.030755584716796874, 0.03045347213745117, 0.030419424057006837, 0.030586624145507814, 0.030443775177001954, 0.0300992317199707, 0.03027180862426758, 0.030626144409179688, 0.031057472229003905, 0.030318687438964844, 0.030312448501586913, 0.030357343673706055, 0.03031260871887207, 0.030453760147094725, 0.03340214538574219, 0.031742431640625, 0.031154624938964842, 0.03522880172729492, 0.03074345588684082, 0.030721599578857423, 0.03067740821838379, 0.03067091178894043, 0.030414144515991212, 0.030312896728515625, 0.030281471252441405, 0.03015500831604004, 0.030229631423950194, 0.030163583755493165, 0.030392799377441406, 0.03038800048828125, 0.03033241653442383, 0.030212799072265626, 0.030661888122558593, 0.03027529525756836, 0.030585695266723632, 0.03024403190612793, 0.030284608840942383, 0.03021004867553711, 0.0302508487701416, 0.030206111907958983, 0.030228256225585937, 0.030298688888549804, 0.030270496368408204, 0.03020182418823242, 0.030280351638793945, 0.03097536087036133, 0.030334848403930664, 0.030264064788818358, 0.030523391723632814, 0.03069660758972168, 0.03027440071105957, 0.0303569278717041, 
0.030382623672485353, 0.030283327102661132, 0.030766944885253906, 0.030394943237304687, 0.03057619285583496, 0.030419456481933595, 0.030281312942504884, 0.030300575256347655, 0.030314559936523437, 0.030349248886108397, 0.030337024688720703, 0.03039641571044922, 0.03038355255126953, 0.030918495178222656, 0.030298368453979492, 0.030661088943481445, 0.030097408294677733, 0.030216192245483397, 0.030175392150878905, 0.03024892807006836, 0.03024883270263672, 0.030250303268432616, 0.030202272415161133, 0.03012944030761719, 0.030126367568969727, 0.03036204719543457, 0.030392608642578124, 0.030436416625976563, 0.03030873680114746, 0.030284351348876953, 0.030250335693359377, 0.030274208068847657, 0.030289920806884765, 0.030176992416381835, 0.03024515151977539, 0.03022195243835449, 0.032396736145019533, 0.030444351196289063, 0.030347391128540038, 0.030300159454345704, 0.030270496368408204, 0.03024380874633789, 0.030406015396118164, 0.030531583786010744, 0.030378623962402342, 0.03032268714904785, 0.030295263290405272, 0.030318944931030274, 0.030263263702392577, 0.030245344161987306, 0.030238719940185548, 0.030273536682128906, 0.030261056900024414, 0.03022060775756836, 0.030260671615600587, 0.03027395248413086, 0.030211103439331054, 0.030229631423950194, 0.030269088745117186, 0.030319936752319337, 0.030196128845214845, 0.030433855056762694, 0.030279199600219728, 0.03037392044067383, 0.030842239379882813, 0.030151679992675783, 0.030233663558959963, 0.030161151885986327, 0.03018943977355957, 0.030171968460083007, 0.0301977596282959, 0.03019366455078125, 0.030167104721069336, 0.030195520401000975, 0.030138111114501952, 0.030172895431518555, 0.030112512588500978, 0.030223840713500975, 0.030681535720825194]",tokens/s,32.77481590664123,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,4007.66976,2159.935488,0.0,1757.413376,1736.37632,s,1,12.946537109375,12.946537109375,0.0,12.946537109375,12.946537109375,12.946537109375,12.946537109375,[12.946537109375],,kWh,0.0001532590866625166,1.6898542413566058e-05,5.2706708832006566e-05,0.00022286433790808923,,MB,4017.831936,2399.010816,0.0,1973.420032,1922.784256,s,10,0.6346965141296388,0.06346965141296387,0.0002951635719479483,0.06343950271606445,0.0639192123413086,0.06393914222717285,0.06395508613586426,"[0.0639147834777832, 0.06395907211303711, 0.06331375885009766, 0.06302089691162109, 0.06323385620117188, 0.06322534561157227, 0.06356524658203125, 0.06325344085693359, 0.06360208129882812, 0.0636080322265625]",tokens/s,4033.423759244268,kWh,1.8450650907756659e-06,2.0347640285175766e-07,1.0319417759371543e-06,3.0804832695645777e-06,tokens/kWh,83103843.6498911,MB,4017.831936,2419.982336,0.0,1994.391552,1971.314176,s,10,38.407959960937504,3.8407959960937506,0.01028176956904602,3.83710107421875,3.856882836914062,3.8581397827148436,3.859145339355469,"[3.856603515625, 3.859396728515625, 3.849652099609375, 3.830092041015625, 
3.836884521484375, 3.835456298828125, 3.837317626953125, 3.8415068359375, 3.829861572265625, 3.831188720703125]",tokens/s,16.40284984260388,kWh,0.00011101749406880441,1.2245363472895501e-05,4.392689974666217e-05,0.00016718975728836205,tokens/kWh,376817.34229292633,,s,630,38.40542854690555,0.06096099769350082,0.0007100863123148767,0.06078459167480468,0.06150458145141602,0.06193844451904297,0.06429679054260254,"[0.060679519653320316, 0.06080377578735351, 0.061145183563232425, 0.0627608642578125, 0.06177382278442383, 0.06149324798583984, 0.061177696228027344, 0.06118352127075195, 0.0612127685546875, 0.06161648178100586, 0.061129150390625, 0.06680899047851563, 0.06098614501953125, 0.06086832046508789, 0.06097520065307617, 0.06092537689208984, 0.06071705627441406, 0.061039169311523436, 0.06086860656738281, 0.06126182556152344, 0.06058393478393555, 0.06116147232055664, 0.06096486282348633, 0.06113260650634766, 0.06154608154296875, 0.06116412734985351, 0.06121881484985352, 0.06133939361572266, 0.0606927375793457, 0.060680191040039064, 0.06081536102294922, 0.06049740982055664, 0.060647937774658205, 0.06078851318359375, 0.0610203857421875, 0.061287776947021484, 0.062292640686035156, 0.06162636947631836, 0.06127964782714844, 0.06116412734985351, 0.06112051010131836, 0.06220185470581055, 0.06106240081787109, 0.06100044631958008, 0.06114713668823242, 0.060722366333007816, 0.06057043075561523, 0.060690433502197265, 0.06058598327636719, 0.06057923126220703, 0.060684864044189456, 0.06058732986450195, 0.06187286376953125, 0.060778175354003906, 0.060649791717529294, 0.060985343933105465, 0.06093388748168945, 0.061586753845214844, 0.0615181770324707, 0.06092041778564453, 0.06210547256469726, 0.061646976470947266, 0.06105702209472656, 0.06115532684326172, 0.06072524642944336, 0.06095872116088867, 0.060997215270996094, 0.061044479370117186, 0.06138262557983398, 0.060723903656005856, 0.06057366561889648, 0.06133967971801758, 0.0609279670715332, 0.060821151733398436, 0.06066864013671875, 0.060937759399414065, 0.061489280700683595, 0.061597312927246094, 0.061063583374023435, 0.06096688079833985, 0.06086064147949219, 0.06113183975219726, 0.06095254516601562, 0.06087955093383789, 0.0618803825378418, 0.06109593582153321, 0.06075139236450195, 0.061207008361816403, 0.06098329544067383, 0.060543201446533204, 0.06071273422241211, 0.06103244781494141, 0.06253747177124024, 0.06242329788208008, 0.061920417785644534, 0.06193443298339844, 0.062362945556640625, 0.06173766326904297, 0.06166495895385742, 0.061323486328125, 0.061943904876708984, 0.06126588821411133, 0.06110246276855469, 0.0610873908996582, 0.06186707305908203, 0.061207168579101565, 0.06127449417114258, 0.06082320022583008, 0.06068252944946289, 0.06062720108032227, 0.060815105438232424, 0.06042591857910156, 0.06055984115600586, 0.060454753875732424, 0.060878849029541014, 0.0607295036315918, 0.06182486343383789, 0.06136003112792969, 0.060739009857177735, 0.06059638214111328, 0.0626646728515625, 0.06385631942749023, 0.06149955368041992, 0.06144480133056641, 0.06290646362304687, 0.06121200180053711, 0.06120560073852539, 0.061348766326904294, 0.06075769424438476, 0.06109823989868164, 0.06093532943725586, 0.06169283294677735, 0.06269132614135742, 0.06464096069335938, 0.06047897720336914, 0.06038179016113281, 0.06109795379638672, 0.061166912078857424, 0.06058873748779297, 0.06094198226928711, 0.0613004150390625, 0.0609983024597168, 0.06094233703613281, 0.061051006317138674, 0.06074486541748047, 0.06136905670166016, 0.06098688125610351, 0.06113536071777344, 
0.06102582550048828, 0.060842048645019534, 0.060690433502197265, 0.06079324722290039, 0.060813312530517576, 0.06061856079101562, 0.06059820938110352, 0.06088320159912109, 0.06428659057617188, 0.06149737548828125, 0.061327457427978516, 0.06090137481689453, 0.061134849548339844, 0.060547359466552736, 0.06055059051513672, 0.0607410888671875, 0.06083667373657226, 0.060682239532470705, 0.060568737030029296, 0.06058041763305664, 0.06055276870727539, 0.06142025756835937, 0.060786495208740236, 0.06050630569458008, 0.06202684783935547, 0.06258979034423828, 0.06091782379150391, 0.06053411102294922, 0.06071363067626953, 0.061232799530029296, 0.06076393508911133, 0.06074211120605469, 0.06073331069946289, 0.06083724975585938, 0.06075068664550781, 0.06059417724609375, 0.060660736083984375, 0.06055219268798828, 0.06142156982421875, 0.06299555206298828, 0.060618721008300784, 0.060426017761230466, 0.06088521575927734, 0.06033203125, 0.06091072082519531, 0.06065423965454102, 0.060612449645996096, 0.06053279876708984, 0.06107302474975586, 0.06062377548217773, 0.060571231842041016, 0.060743614196777346, 0.060952831268310546, 0.06160793685913086, 0.06134783935546875, 0.06113420867919922, 0.06098223876953125, 0.06091734313964844, 0.06115923309326172, 0.06041183853149414, 0.0604156494140625, 0.06049155044555664, 0.06085311889648438, 0.06062080001831055, 0.06086608123779297, 0.06127990341186523, 0.06066179275512695, 0.06059292984008789, 0.06121673583984375, 0.06046713638305664, 0.060241374969482425, 0.06036953735351563, 0.06046025466918945, 0.06179305648803711, 0.060792736053466793, 0.06099683380126953, 0.060224384307861326, 0.060510398864746094, 0.060622657775878906, 0.06081126403808594, 0.06047334289550781, 0.06171852874755859, 0.06068838500976562, 0.0607191047668457, 0.06072918319702148, 0.06118620681762695, 0.060569183349609375, 0.060553054809570316, 0.06048006439208985, 0.06075187301635742, 0.06135193634033203, 0.06115532684326172, 0.06076006317138672, 0.06069200134277344, 0.060574462890625, 0.06100889587402344, 0.06109872055053711, 0.060715007781982425, 0.06096691131591797, 0.0607108154296875, 0.060508033752441404, 0.06099785614013672, 0.06051395034790039, 0.06075164794921875, 0.0615464973449707, 0.06160179138183594, 0.06082559967041016, 0.0607375373840332, 0.060509376525878907, 0.06058476638793946, 0.06052864074707031, 0.060679393768310545, 0.061047584533691406, 0.06108089447021484, 0.060878753662109375, 0.06080912017822265, 0.06087360000610351, 0.061230911254882815, 0.06085609436035156, 0.060586399078369144, 0.06040956878662109, 0.06062108612060547, 0.06051839828491211, 0.06100582504272461, 0.06087235260009766, 0.06056995010375977, 0.06051174545288086, 0.06057318496704102, 0.0605849609375, 0.06163657760620117, 0.06068963241577149, 0.06065439987182617, 0.060618240356445315, 0.06357974243164062, 0.0632534065246582, 0.06135193634033203, 0.06136988830566406, 0.06106569671630859, 0.06060031890869141, 0.06043033599853516, 0.060485633850097656, 0.06430095672607422, 0.06103366470336914, 0.060543903350830076, 0.06073513412475586, 0.06104115295410156, 0.061179615020751955, 0.06082704162597656, 0.0606932144165039, 0.06082467269897461, 0.060867488861083986, 0.06044672012329102, 0.06057574462890625, 0.06046460723876953, 0.06058844757080078, 0.060287105560302735, 0.06075187301635742, 0.060622848510742185, 0.06029107284545898, 0.06046105575561524, 0.06057183837890625, 0.060440383911132815, 0.060921855926513675, 0.060626174926757814, 0.06038195037841797, 0.06063513565063477, 0.06074070358276367, 0.060276737213134764, 
0.06059596633911133, 0.06308854293823242, 0.061292800903320316, 0.06108127975463867, 0.060655166625976566, 0.06131571197509766, 0.060848094940185546, 0.06080742263793945, 0.06084812927246094, 0.06077167892456055, 0.061220542907714844, 0.06114608001708984, 0.06067647933959961, 0.0604699821472168, 0.06087567901611328, 0.060851905822753904, 0.060433822631835936, 0.06053100967407227, 0.0663372802734375, 0.06078268814086914, 0.06110822296142578, 0.060835838317871094, 0.060690048217773435, 0.06057526397705078, 0.06052479934692383, 0.060590686798095705, 0.060673919677734375, 0.06093606567382812, 0.06113715362548828, 0.06077439880371094, 0.06039756774902344, 0.06038425445556641, 0.06038336181640625, 0.06054377746582031, 0.060386943817138675, 0.06021177673339844, 0.06050559997558594, 0.06063484954833984, 0.06063907241821289, 0.06029299163818359, 0.0603985595703125, 0.06051225662231445, 0.060542335510253904, 0.06068902587890625, 0.06133555221557617, 0.061467742919921874, 0.06079580688476562, 0.06150307083129883, 0.06163087844848633, 0.06131097412109375, 0.060585601806640625, 0.06069049453735351, 0.06049004745483398, 0.06053459167480469, 0.06048172760009766, 0.06083699035644531, 0.06058483123779297, 0.060634368896484374, 0.06074019241333008, 0.060762271881103516, 0.06077545547485352, 0.06073798370361328, 0.06067862319946289, 0.06054297637939453, 0.06044672012329102, 0.060485633850097656, 0.06165212631225586, 0.060900062561035154, 0.060560958862304684, 0.06089990234375, 0.06056911849975586, 0.06099772644042969, 0.0605447998046875, 0.06044527816772461, 0.06099967956542969, 0.06122025680541992, 0.06066851043701172, 0.06119014358520508, 0.06194172668457031, 0.06124863815307617, 0.060739585876464844, 0.06070159912109375, 0.060369918823242184, 0.06092201614379883, 0.060556129455566404, 0.06055116653442383, 0.060739326477050784, 0.061171680450439456, 0.06069712066650391, 0.06076185607910156, 0.060868511199951174, 0.061065185546875, 0.06112041473388672, 0.06117023849487305, 0.061134334564208984, 0.061239456176757814, 0.06065478515625, 0.06086643218994141, 0.06105120086669922, 0.06097478485107422, 0.06095148849487304, 0.06060031890869141, 0.06062899017333984, 0.0612182731628418, 0.06087324905395508, 0.060724735260009766, 0.06064985656738281, 0.06097011184692383, 0.060691009521484374, 0.061012321472167966, 0.06090150451660156, 0.06546018981933593, 0.06137241744995117, 0.06058953475952149, 0.06043849563598633, 0.06059638214111328, 0.060559200286865233, 0.06033875274658203, 0.06035468673706055, 0.06093580627441406, 0.06099715042114258, 0.060709407806396484, 0.06059347152709961, 0.06068928146362305, 0.0608334732055664, 0.060410526275634764, 0.06355996704101563, 0.06309632110595703, 0.06170431900024414, 0.06164092636108399, 0.06137817764282227, 0.061013599395751954, 0.060604286193847653, 0.06051062393188476, 0.06075603103637695, 0.060948638916015624, 0.0605186882019043, 0.060581886291503906, 0.06083900833129883, 0.06132783889770508, 0.060881343841552735, 0.060685665130615234, 0.06065225601196289, 0.06135801696777344, 0.060801025390625, 0.06090054321289062, 0.06045113754272461, 0.06039580917358398, 0.06052272033691406, 0.06038118362426758, 0.060352512359619144, 0.06072025680541992, 0.06086896133422852, 0.06062675094604492, 0.06056419372558594, 0.06093363189697266, 0.06093212890625, 0.061205024719238284, 0.06064121627807617, 0.06107727813720703, 0.060682239532470705, 0.060776161193847655, 0.06055145645141601, 0.06301491165161133, 0.06086441421508789, 0.06084569549560547, 0.060451393127441404, 0.06045267105102539, 
0.060889022827148434, 0.0603815689086914, 0.060587806701660155, 0.060616928100585936, 0.06096691131591797, 0.060630111694335936, 0.0605684814453125, 0.060657470703125, 0.060881088256835934, 0.06063139343261719, 0.06067763137817383, 0.06037417602539062, 0.06030131149291992, 0.0635687026977539, 0.06571641540527344, 0.06030464172363281, 0.06077849578857422, 0.06045759963989258, 0.06046278381347656, 0.06032633590698242, 0.0604571533203125, 0.0618702392578125, 0.06085222244262695, 0.06096691131591797, 0.06065151977539063, 0.06047110366821289, 0.060833118438720704, 0.06078732681274414, 0.06046083068847656, 0.060643775939941406, 0.0602147216796875, 0.06038518524169922, 0.060760574340820314, 0.06082166290283203, 0.06063718414306641, 0.060863681793212894, 0.061092575073242186, 0.06067603302001953, 0.06061673736572266, 0.06097318267822266, 0.06122905731201172, 0.06143289566040039, 0.061229728698730466, 0.061182239532470706, 0.06096486282348633, 0.06067529678344727, 0.060427040100097654, 0.0604139518737793, 0.0608125114440918, 0.060674846649169924, 0.06044467163085938, 0.06072275161743164, 0.06143840026855469, 0.06082883071899414, 0.06081008148193359, 0.060888671875, 0.06064374542236328, 0.060718208312988284, 0.06095049667358399, 0.060447582244873045, 0.06137382507324219, 0.06054111862182617, 0.060580352783203124, 0.06043385696411133, 0.06027679824829101, 0.06080767822265625, 0.06054912185668945, 0.06098944091796875, 0.06075769424438476, 0.061145408630371094, 0.06047878265380859, 0.06057644653320313, 0.06136048126220703, 0.061075103759765624, 0.06053376007080078, 0.06047760009765625, 0.060441440582275394, 0.061282302856445314, 0.06117507171630859, 0.06072188949584961, 0.060682239532470705, 0.06073548889160156, 0.06064854431152344, 0.060562080383300784, 0.061068958282470706, 0.060541278839111326, 0.06042828750610352, 0.06053683090209961, 0.06066790390014649, 0.06053273773193359, 0.06052150344848633, 0.06040060806274414, 0.06039699172973633, 0.060699199676513674, 0.06064291381835937, 0.0602644157409668, 0.060330142974853514, 0.060770591735839846, 0.06076969528198242, 0.060721759796142576, 0.060760032653808596, 0.06072300720214844, 0.06054908752441406, 0.06044041442871094, 0.060502464294433594, 0.06134982299804687, 0.06097103881835937, 0.060665695190429685, 0.060588191986083985, 0.060333118438720704, 0.060746944427490235, 0.06039936065673828, 0.06023273468017578, 0.060447486877441406, 0.061573215484619144, 0.06093791961669922, 0.06079328155517578, 0.061249534606933595, 0.060864513397216796, 0.06066790390014649, 0.06201875305175781, 0.06448006439208985, 0.061590911865234375, 0.0615327033996582, 0.06124886322021485, 0.061080352783203125, 0.061171680450439456, 0.06110003280639648, 0.0606710090637207, 0.06063315200805664, 0.060558238983154294, 0.061118465423583984, 0.06074982452392578, 0.06084563064575195, 0.060932254791259764, 0.06047068786621094, 0.06051631927490234, 0.060726177215576174, 0.06079318237304687, 0.06035827255249023, 0.06054095840454102, 0.06032998275756836, 0.06031727981567383, 0.06082550430297851, 0.06088991928100586, 0.06078819274902344]",tokens/s,16.403931002372882,, 
4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,876.183552,664.666112,0.0,262.144,258.163712,s,1,7.6761015625,7.6761015625,0.0,7.6761015625,7.6761015625,7.6761015625,7.6761015625,[7.6761015625],,kWh,2.0508158679179663e-05,2.255073747388508e-06,6.643338647971397e-06,2.9406571074539568e-05,,MB,1293.996032,763.232256,0.0,348.127232,317.820928,s,10,0.20537840080261233,0.020537840080261226,0.00011462728244317746,0.020528592109680176,0.020678690910339356,0.020730545520782472,0.020772029209136962,"[0.020338367462158204, 0.02066716766357422, 0.0205185604095459, 0.02045110321044922, 0.020555168151855468, 0.020538623809814454, 0.020782400131225585, 0.02051456069946289, 0.020459680557250975, 0.02055276870727539]",tokens/s,12464.796638768252,kWh,6.011156571245678e-07,6.629241360678097e-08,3.997859851234435e-07,1.0671940558547923e-06,tokens/kWh,239881396.07369837,MB,1307.27936,788.39808,0.0,373.293056,317.823488,s,10,9.903818054199219,0.9903818054199218,0.0022449583300621903,0.9903520812988281,0.9931331359863281,0.9933422332763672,0.9935095111083984,"[0.9935513305664062, 0.9902691040039062, 0.9898326416015625, 0.9914702758789062, 0.9917613525390625, 0.9903436889648437, 0.993086669921875, 0.9860274047851563, 0.9903604736328125, 0.9871151123046875]",tokens/s,63.611830967843765,kWh,2.875758869745974e-05,3.1714580794505873e-06,1.0871361920679637e-05,4.280040869758997e-05,tokens/kWh,1471948.5611722078,,s,630,9.899007524490349,0.015712710356333898,0.0004994784391564056,0.015613296031951905,0.015820867061614993,0.015998952198028563,0.018256409091949467,"[0.015509344100952149, 0.01576796817779541, 0.015684096336364747, 0.015678720474243166, 0.015673312187194824, 0.015645312309265136, 0.015642239570617677, 0.015626784324645995, 0.015700192451477052, 0.015604767799377441, 0.015621024131774902, 0.01568166446685791, 0.01565667152404785, 0.015676544189453124, 0.015606719970703125, 0.015683520317077638, 0.015762880325317384, 0.01572492790222168, 0.015749088287353517, 0.015671520233154296, 0.015658687591552735, 0.015668864250183104, 0.015653504371643066, 0.015586784362792969, 0.01560591983795166, 0.01567311954498291, 0.015616671562194824, 0.015773664474487303, 0.01978688049316406, 0.01582572841644287, 0.015779232025146483, 0.01565120029449463, 0.01564252758026123, 0.01570620822906494, 0.01576591968536377, 0.01569148826599121, 0.015685824394226073, 0.015677184104919435, 0.01572006416320801, 0.015854432106018067, 0.01579206371307373, 0.015672863960266112, 0.015720767974853514, 0.015699904441833498, 0.015678784370422364, 0.015664064407348632, 0.015642111778259277, 0.015837504386901857, 0.0156723518371582, 0.015661439895629882, 0.01571494388580322, 0.01589558410644531, 0.01582147216796875, 0.015626527786254882, 0.015689727783203124, 0.015683775901794435, 0.01571225643157959, 0.015718432426452638, 0.015959839820861816, 0.015746527671813965, 0.015687392234802245, 0.015727423667907716, 
0.0156461124420166, 0.015351903915405274, 0.01568668842315674, 0.015629695892333983, 0.015686047554016112, 0.015562656402587891, 0.015588831901550294, 0.015577728271484376, 0.01558892822265625, 0.015611583709716798, 0.015585151672363282, 0.01563049602508545, 0.015608320236206055, 0.015607392311096192, 0.015540863990783691, 0.01555190372467041, 0.015639455795288085, 0.015650495529174805, 0.01563369560241699, 0.015587360382080077, 0.015552639961242676, 0.015576831817626953, 0.015737024307250977, 0.015579775810241699, 0.015598943710327148, 0.01557919979095459, 0.015548576354980468, 0.015671744346618652, 0.015603103637695313, 0.01584604835510254, 0.01978976058959961, 0.015747072219848633, 0.01568515205383301, 0.015606240272521972, 0.01593139171600342, 0.015632384300231932, 0.015619808197021485, 0.01575980758666992, 0.01561564826965332, 0.015902624130249024, 0.0158407039642334, 0.015663583755493163, 0.015667584419250487, 0.01574019241333008, 0.015679391860961914, 0.015689855575561525, 0.0156310396194458, 0.015658080101013185, 0.015570048332214356, 0.015623456001281738, 0.01559398365020752, 0.015612031936645508, 0.015593503952026367, 0.015590847969055176, 0.015609984397888183, 0.016038400650024414, 0.01579804801940918, 0.015659008026123047, 0.015603648185729981, 0.01561404800415039, 0.015594847679138183, 0.015591679573059082, 0.015663680076599122, 0.015537983894348144, 0.015274080276489257, 0.015511455535888672, 0.015572447776794434, 0.015558464050292969, 0.01564774417877197, 0.015622079849243164, 0.015660608291625976, 0.015578880310058593, 0.015578783988952637, 0.015606592178344727, 0.01562828826904297, 0.015545696258544921, 0.015585087776184081, 0.015600064277648925, 0.015538175582885743, 0.015663519859313964, 0.01580832004547119, 0.015648159980773926, 0.01576416015625, 0.015632479667663575, 0.015515647888183593, 0.015820799827575685, 0.015470911979675293, 0.015670175552368163, 0.015620896339416504, 0.017123071670532227, 0.015599231719970703, 0.01563916778564453, 0.015599616050720215, 0.015949824333190917, 0.018149375915527344, 0.01712303924560547, 0.015934911727905274, 0.015645376205444338, 0.015618207931518555, 0.015689727783203124, 0.01557913589477539, 0.0155414400100708, 0.015580096244812013, 0.015547455787658692, 0.015541055679321288, 0.015574655532836913, 0.015724960327148436, 0.015606880187988281, 0.015610400199890137, 0.01552956771850586, 0.015549311637878418, 0.015624064445495606, 0.015574496269226074, 0.015585503578186034, 0.01578441619873047, 0.0155894718170166, 0.015544063568115234, 0.01561190414428711, 0.015513279914855956, 0.015620256423950195, 0.01586192035675049, 0.01551318359375, 0.01548646354675293, 0.015532959938049316, 0.01570201587677002, 0.015663104057312013, 0.015572287559509278, 0.015429344177246094, 0.015665472030639647, 0.015640704154968262, 0.015719264030456544, 0.015619071960449218, 0.015586527824401855, 0.015892288208007813, 0.015600000381469727, 0.015605952262878418, 0.015608127593994141, 0.015773792266845704, 0.015684736251831054, 0.01561075210571289, 0.01567948818206787, 0.015668224334716797, 0.015757439613342285, 0.015960960388183593, 0.015705856323242187, 0.01566444778442383, 0.015626591682434084, 0.01555452823638916, 0.015686047554016112, 0.01563871955871582, 0.015597599983215332, 0.015613823890686036, 0.015595199584960938, 0.015681471824645996, 0.015599807739257813, 0.015560959815979004, 0.01569388771057129, 0.01581875228881836, 0.01577779197692871, 0.02004787254333496, 0.015781888008117674, 0.01566342353820801, 0.015568575859069823, 0.015633631706237792, 
0.015803168296813965, 0.015666751861572265, 0.015636927604675294, 0.015702143669128418, 0.01568051242828369, 0.01571836757659912, 0.01565993595123291, 0.01560985565185547, 0.015713919639587404, 0.015677568435668945, 0.015598112106323243, 0.015613696098327638, 0.01557475185394287, 0.015591327667236327, 0.015649120330810548, 0.01586176013946533, 0.015714303970336914, 0.015700096130371093, 0.015585151672363282, 0.015659008026123047, 0.01564035224914551, 0.015590815544128419, 0.015573823928833008, 0.015594976425170898, 0.01558950424194336, 0.015580703735351562, 0.015279935836791993, 0.01558351993560791, 0.015584480285644532, 0.015753631591796876, 0.01570864009857178, 0.015594495773315429, 0.015596063613891601, 0.01556227207183838, 0.01551750373840332, 0.015549375534057617, 0.015540351867675781, 0.015591296195983886, 0.015573087692260743, 0.015479904174804687, 0.015618880271911622, 0.015550368309020996, 0.015745216369628907, 0.015601568222045899, 0.01555072021484375, 0.015537471771240234, 0.01550489616394043, 0.015485312461853027, 0.015538751602172851, 0.015492671966552735, 0.01544262409210205, 0.015531776428222656, 0.015685407638549805, 0.015538399696350097, 0.015574624061584472, 0.015624735832214355, 0.015670656204223633, 0.01570975971221924, 0.01589958381652832, 0.019256895065307617, 0.01642959976196289, 0.01586575984954834, 0.015763456344604493, 0.015633440017700194, 0.01555942440032959, 0.015622143745422363, 0.01561734390258789, 0.015675552368164064, 0.01563216018676758, 0.015619039535522461, 0.015656959533691405, 0.015606975555419922, 0.015573984146118163, 0.015583071708679199, 0.015619935989379883, 0.015527296066284179, 0.015554752349853516, 0.015632831573486328, 0.017421472549438478, 0.016702560424804686, 0.016485504150390625, 0.015991647720336913, 0.015590911865234374, 0.015677215576171875, 0.015561375617980957, 0.015453760147094726, 0.015681983947753907, 0.01555996799468994, 0.015513728141784668, 0.01523958396911621, 0.015705408096313475, 0.015548864364624023, 0.015629952430725097, 0.01549766445159912, 0.015517600059509277, 0.015521087646484376, 0.015443743705749512, 0.015517919540405274, 0.015637503623962403, 0.015605440139770508, 0.015538175582885743, 0.015574848175048828, 0.015564352035522462, 0.015523455619812011, 0.015661343574523926, 0.01568396759033203, 0.015546719551086425, 0.015599871635437012, 0.015574080467224121, 0.01563308811187744, 0.015507455825805663, 0.015523232460021973, 0.01544655990600586, 0.015562623977661132, 0.015537792205810547, 0.015508031845092774, 0.01551360034942627, 0.015559776306152344, 0.01544428825378418, 0.015457088470458985, 0.015748191833496093, 0.015491935729980469, 0.015724384307861328, 0.018163711547851562, 0.017057024002075195, 0.015758079528808595, 0.016004928588867186, 0.015491264343261719, 0.015460351943969726, 0.015544320106506348, 0.01556390380859375, 0.015476736068725586, 0.01550211238861084, 0.015442015647888184, 0.015493184089660645, 0.015560640335083007, 0.01548902416229248, 0.01551097583770752, 0.01555459213256836, 0.016231199264526368, 0.01609209632873535, 0.01629385566711426, 0.015945695877075197, 0.01563532829284668, 0.016601343154907227, 0.017050783157348634, 0.015764063835144042, 0.015692000389099122, 0.01575209617614746, 0.015677696228027345, 0.015618240356445313, 0.01562054443359375, 0.01575014400482178, 0.015767744064331055, 0.01566928005218506, 0.015600128173828125, 0.015567104339599609, 0.015604831695556641, 0.015589983940124511, 0.015536416053771972, 0.017481792449951173, 0.01621401596069336, 0.015644224166870117, 
0.015679360389709473, 0.015660863876342773, 0.01564857578277588, 0.015621055603027343, 0.015709471702575684, 0.01567407989501953, 0.015659040451049804, 0.015689599990844728, 0.01573292827606201, 0.015612895965576173, 0.015717311859130858, 0.015499263763427735, 0.01559654426574707, 0.015592351913452148, 0.015607904434204102, 0.01571020793914795, 0.01588976001739502, 0.015577792167663574, 0.01552995204925537, 0.01583513641357422, 0.015640288352966308, 0.015709728240966798, 0.015586048126220704, 0.015919103622436523, 0.018294271469116212, 0.017383007049560546, 0.015724608421325684, 0.01585155200958252, 0.01571664047241211, 0.01562992000579834, 0.015623104095458985, 0.015616095542907715, 0.015591327667236327, 0.015584671974182129, 0.015593600273132325, 0.01558892822265625, 0.015569215774536132, 0.015542880058288574, 0.015616000175476074, 0.01559318447113037, 0.015615488052368164, 0.015555359840393067, 0.015803839683532713, 0.015542176246643067, 0.015743647575378418, 0.015607808113098144, 0.015605759620666505, 0.015617376327514648, 0.015587136268615723, 0.01566140842437744, 0.015599712371826172, 0.01562649631500244, 0.01520639991760254, 0.015578368186950683, 0.015529760360717773, 0.015606752395629883, 0.015587679862976074, 0.015581855773925781, 0.015537152290344238, 0.015585280418395997, 0.015618047714233398, 0.015529791831970214, 0.015547743797302246, 0.015542752265930175, 0.015626399993896485, 0.015558527946472169, 0.01553609561920166, 0.015584704399108886, 0.01568070411682129, 0.015613984107971192, 0.015734335899353028, 0.015593791961669922, 0.015556447982788086, 0.015519840240478516, 0.015525664329528809, 0.015549599647521973, 0.015596799850463867, 0.015658720016479492, 0.015849599838256834, 0.01550755214691162, 0.015457599639892579, 0.015622400283813476, 0.015549887657165527, 0.015530271530151367, 0.015512063980102539, 0.015546367645263673, 0.01571840000152588, 0.015539487838745118, 0.01579212760925293, 0.01997488021850586, 0.015733792304992677, 0.015612895965576173, 0.015536224365234375, 0.015579039573669434, 0.015525888442993165, 0.015509344100952149, 0.015497376441955566, 0.015523839950561523, 0.015523103713989258, 0.015557056427001953, 0.015503487586975097, 0.015593567848205566, 0.01569369602203369, 0.015541695594787597, 0.015568927764892578, 0.015571680068969727, 0.015529696464538573, 0.015517984390258789, 0.015517791748046876, 0.015494303703308105, 0.015542943954467774, 0.015900768280029298, 0.015548095703125, 0.015526176452636719, 0.015517375946044921, 0.015232416152954101, 0.015593215942382813, 0.01595068836212158, 0.016130048751831053, 0.01606608009338379, 0.015641056060791015, 0.01559347152709961, 0.015588640213012695, 0.015754143714904786, 0.015446111679077149, 0.015499296188354493, 0.015591103553771973, 0.015584992408752442, 0.01547920036315918, 0.015664352416992187, 0.015545120239257812, 0.015654560089111327, 0.015646944046020506, 0.015691776275634766, 0.015544320106506348, 0.015583231925964355, 0.015556608200073242, 0.01551360034942627, 0.015654911994934084, 0.015880191802978515, 0.015642623901367187, 0.015556608200073242, 0.01562009620666504, 0.015529824256896972, 0.015763232231140137, 0.015570528030395507, 0.015536928176879882, 0.015562656402587891, 0.015576671600341797, 0.0157838716506958, 0.015698240280151366, 0.015563008308410644, 0.0158505277633667, 0.018129888534545897, 0.017903167724609374, 0.015741215705871583, 0.015679840087890626, 0.01564857578277588, 0.01568054389953613, 0.015605983734130859, 0.015641247749328614, 0.01577996826171875, 0.015601311683654785, 
0.015578720092773437, 0.015676128387451173, 0.015554464340209961, 0.015602047920227052, 0.01604332733154297, 0.015618464469909669, 0.01556390380859375, 0.015524736404418945, 0.015601408004760743, 0.015534336090087891, 0.01555072021484375, 0.015539487838745118, 0.015565279960632325, 0.015560319900512695, 0.01561843204498291, 0.01525113582611084, 0.015667712211608887, 0.015564831733703613, 0.01577507209777832, 0.015565407752990723, 0.015599072456359863, 0.015530367851257325, 0.015596575736999511, 0.015567839622497559, 0.015578304290771484, 0.016270143508911133, 0.01568182373046875, 0.015619808197021485, 0.015510560035705567, 0.015571871757507324, 0.015555999755859374, 0.01565283203125, 0.01567535972595215, 0.015640735626220703, 0.015593791961669922, 0.015545632362365723, 0.015503935813903809, 0.015488736152648926, 0.015507167816162109, 0.015524831771850586, 0.015511551856994628, 0.015544320106506348, 0.015568448066711425, 0.01583353614807129, 0.015672896385192873, 0.015592032432556153, 0.01548681640625, 0.01551916790008545, 0.015692352294921875, 0.01560371208190918, 0.015604928016662597, 0.015592255592346192, 0.01556604766845703, 0.015585375785827636, 0.015743136405944824, 0.019241504669189453, 0.0159017276763916, 0.01563865566253662, 0.015586112022399902, 0.015475071907043458, 0.015541055679321288, 0.015676095962524415, 0.0156627197265625, 0.015691871643066405, 0.015671456336975097, 0.015628512382507325, 0.015594752311706542, 0.015743167877197265, 0.015776384353637696, 0.01557215976715088, 0.015503647804260254, 0.015526432037353516, 0.015497471809387207, 0.01551260757446289, 0.015546815872192383, 0.015513888359069824, 0.015472672462463378, 0.015510496139526367]",tokens/s,63.64274382470834,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,13967.937536,7843.217408,0.0,7440.695296,7427.899392,s,1,30.944724609375,30.944724609375,0.0,30.944724609375,30.944724609375,30.944724609375,30.944724609375,[30.944724609375],,kWh,0.0007009409949791461,7.731170764698168e-05,0.00024184519347600353,0.0010200978961021313,,MB,1316.216832,8424.128512,0.0,8000.63488,7875.673088,s,10,0.9181020431518555,0.09181020431518554,0.00016440618683767256,0.09184403228759766,0.09194499130249023,0.0920434871673584,0.09212228385925293,"[0.09214198303222657, 0.09156422424316406, 0.09185926055908203, 0.09183907318115235, 0.09186032104492188, 0.09164659118652344, 0.09184899139404297, 0.0915816650390625, 0.09183683013916015, 0.09192310333251953]",tokens/s,2788.361074997164,kWh,2.7445764955220475e-06,3.0266893739114584e-07,1.8222626208414542e-06,4.869508053754648e-06,tokens/kWh,52572045.71262809,MB,1335.914496,8445.100032,0.0,8021.6064,7976.51712,s,10,47.5870986328125,4.75870986328125,0.02450104388407054,4.754734375,4.774922265625,4.799526806640625,4.819210439453125,"[4.76945458984375, 4.75359765625, 4.75587109375, 4.74460693359375, 4.76723291015625, 4.7452861328125, 4.7566865234375, 
4.73366552734375, 4.73656591796875, 4.82413134765625]",tokens/s,13.238882346266834,kWh,0.00013870107488239783,1.5299126264729748e-05,7.451296511395861e-05,0.0002285131662610862,tokens/kWh,275695.2740658268,,s,630,47.584246864318814,0.07553055057828388,0.0010894234530527435,0.07526207733154297,0.07631293258666992,0.0774220890045166,0.0798182808685303,"[0.07583545684814454, 0.07563209533691406, 0.07727152252197265, 0.07631024169921875, 0.07571842956542969, 0.07541763305664062, 0.07492861175537109, 0.07498751831054687, 0.0753584976196289, 0.07514006042480469, 0.07525881958007813, 0.08101868438720704, 0.07498957061767578, 0.07490969848632813, 0.07599855804443359, 0.07617142486572266, 0.07514755249023437, 0.07541273498535156, 0.07548365020751953, 0.0757375717163086, 0.07587388610839843, 0.07575382232666016, 0.07531321716308594, 0.07487026977539063, 0.07503718566894531, 0.07523123168945313, 0.07484825897216797, 0.0751247329711914, 0.07524777221679688, 0.07495254516601563, 0.07523900604248047, 0.07493468475341797, 0.07549542236328124, 0.0748953628540039, 0.07529267120361328, 0.07647846221923828, 0.0767503662109375, 0.07636943817138672, 0.07655280303955078, 0.0762084197998047, 0.07582463836669921, 0.07601209259033204, 0.07550543975830078, 0.07578851318359375, 0.07590502166748046, 0.07558697509765624, 0.07843196868896485, 0.07513788604736328, 0.07492816162109375, 0.07497932434082032, 0.07492198181152344, 0.07497523498535157, 0.07494656372070313, 0.0748966064453125, 0.07545862579345704, 0.07559855651855468, 0.07526604461669922, 0.07843433380126953, 0.0793048324584961, 0.075378173828125, 0.07506169891357421, 0.07463276672363281, 0.07502406311035156, 0.07516620635986328, 0.07500790405273437, 0.07492205047607423, 0.07776866912841797, 0.07574121856689453, 0.07527629089355468, 0.07515916442871094, 0.07562397003173828, 0.07458697509765624, 0.07576576232910157, 0.07564854431152344, 0.07626390075683594, 0.07595613098144531, 0.07549132537841798, 0.07579452514648438, 0.07521481323242188, 0.07545177459716797, 0.07606899261474609, 0.07625577545166015, 0.07663555145263672, 0.075785888671875, 0.07584860992431641, 0.07549472045898438, 0.07761993408203124, 0.07508377838134765, 0.07473561859130859, 0.07455503845214843, 0.07486089324951171, 0.07495670318603516, 0.07545455932617187, 0.07506098937988281, 0.07537484741210937, 0.07538265228271485, 0.07493430328369141, 0.07486883544921875, 0.0746854705810547, 0.0761803207397461, 0.07543411254882812, 0.07531932830810546, 0.07479724884033204, 0.07522828674316406, 0.07541571044921876, 0.07538563537597656, 0.07558268737792968, 0.07539558410644531, 0.07581491088867187, 0.07581696319580078, 0.07560806274414063, 0.07516105651855469, 0.07512937927246094, 0.0750274887084961, 0.07512895965576172, 0.0751475830078125, 0.074923583984375, 0.07486534118652344, 0.0748403549194336, 0.07535810852050781, 0.07539311981201172, 0.07526338958740235, 0.07608585357666016, 0.07598480224609375, 0.07534591674804687, 0.07520444488525391, 0.07529545593261719, 0.07518822479248047, 0.0750755844116211, 0.07525811004638672, 0.07522815704345703, 0.07524018859863281, 0.07546265411376953, 0.07606294250488281, 0.07608092498779297, 0.07569203186035156, 0.07556505584716797, 0.0751852798461914, 0.07512767791748047, 0.07508364868164062, 0.07514329528808594, 0.08201625823974609, 0.07615078735351563, 0.07523744201660157, 0.0751717758178711, 0.07645804595947266, 0.0754031982421875, 0.07610163116455078, 0.07574681854248047, 0.07554678344726562, 0.074901123046875, 0.07505379486083984, 0.07490972900390624, 
0.07525167846679688, 0.07470633697509765, 0.07468211364746094, 0.07464227294921875, 0.07506249237060547, 0.07546550750732423, 0.07588658905029297, 0.07513497924804688, 0.07636377716064453, 0.07557065582275391, 0.07487894439697265, 0.07485292816162109, 0.07517593383789062, 0.07475161743164062, 0.07514380645751953, 0.07528374481201172, 0.07510063934326172, 0.07498076629638672, 0.07514524841308594, 0.07512480163574219, 0.07491458892822266, 0.0754214096069336, 0.07499775695800781, 0.07546886444091797, 0.07506547546386719, 0.07495238494873047, 0.07513510131835938, 0.07502028656005859, 0.07503270721435547, 0.0752147216796875, 0.07518153381347656, 0.07577385711669922, 0.0752747802734375, 0.0754986572265625, 0.07738057708740234, 0.07865545654296875, 0.07528096008300782, 0.07487872314453126, 0.07509024047851562, 0.07481980895996093, 0.07487190246582032, 0.07492249298095703, 0.07569667053222656, 0.07519833374023438, 0.07539532470703125, 0.07524691009521485, 0.07521932983398437, 0.07536780548095703, 0.07516963195800781, 0.07588130950927735, 0.07503453063964843, 0.07500399780273438, 0.07509196472167969, 0.07503462219238281, 0.07479705810546874, 0.07484210968017578, 0.07560163116455078, 0.07485059356689452, 0.07509600067138672, 0.07527532958984375, 0.0750008316040039, 0.07453046417236328, 0.0746470718383789, 0.07711772918701172, 0.07576220703125, 0.07547017669677734, 0.07502505493164062, 0.07536434936523438, 0.0760892791748047, 0.07590287780761719, 0.07567702484130859, 0.07578067016601563, 0.07616284942626952, 0.07573347473144532, 0.07528652954101563, 0.07509606170654297, 0.07520460510253907, 0.07536358642578125, 0.0756968994140625, 0.07490121459960937, 0.07503286743164063, 0.07497280120849609, 0.07479698944091796, 0.07507113647460938, 0.07491868591308594, 0.07527247619628906, 0.07610297393798827, 0.07708303833007812, 0.0748685760498047, 0.07492214202880859, 0.07527613067626954, 0.0754587173461914, 0.07564288330078126, 0.07518822479248047, 0.07495986938476562, 0.07511347198486328, 0.07540121459960937, 0.07539711761474609, 0.07535206604003906, 0.07537245178222657, 0.07539958190917968, 0.07512882995605469, 0.07524771118164063, 0.0756098861694336, 0.07511385345458985, 0.07501197052001952, 0.0753201904296875, 0.07551795196533204, 0.07576582336425781, 0.07604940795898438, 0.07627462768554688, 0.07664435577392578, 0.07575331115722657, 0.07598095703125, 0.07592473602294922, 0.07545906829833984, 0.07596262359619141, 0.07580409240722656, 0.07552796936035157, 0.07863929748535156, 0.07554294586181641, 0.07592160034179687, 0.07656201934814454, 0.07545692443847657, 0.07600537872314453, 0.07576780700683594, 0.0761379852294922, 0.07543981170654297, 0.07541350555419922, 0.07505084991455079, 0.07595654296875, 0.07603257751464844, 0.07583254241943359, 0.07571347045898437, 0.07550355529785156, 0.07586576080322266, 0.07558326721191407, 0.07583516693115235, 0.07595267486572266, 0.07528860473632812, 0.07520893096923828, 0.0753070068359375, 0.07521389007568359, 0.07532434844970703, 0.07572684478759766, 0.0756960678100586, 0.0758067855834961, 0.07557705688476563, 0.07534825897216797, 0.07559929656982421, 0.07664406585693359, 0.07615100860595703, 0.0759405746459961, 0.07600118255615235, 0.07588169860839844, 0.0751860809326172, 0.07474054718017578, 0.07533984375, 0.07506896209716797, 0.07485478210449219, 0.07461692810058594, 0.0753531494140625, 0.07594595336914063, 0.08074652862548828, 0.07739174652099609, 0.07551577758789063, 0.07501439666748047, 0.07462911987304688, 0.07481549072265625, 0.074534912109375, 0.07542550659179688, 
0.07523766326904296, 0.07515462493896484, 0.07519471740722657, 0.07512649536132812, 0.07513286590576172, 0.07581932830810546, 0.07584323120117188, 0.07526051330566406, 0.07501602935791016, 0.07472700500488282, 0.07490643310546875, 0.0750051498413086, 0.07549622344970704, 0.07604019165039062, 0.07547904205322266, 0.07513721466064453, 0.07475325012207032, 0.07499222564697265, 0.07523324584960937, 0.07521830749511718, 0.07483599853515625, 0.07456422424316406, 0.0761374740600586, 0.07519026947021484, 0.07479808044433593, 0.07467612457275391, 0.07489778900146485, 0.0755003204345703, 0.0749169921875, 0.07494636535644532, 0.07488256072998047, 0.0749542694091797, 0.07539778900146485, 0.07548345947265625, 0.0751365737915039, 0.07517369842529296, 0.07498976135253907, 0.07506114959716798, 0.07493689727783204, 0.07489532470703125, 0.07521097564697266, 0.07520976257324219, 0.07520864105224609, 0.07517481231689453, 0.07516316986083985, 0.07547277069091797, 0.0752088623046875, 0.07538483428955078, 0.07508207702636718, 0.07524352264404296, 0.07574323272705077, 0.07557286071777344, 0.07539340972900391, 0.07573737335205079, 0.07584722900390625, 0.07621881866455078, 0.0757224349975586, 0.07593926239013672, 0.0762718734741211, 0.07627430725097656, 0.07621939086914062, 0.07573222351074219, 0.07562745666503906, 0.07525049591064453, 0.07504895782470702, 0.07543603515625, 0.07568486022949218, 0.07643142700195313, 0.07640335845947266, 0.07566716766357422, 0.07744691467285156, 0.07651532745361328, 0.07542861175537109, 0.07493990325927734, 0.07460717010498047, 0.07480729675292969, 0.074700927734375, 0.07460419464111329, 0.07837852478027343, 0.07512863922119141, 0.07517263793945313, 0.07610991668701172, 0.074463134765625, 0.07482406616210938, 0.07516918182373047, 0.07587052917480469, 0.07464041900634766, 0.07451538848876953, 0.07434857940673828, 0.07523136138916016, 0.07468019104003906, 0.07460240173339844, 0.07466806030273437, 0.07503878021240235, 0.07549132537841798, 0.07563878631591797, 0.0753807373046875, 0.07599104309082032, 0.07592755126953125, 0.07623884582519531, 0.07554176330566406, 0.07518873596191407, 0.07540582275390625, 0.07517772674560547, 0.07486259460449218, 0.07473551940917969, 0.07466358184814453, 0.07472377777099609, 0.07489097595214844, 0.07476252746582031, 0.07495638275146485, 0.07952630615234375, 0.07727091217041016, 0.07563008117675782, 0.0753359375, 0.0748015365600586, 0.07461682891845703, 0.07476297760009766, 0.07452467346191406, 0.07454924774169921, 0.07503667449951172, 0.0750387191772461, 0.08479743957519531, 0.07752210998535156, 0.07544096374511719, 0.0749464340209961, 0.07452413177490234, 0.07458473968505859, 0.07448371124267578, 0.07446025848388672, 0.07470582580566407, 0.07457587432861328, 0.07489740753173828, 0.07464137268066406, 0.07500115203857421, 0.07506813049316406, 0.07572211456298829, 0.0752236785888672, 0.07464729309082031, 0.07467033386230469, 0.07481753540039063, 0.07468441772460938, 0.07472124481201171, 0.07444662475585938, 0.07484054565429688, 0.07468975830078126, 0.07481536102294922, 0.07476617431640625, 0.07539389038085938, 0.07542115020751954, 0.07490819549560547, 0.07515135955810547, 0.07552614593505859, 0.07644569396972656, 0.07498326110839844, 0.07464157104492188, 0.07466809844970704, 0.07521392059326172, 0.07514812469482422, 0.07488511657714844, 0.07478476715087891, 0.07506742095947265, 0.07528034973144532, 0.07524147033691406, 0.07540940856933594, 0.07491910552978516, 0.07449887847900391, 0.07616102600097656, 0.07476032257080079, 0.074837890625, 0.0746987533569336, 
0.07499507141113282, 0.07470963287353516, 0.07463539123535157, 0.07437519836425781, 0.07539830780029297, 0.0753078384399414, 0.07495823669433593, 0.07484054565429688, 0.07452387237548828, 0.07493599700927735, 0.07450240325927734, 0.07439401245117187, 0.07463731384277343, 0.07463520050048827, 0.07481964874267578, 0.07476019287109376, 0.07480086517333985, 0.07630467224121094, 0.07596441650390626, 0.07681024169921875, 0.0757739486694336, 0.07570022583007813, 0.07568089294433594, 0.07630732727050782, 0.07688601684570312, 0.07549894714355469, 0.07525862121582032, 0.07555257415771484, 0.07560527801513672, 0.07529545593261719, 0.07593772888183593, 0.07545359802246093, 0.07548406219482422, 0.07453900909423829, 0.07481491088867187, 0.07490412902832032, 0.07482096099853516, 0.07451305389404297, 0.07737308502197265, 0.0752656021118164, 0.0755247039794922, 0.07483805084228516, 0.07454531097412109, 0.07557305908203125, 0.07543545532226563, 0.07524428558349609, 0.07494041442871094, 0.07468646240234375, 0.07474082946777344, 0.07473654174804688, 0.07471913909912109, 0.07500383758544922, 0.07499987030029297, 0.07502857971191407, 0.07472332763671875, 0.07490150451660156, 0.07501824188232421, 0.07496208190917969, 0.07526076507568359, 0.07528038024902343, 0.07546880340576172, 0.07486259460449218, 0.07469261169433594, 0.07453695678710938, 0.07453062438964844, 0.07479478454589844, 0.07497129821777344, 0.07495049285888672, 0.07479542541503906, 0.07481497955322265, 0.07566796875, 0.07482323455810547, 0.07548713684082031, 0.07485459136962891, 0.0745618896484375, 0.07446937561035157, 0.07479808044433593, 0.07455171203613281, 0.07471376037597656, 0.07496227264404297, 0.07482838439941407, 0.07459423828125, 0.07767391967773438, 0.07724301147460938, 0.0759758071899414, 0.07598579406738282, 0.07638758087158203, 0.07589469146728516, 0.07517203521728516, 0.07698908996582031, 0.07854195404052734, 0.07868096160888671, 0.08164351654052734, 0.07677760314941406, 0.07536013031005859, 0.07560598754882812, 0.07814514923095703, 0.07702569580078125, 0.07993753814697266, 0.07920591735839844, 0.07865392303466796, 0.07837696075439453, 0.07538387298583984, 0.07577184295654296, 0.07537999725341797, 0.07572246551513671, 0.07550918579101562, 0.07610425567626954, 0.07615398406982422, 0.07603699493408203, 0.07633715057373047, 0.0771256332397461, 0.07755980682373047, 0.07613791656494141, 0.07523609924316406, 0.07499961853027344, 0.0753807373046875, 0.07514236450195312, 0.07685609436035157, 0.07543309020996093, 0.0755654067993164, 0.0756045150756836, 0.07576576232910157, 0.0754298858642578, 0.0753807373046875, 0.0754496307373047, 0.07543791961669923, 0.07499635314941407, 0.07807206726074219, 0.07858790588378907, 0.07920146942138671, 0.0786542739868164, 0.0789683837890625, 0.07896723175048828, 0.08430592346191407]",tokens/s,13.23967576488863,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,6526.726144,3728.605184,0.0,3326.083072,3249.416192,s,1,17.27371484375,17.27371484375,0.0,17.27371484375,17.27371484375,17.27371484375,17.27371484375,[17.27371484375],,kWh,0.0003008218642041659,3.317571461048801e-05,0.00010190813708202406,0.000435905715896678,,MB,1918.164992,4013.817856,0.0,3590.324224,3521.678336,s,10,0.7698339614868165,0.07698339614868165,0.0017603784314308044,0.07620767974853515,0.07940043106079102,0.08035103340148926,0.08111151527404785,"[0.07737232208251953, 0.07637677001953125, 0.07589523315429687, 0.0813016357421875, 0.0763268814086914, 0.07561775970458984, 0.0760884780883789, 0.07918918609619141, 0.0759530258178711, 0.07571266937255859]",tokens/s,3325.3923937776813,kWh,2.233581161863995e-06,2.463251345712718e-07,1.3636100815571404e-06,3.843516377992407e-06,tokens/kWh,66605674.28978073,MB,1920.47104,4124.966912,0.0,3701.47328,3608.866816,s,10,47.63911767578126,4.763911767578125,0.024359321258515,4.773079345703125,4.78779375,4.789522119140625,4.790904814453125,"[4.78187255859375, 4.773564453125, 4.78740966796875, 4.76021875, 4.79125048828125, 4.7253837890625, 4.783630859375, 4.77259423828125, 4.7407509765625, 4.72244189453125]",tokens/s,13.224426285297872,kWh,0.0001386249819602221,1.529072786055382e-05,5.918016057564382e-05,0.0002130958703964197,tokens/kWh,295641.58086593536,,s,630,47.63625203704837,0.0756130984715053,0.0013821111329868061,0.07522145462036134,0.07686268844604492,0.07922473907470703,0.08036890594482422,"[0.07547904205322266, 0.08262655639648438, 0.0789401626586914, 0.07534339141845703, 0.07528272247314453, 0.07499977874755859, 0.0754195556640625, 0.07541382598876953, 0.075228515625, 0.07520051574707032, 0.07528105926513672, 0.07515273284912109, 0.07614656066894532, 0.07509193420410157, 0.0785514907836914, 0.07691423797607422, 0.07477945709228516, 0.07520870208740234, 0.0746903076171875, 0.07491404724121094, 0.07496908569335937, 0.07510425567626954, 0.07525273895263672, 0.07499263763427734, 0.07482518768310546, 0.07605712127685547, 0.07520771026611328, 0.07514339447021484, 0.07940172576904297, 0.07505101013183593, 0.07513683319091796, 0.0759393310546875, 0.07815853118896485, 0.07890322875976563, 0.0755416030883789, 0.07538310241699218, 0.07542425537109375, 0.07539113616943359, 0.07586406707763672, 0.07563468933105469, 0.07525785827636719, 0.07523433685302734, 0.0799241943359375, 0.0751895980834961, 0.0751929931640625, 0.07522438049316406, 0.07530111694335938, 0.07582726287841797, 0.07615526580810547, 0.07554867553710938, 0.07536640167236328, 0.07594802856445312, 0.07596428680419921, 0.07533769226074219, 0.07690589141845704, 0.07955433654785156, 0.0752218246459961, 0.07508541107177734, 0.07516156768798828, 0.07512899017333985, 0.07523987579345703, 0.07481295776367188, 0.07496514892578125, 0.07468057250976562, 0.07590684509277344, 0.07502812957763672, 0.07509641265869141, 0.07512882995605469, 0.07921206665039063, 0.07547714996337891, 0.07572716522216796, 0.07490531158447265, 0.07536041259765625, 0.07451033782958984, 0.07532147216796875, 0.07504447937011718, 0.07490188598632813, 0.07617702484130859, 0.0757681884765625, 0.07558553314208985, 0.07514316558837891, 0.07880806732177735, 0.07537152099609375, 0.07463526153564454, 0.07457283020019531, 0.07487187194824219, 0.07451417541503906, 0.0747541732788086, 0.0776171875, 0.07582835388183594, 0.07545536041259765, 0.07673677062988281, 0.07578598022460938, 0.07680409240722656, 0.07566687774658203, 0.0798705291748047, 0.07546189117431641, 
0.0752279052734375, 0.07533567810058593, 0.07538082885742188, 0.07561571502685546, 0.07575548553466797, 0.07570793914794922, 0.07561644744873047, 0.07642390441894531, 0.0754749755859375, 0.07654195404052734, 0.07503257751464844, 0.07532134246826172, 0.07979571533203125, 0.0756126708984375, 0.07556905364990234, 0.07515555572509766, 0.07508934020996094, 0.0754319076538086, 0.07520025634765624, 0.07558025360107422, 0.07573299407958985, 0.0753602523803711, 0.07571660614013671, 0.07541311645507813, 0.0748133773803711, 0.07982272338867187, 0.07544649505615235, 0.07515376281738281, 0.07522866821289062, 0.07486483001708985, 0.07471171569824218, 0.07558316802978515, 0.07493869018554687, 0.07569612884521484, 0.07760089874267578, 0.08631283569335937, 0.07577190399169922, 0.07533567810058593, 0.07989247894287109, 0.075621826171875, 0.07521542358398438, 0.07526195526123047, 0.07486386871337891, 0.07500035095214844, 0.07576525115966797, 0.07797219085693359, 0.07597875213623047, 0.07516345977783204, 0.0752717742919922, 0.07496463775634765, 0.07466060638427735, 0.07964895629882812, 0.07506435394287109, 0.07503971099853515, 0.07519641876220703, 0.07569407653808594, 0.07525132751464844, 0.07547122955322266, 0.0751103973388672, 0.07541715240478515, 0.07521663665771484, 0.07515353393554687, 0.07488285064697266, 0.07478886413574219, 0.07677324676513672, 0.08049529266357422, 0.07869615936279296, 0.07567411041259765, 0.07514726257324218, 0.07549132537841798, 0.07516569519042969, 0.0754544677734375, 0.07570022583007813, 0.07573023986816406, 0.07499846649169922, 0.07528963470458984, 0.07488124847412109, 0.07509683227539063, 0.07756121826171875, 0.07810275268554688, 0.07498588562011718, 0.07501414489746094, 0.07495206451416016, 0.07507212829589843, 0.07506317138671875, 0.07512457275390624, 0.07574534606933593, 0.07616738891601563, 0.07950272369384766, 0.0759112319946289, 0.07543449401855469, 0.07549906921386719, 0.07572908782958984, 0.07578387451171875, 0.07572102355957032, 0.07619318389892578, 0.07564348602294922, 0.07573709106445313, 0.07621196746826171, 0.07552230072021485, 0.07539683532714844, 0.07538297271728515, 0.07540131378173828, 0.07582105255126953, 0.07533773040771484, 0.079259521484375, 0.07483609771728515, 0.07499980926513672, 0.07482169342041016, 0.07506732940673828, 0.07527407836914063, 0.07510380554199218, 0.07611433410644532, 0.07515679931640624, 0.07468940734863282, 0.07513497924804688, 0.07464371490478515, 0.07481318664550782, 0.08036966705322265, 0.07560566711425781, 0.07521724700927734, 0.07638339233398438, 0.07516844940185546, 0.07502044677734375, 0.07527117156982421, 0.07520323181152344, 0.07530483245849609, 0.07481391906738281, 0.07462252807617188, 0.0753966064453125, 0.07609014129638672, 0.07513308715820312, 0.07980150604248047, 0.0751371841430664, 0.07497545623779298, 0.0747954864501953, 0.07449545288085938, 0.07488925170898438, 0.0748037109375, 0.0745815658569336, 0.07467692565917969, 0.07430937957763672, 0.07448371124267578, 0.07504592132568359, 0.0748881607055664, 0.07505241394042969, 0.079829345703125, 0.07531343841552735, 0.07510160064697266, 0.07595021057128906, 0.07550204467773437, 0.0755416030883789, 0.07574947357177735, 0.07606969451904297, 0.07549542236328124, 0.07542745971679687, 0.07556681823730468, 0.07489762878417969, 0.07950527954101562, 0.075447998046875, 0.07518621063232422, 0.07545532989501953, 0.07574732971191406, 0.0752701416015625, 0.08409852600097656, 0.07938931274414063, 0.07650704193115235, 0.0760499496459961, 0.07557782745361329, 0.07581865692138672, 
0.07558793640136718, 0.07600924682617187, 0.08055420684814453, 0.07553433227539062, 0.07580662536621094, 0.07620803070068359, 0.07676537322998046, 0.07646109008789062, 0.07672515106201172, 0.07526201629638672, 0.07506329345703125, 0.07486188507080078, 0.07469945526123047, 0.07471308898925781, 0.07502387237548828, 0.07909836578369141, 0.075136962890625, 0.07520796966552734, 0.07520492553710938, 0.076005859375, 0.07636537933349609, 0.07624543762207031, 0.07606018829345704, 0.07584815979003906, 0.07959449768066407, 0.07572991943359375, 0.07900956726074218, 0.0752063980102539, 0.07540169525146484, 0.07551789093017579, 0.07533984375, 0.07506476593017578, 0.07538089752197266, 0.07594367980957031, 0.07507830047607422, 0.0747776336669922, 0.07487721252441407, 0.07481005096435547, 0.07490131378173828, 0.07996844482421875, 0.07483753967285156, 0.07475452423095703, 0.07454720306396484, 0.0749114227294922, 0.07501651000976563, 0.0755814437866211, 0.07538893127441407, 0.07494656372070313, 0.07457443237304688, 0.07437926483154297, 0.07449005126953125, 0.07430502319335938, 0.0744653091430664, 0.07418704223632812, 0.07446678161621094, 0.07455184173583984, 0.07506924438476563, 0.07515318298339843, 0.07494022369384766, 0.07489574432373047, 0.07521862030029297, 0.07489794921875, 0.07488044738769531, 0.07457750701904296, 0.0760186538696289, 0.07596441650390626, 0.07611801910400391, 0.07599836730957031, 0.07594889831542968, 0.07614669036865235, 0.0770823974609375, 0.07587862396240234, 0.07565926361083984, 0.07536809539794923, 0.07568828582763672, 0.07543379211425781, 0.07519046020507812, 0.0748953628540039, 0.07499478149414063, 0.07463983917236328, 0.07467052459716797, 0.07606681823730468, 0.07478886413574219, 0.07481753540039063, 0.07517987060546875, 0.07478643035888671, 0.0750059814453125, 0.07464351654052734, 0.07488719940185547, 0.07463568115234374, 0.07457949066162109, 0.07469840240478516, 0.07462569427490234, 0.0743835220336914, 0.07447756958007813, 0.07453286743164063, 0.07483580780029297, 0.07508390045166016, 0.07500393676757812, 0.07509184265136719, 0.07474803161621094, 0.07466598510742188, 0.07475523376464843, 0.07457778930664062, 0.07528137969970704, 0.0745164794921875, 0.074608642578125, 0.07489945220947265, 0.07475814056396485, 0.07481958770751954, 0.07461251068115235, 0.07454105377197266, 0.07762739562988281, 0.075453857421875, 0.07481782531738282, 0.07477648162841796, 0.07453533172607422, 0.07456768035888672, 0.07434854125976563, 0.0743198699951172, 0.07481507110595703, 0.0744362564086914, 0.07489590454101562, 0.07477996826171875, 0.07490243530273437, 0.07731394958496093, 0.07953564453125, 0.07497686767578125, 0.07462713623046875, 0.07475907135009766, 0.07587583923339844, 0.07518227386474609, 0.07547731018066406, 0.0758221435546875, 0.07532819366455078, 0.0777771224975586, 0.079235107421875, 0.0800030746459961, 0.07854080200195312, 0.07576943969726563, 0.07490354919433594, 0.07443468475341797, 0.07430172729492188, 0.07685858917236328, 0.07767711639404297, 0.07751398468017578, 0.07897801971435547, 0.07663820648193359, 0.07543807983398437, 0.07576473236083985, 0.07546367645263671, 0.07602559661865234, 0.07521686553955079, 0.07560221099853516, 0.07651497650146484, 0.08036704254150391, 0.07653878021240235, 0.07969084930419922, 0.07589775848388672, 0.07572684478759766, 0.07514316558837891, 0.07529472351074219, 0.07546880340576172, 0.07553369903564452, 0.07586224365234374, 0.07555856323242187, 0.07563878631591797, 0.07514348602294922, 0.0751884765625, 0.0753420181274414, 0.07487404632568359, 
0.07476831817626953, 0.07472937774658203, 0.07614358520507812, 0.07899990081787109, 0.07992658996582032, 0.07972281646728516, 0.07981238555908203, 0.07928278350830079, 0.07904045104980469, 0.07726905822753906, 0.07510630035400391, 0.07489027404785156, 0.07485100555419921, 0.074889404296875, 0.07527433776855469, 0.07531011199951172, 0.07530390167236328, 0.07571574401855469, 0.07622911834716797, 0.07629142761230469, 0.07566553497314453, 0.07540825653076172, 0.07531068420410156, 0.07508342742919921, 0.07503948974609374, 0.0751247329711914, 0.07550361633300781, 0.07532749176025391, 0.0756654052734375, 0.07533567810058593, 0.07528447723388672, 0.07549702453613281, 0.07551020812988281, 0.07561808013916016, 0.07612643432617187, 0.07505535888671876, 0.07482057952880859, 0.07507843017578125, 0.07530496215820312, 0.07541379547119141, 0.07689958190917968, 0.0754815673828125, 0.07553385925292969, 0.07529315185546875, 0.07509606170654297, 0.076115966796875, 0.0750712661743164, 0.07560829162597656, 0.07525926208496093, 0.07465229034423829, 0.07482755279541016, 0.074716796875, 0.0748260498046875, 0.07552642822265625, 0.07542361450195313, 0.07554605102539062, 0.07537667083740235, 0.0750838394165039, 0.07543869018554687, 0.07492588806152344, 0.07483001708984376, 0.07540531158447265, 0.07497727966308594, 0.07524345397949218, 0.07501010894775391, 0.075070556640625, 0.07532749176025391, 0.07584553527832032, 0.07528233337402344, 0.07492192077636718, 0.07522675323486328, 0.0749940185546875, 0.07469699096679687, 0.07461590576171875, 0.07469747161865234, 0.07462697601318359, 0.07541580963134766, 0.07472537231445313, 0.07500784301757812, 0.07473715209960938, 0.07495855712890626, 0.07489344024658204, 0.07514809417724609, 0.07589430236816407, 0.08196511840820313, 0.07643555450439453, 0.0755549087524414, 0.07502051544189453, 0.0754176025390625, 0.0747315216064453, 0.07471308898925781, 0.0754298858642578, 0.07487283325195312, 0.07463040161132813, 0.07639424133300782, 0.075328125, 0.0751783676147461, 0.07479910278320312, 0.0753183364868164, 0.07455808258056641, 0.07495458984375, 0.07493270111083984, 0.07459225463867188, 0.07449394989013672, 0.07451238250732421, 0.07495216369628906, 0.07506915283203125, 0.07501907348632812, 0.07488511657714844, 0.07485440063476563, 0.07520870208740234, 0.07452671813964844, 0.07446265411376952, 0.07457357025146484, 0.07810336303710938, 0.07506944274902344, 0.07580467224121094, 0.07841353607177734, 0.0753380126953125, 0.0745223388671875, 0.07537484741210937, 0.07489324951171875, 0.07522108459472657, 0.07480524444580078, 0.07515062713623047, 0.07490009307861328, 0.07463033294677734, 0.07470742034912109, 0.07513747406005859, 0.07430143737792969, 0.07487283325195312, 0.07459225463867188, 0.07472736358642579, 0.075015869140625, 0.07500019073486328, 0.07493427276611328, 0.07504281616210938, 0.0745185317993164, 0.07740803527832031, 0.07449622344970704, 0.07506646728515624, 0.07470377349853516, 0.07461068725585937, 0.07413670349121093, 0.07452134704589844, 0.07437120056152344, 0.07463526153564454, 0.0746618881225586, 0.07470428466796875, 0.07455760192871094, 0.07477820587158203, 0.07422844696044922, 0.0743978271484375, 0.07482982635498046, 0.07448528289794921, 0.07485692596435548, 0.0747540512084961, 0.0745126724243164, 0.07451004791259766, 0.07486255645751953, 0.07490943908691407, 0.07530115509033203, 0.07507743835449218, 0.07522259521484374, 0.07462313842773438, 0.07494294738769532, 0.07459430694580078, 0.07493222045898437, 0.07449750518798828, 0.07522972869873047, 0.07509190368652344, 
0.07591532897949219, 0.07523462677001953, 0.07494028472900391, 0.07512735748291016, 0.07493888092041015, 0.07472512054443359, 0.07562035369873046, 0.0756162567138672, 0.0747540512084961, 0.07522303771972656, 0.07505510711669922, 0.07506944274902344, 0.0763904037475586, 0.07535404968261719, 0.07534803009033203, 0.07491584014892579, 0.07510655975341797, 0.0759002914428711, 0.07556339263916016, 0.07503667449951172, 0.07483369445800782]",tokens/s,13.22522182286775,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1268.813824,8455.585792,0.0,8053.06368,7930.605568,s,1,19.711283203125,19.711283203125,0.0,19.711283203125,19.711283203125,19.711283203125,19.711283203125,[19.711283203125],,kWh,0.0003685260360249231,4.064403617440899e-05,0.00012543426701400184,0.0005346043392133339,,MB,1382.48192,10221.387776,0.0,9806.282752,9135.716352,s,10,17.576072265624997,1.7576072265624998,0.007775130503945248,1.7587059326171874,1.7645816162109376,1.7664391845703125,1.7679252392578126,"[1.7380986328125, 1.75363427734375, 1.75492333984375, 1.7559471435546874, 1.759649658203125, 1.7609027099609376, 1.75776220703125, 1.7641688232421875, 1.762688720703125, 1.7682967529296876]",tokens/s,145.65256453837,kWh,5.1133405510830325e-05,5.639680572709984e-06,3.400886054039931e-05,9.078194662393962e-05,tokens/kWh,2819943.9373168456,MB,1407.664128,10223.484928,0.0,9806.282752,9135.718912,s,10,89.0283310546875,8.90283310546875,0.01600448390269224,8.9101630859375,8.9181701171875,8.919608007812501,8.9207583203125,"[8.869404296875, 8.8862099609375, 8.8892255859375, 8.8956015625, 8.9101083984375, 8.9102177734375, 8.9150224609375, 8.91364453125, 8.9178505859375, 8.9210458984375]",tokens/s,7.076399080344542,kWh,0.0002608859089433387,2.8777664902550355e-05,0.00017298561061060148,0.0004626491844564906,tokens/kWh,136172.29234719375,,s,630,89.02477482604985,0.1413091663905552,0.0017022937122733572,0.14119747161865234,0.1422438491821289,0.14252964401245116,0.15172585266113284,"[0.1537945556640625, 0.14138163757324218, 0.13981004333496094, 0.1400382080078125, 0.13996485900878905, 0.1396650848388672, 0.13986221313476563, 0.1399126739501953, 0.13992172241210937, 0.13979833984375, 0.14000979614257814, 0.13999055480957032, 0.13993458557128907, 0.13996652221679687, 0.14004428100585936, 0.14007286071777345, 0.14010726928710937, 0.14014115905761718, 0.1399869384765625, 0.14011170959472657, 0.14010179138183593, 0.1401282501220703, 0.14191206359863281, 0.14043954467773437, 0.14030876159667968, 0.14037577819824218, 0.14084268188476562, 0.1402227783203125, 0.14024908447265624, 0.1403453369140625, 0.1403365478515625, 0.14052351379394531, 0.14102589416503905, 0.14041203308105468, 0.14037286376953126, 0.14226188659667968, 0.14085157775878906, 0.14076101684570314, 0.140489990234375, 0.14046493530273438, 0.14046759033203124, 0.14052418518066406, 0.1417869110107422, 0.14148016357421875, 0.14050828552246095, 
0.14057562255859374, 0.14051295471191405, 0.14081260681152344, 0.14069879150390624, 0.14149224853515624, 0.14230201721191407, 0.1408675842285156, 0.14060739135742187, 0.1406538848876953, 0.14050160217285157, 0.1405503387451172, 0.1407011260986328, 0.1425751953125, 0.1411426239013672, 0.14083929443359375, 0.1409122619628906, 0.14085568237304688, 0.14074674987792968, 0.15084815979003907, 0.1413089904785156, 0.139822021484375, 0.13987020874023437, 0.14011801147460937, 0.14012771606445312, 0.140157470703125, 0.13987020874023437, 0.14167245483398438, 0.14034124755859376, 0.13990287780761718, 0.14003126525878906, 0.13995030212402343, 0.14169290161132814, 0.14027226257324218, 0.13980262756347656, 0.14201834106445313, 0.14054371643066407, 0.14006871032714843, 0.14005500793457032, 0.14018524169921875, 0.14186550903320314, 0.14075698852539062, 0.140483642578125, 0.1419346923828125, 0.1405262451171875, 0.14045613098144533, 0.1413382110595703, 0.14039082336425782, 0.14042930603027343, 0.14177232360839845, 0.14170774841308595, 0.14027891540527343, 0.140413818359375, 0.140534912109375, 0.1404179229736328, 0.14140211486816406, 0.14223770141601563, 0.1416171569824219, 0.14064790344238282, 0.14056268310546874, 0.14052691650390625, 0.14061184692382814, 0.14082736206054688, 0.142304931640625, 0.14217430114746094, 0.14073049926757814, 0.14094883728027344, 0.14069839477539062, 0.14057061767578125, 0.14063612365722655, 0.1423585662841797, 0.1421414337158203, 0.1419489288330078, 0.14063113403320313, 0.14050746154785157, 0.1410177001953125, 0.14062559509277345, 0.14252473449707032, 0.1417761535644531, 0.14224867248535156, 0.14077133178710938, 0.14080368041992186, 0.1533624267578125, 0.14151799011230468, 0.13981929016113281, 0.1398810272216797, 0.1398206329345703, 0.14002423095703126, 0.1399009246826172, 0.1400647735595703, 0.14157414245605468, 0.1405049591064453, 0.14021644592285157, 0.13987429809570312, 0.1402019805908203, 0.1417502746582031, 0.14046598815917968, 0.1411662139892578, 0.1415332794189453, 0.14101142883300782, 0.1401621398925781, 0.14019676208496093, 0.14012416076660156, 0.1400663604736328, 0.14200057983398437, 0.14133807373046875, 0.1411663055419922, 0.1416446075439453, 0.14022428894042968, 0.14035784912109375, 0.14019920349121093, 0.14028668212890624, 0.14030233764648437, 0.14170317077636718, 0.141412353515625, 0.1404923858642578, 0.14055369567871093, 0.14034629821777345, 0.14051715087890626, 0.14038983154296875, 0.1422586212158203, 0.14244898986816407, 0.14180557250976564, 0.14047027587890626, 0.14051930236816407, 0.1404680633544922, 0.1405436096191406, 0.1417755126953125, 0.14251536560058595, 0.14121189880371093, 0.14053436279296874, 0.1405860137939453, 0.14103240966796876, 0.14066278076171876, 0.142350341796875, 0.14193994140625, 0.14175424194335937, 0.1405879364013672, 0.14107647705078125, 0.14222483825683593, 0.1408865966796875, 0.14057472229003906, 0.14216316223144532, 0.14141110229492188, 0.14085935974121094, 0.15179571533203126, 0.141264892578125, 0.1398678741455078, 0.13991165161132812, 0.139885986328125, 0.13984194946289064, 0.14050918579101562, 0.14203085327148438, 0.14132633972167968, 0.13994598388671875, 0.13995826721191407, 0.14066278076171876, 0.13997817993164063, 0.1412449951171875, 0.14108189392089843, 0.14172029113769533, 0.14038426208496094, 0.14008688354492188, 0.1406048583984375, 0.1400780487060547, 0.1407057647705078, 0.14149224853515624, 0.14189773559570312, 0.14099046325683592, 0.14006793212890625, 0.14023362731933595, 0.1401581726074219, 0.14169168090820314, 
0.1412894744873047, 0.14175234985351562, 0.14137283325195313, 0.14115078735351563, 0.1402265625, 0.14023709106445312, 0.14176953125, 0.14194969177246095, 0.14128504943847656, 0.14174461364746094, 0.14025254821777344, 0.14035008239746094, 0.14029823303222655, 0.14178022766113282, 0.14151919555664064, 0.14156565856933595, 0.14174418640136718, 0.140400390625, 0.140861572265625, 0.14047308349609375, 0.14217593383789062, 0.14219296264648437, 0.14201808166503907, 0.1417845458984375, 0.14054296875, 0.1407283172607422, 0.14098419189453126, 0.14228215026855467, 0.14205410766601562, 0.14150579833984375, 0.14213938903808593, 0.14135577392578125, 0.1406356201171875, 0.14097567749023437, 0.14242236328125, 0.1515548095703125, 0.14110540771484376, 0.13962034606933593, 0.13981491088867187, 0.1397821502685547, 0.13989068603515625, 0.14098226928710939, 0.14185472106933594, 0.1420059814453125, 0.14071017456054688, 0.13983920288085938, 0.139931396484375, 0.13991075134277345, 0.14270970153808593, 0.14083197021484375, 0.14164979553222656, 0.14150466918945312, 0.13990576171875, 0.14018357849121094, 0.14018765258789062, 0.1416273956298828, 0.14131199645996093, 0.14179942321777345, 0.14111708068847656, 0.1405684814453125, 0.14015737915039062, 0.141264892578125, 0.141486083984375, 0.14211878967285158, 0.14310617065429687, 0.14154946899414061, 0.14036550903320313, 0.14025149536132814, 0.14046829223632812, 0.1411727294921875, 0.14186495971679688, 0.14215184020996094, 0.1417986602783203, 0.1421393280029297, 0.14082879638671875, 0.140341796875, 0.14186805725097656, 0.14162428283691406, 0.14234010314941406, 0.1414819793701172, 0.14174412536621095, 0.14164378356933593, 0.14047027587890626, 0.14128131103515626, 0.14240354919433593, 0.14153932189941407, 0.14224362182617187, 0.14211503601074219, 0.14167449951171876, 0.14066482543945313, 0.14148403930664064, 0.14221107482910156, 0.141412353515625, 0.1425059814453125, 0.14283570861816405, 0.14136729431152345, 0.1408100128173828, 0.14253692626953124, 0.15089663696289063, 0.14135090637207032, 0.13979238891601561, 0.1416392364501953, 0.14091049194335936, 0.13977229309082032, 0.14029603576660157, 0.14267628479003908, 0.14242982482910158, 0.14025357055664062, 0.14141567993164061, 0.14044975280761718, 0.14077122497558595, 0.14091558837890625, 0.14176870727539062, 0.14178713989257813, 0.14122598266601563, 0.14105599975585936, 0.13995213317871094, 0.1400463409423828, 0.14189891052246092, 0.1413620147705078, 0.14164787292480469, 0.141559814453125, 0.14160076904296875, 0.14028746032714845, 0.14016259765625, 0.14113424682617187, 0.14172572326660157, 0.14129823303222655, 0.1414039306640625, 0.14196131896972655, 0.14054722595214844, 0.14022755432128906, 0.1412710418701172, 0.14191798400878905, 0.1411966094970703, 0.14212188720703126, 0.14190182495117187, 0.1420615692138672, 0.14064845275878907, 0.1403330535888672, 0.14096588134765625, 0.14198130798339845, 0.141836669921875, 0.14168643188476562, 0.14218479919433594, 0.14060057067871093, 0.14134962463378906, 0.14152224731445312, 0.1406693115234375, 0.14223724365234375, 0.1421381072998047, 0.1412833251953125, 0.14068736267089843, 0.14165309143066407, 0.14168893432617188, 0.14097491455078126, 0.1423948516845703, 0.1420784912109375, 0.14119731140136718, 0.14072163391113282, 0.14236444091796874, 0.15497663879394533, 0.14127635192871094, 0.1398526153564453, 0.13989683532714844, 0.1403468780517578, 0.13986451721191406, 0.14081561279296875, 0.14191084289550782, 0.14183529663085936, 0.14123469543457032, 0.13980105590820313, 0.14002175903320313, 
0.13989068603515625, 0.14117683410644533, 0.141012451171875, 0.14166889953613282, 0.1419586181640625, 0.1407944030761719, 0.13997225952148437, 0.14002006530761718, 0.14191766357421876, 0.1414783935546875, 0.14139190673828125, 0.14159855651855469, 0.14084457397460937, 0.1401649627685547, 0.14025788879394532, 0.14177912902832032, 0.14173799133300782, 0.14107398986816405, 0.14317202758789063, 0.14105722045898436, 0.14015359497070312, 0.14109616088867188, 0.1416405487060547, 0.141604736328125, 0.14115408325195314, 0.1423910675048828, 0.14146617126464844, 0.14088983154296875, 0.14154371643066407, 0.1415925750732422, 0.14187654113769532, 0.14159327697753907, 0.1422458953857422, 0.1413446044921875, 0.14227040100097657, 0.1405831298828125, 0.1416312713623047, 0.14173606872558595, 0.1419505615234375, 0.141433349609375, 0.14158189392089843, 0.1425924530029297, 0.14133042907714843, 0.141127685546875, 0.14195097351074218, 0.14223155212402344, 0.14147325134277344, 0.14182044982910155, 0.14229913330078126, 0.1413540496826172, 0.14194508361816408, 0.15427165222167968, 0.14118905639648438, 0.1397205047607422, 0.13965957641601562, 0.1397903289794922, 0.139683837890625, 0.14122528076171875, 0.14234048461914062, 0.14095187377929688, 0.14097398376464843, 0.14102700805664062, 0.13995050048828125, 0.13991116333007814, 0.14161920166015626, 0.14114405822753906, 0.14132208251953124, 0.14201568603515624, 0.14147648620605469, 0.14007066345214844, 0.13997731018066406, 0.1415142364501953, 0.14076364135742186, 0.14140354919433593, 0.14169322204589843, 0.14217996215820314, 0.140646240234375, 0.14015370178222655, 0.14326579284667967, 0.14087954711914064, 0.1411976318359375, 0.14148141479492188, 0.1417897644042969, 0.1404058532714844, 0.14074563598632814, 0.1418997802734375, 0.1411287078857422, 0.14182447814941407, 0.1412835235595703, 0.1416441345214844, 0.14137721252441407, 0.14018966674804687, 0.14164822387695314, 0.14198118591308595, 0.14197775268554688, 0.14076144409179686, 0.14207180786132811, 0.14067097473144533, 0.14057061767578125, 0.14208819580078125, 0.14216143798828124, 0.1426106262207031, 0.141967041015625, 0.1416414031982422, 0.14145628356933593, 0.14176870727539062, 0.14131983947753907, 0.1414652862548828, 0.1420089874267578, 0.14213938903808593, 0.1412833251953125, 0.1419735107421875, 0.14162442016601562, 0.1423404541015625, 0.15450630187988282, 0.14160540771484376, 0.13975120544433595, 0.13964352416992187, 0.13956483459472657, 0.13976588439941406, 0.14100003051757812, 0.14271260070800781, 0.14178608703613282, 0.14046412658691407, 0.14043948364257813, 0.13977340698242188, 0.13980117797851563, 0.14100265502929688, 0.14247520446777343, 0.14135308837890626, 0.14144720458984375, 0.14063821411132812, 0.13977107238769532, 0.13989552307128905, 0.1404580841064453, 0.14187648010253906, 0.14245759582519532, 0.14114816284179688, 0.14152217102050782, 0.14015359497070312, 0.14006230163574218, 0.14127520751953124, 0.14137936401367188, 0.14217251586914062, 0.14189590454101564, 0.1423585205078125, 0.1406054382324219, 0.14004339599609375, 0.1412268829345703, 0.14255923461914063, 0.1417523193359375, 0.14222889709472655, 0.14269676208496093, 0.1416461181640625, 0.14017939758300782, 0.14036384582519532, 0.1421593017578125, 0.14193516540527343, 0.14180557250976564, 0.1429012451171875, 0.14194857788085938, 0.14040713500976562, 0.1406033935546875, 0.14073651123046876, 0.14215577697753906, 0.14253366088867186, 0.14171029663085938, 0.14199107360839844, 0.1421545867919922, 0.1410600891113281, 0.1404311981201172, 0.14202195739746093, 
0.14314125061035157, 0.1426068115234375, 0.1426534423828125, 0.14308966064453124, 0.1419792022705078, 0.1533849639892578, 0.1414676513671875, 0.1397505645751953, 0.13972889709472655, 0.13968060302734375, 0.14023216247558593, 0.14153890991210938, 0.1426564483642578, 0.14201036071777343, 0.1401343994140625, 0.13977098083496095, 0.14008412170410156, 0.1414383087158203, 0.14061021423339845, 0.14186448669433593, 0.1424204406738281, 0.1405617218017578, 0.141623291015625, 0.14024327087402344, 0.13997914123535157, 0.14163075256347657, 0.14156591796875, 0.14188963317871095, 0.1411447296142578, 0.14166134643554687, 0.14042604064941405, 0.14081027221679687, 0.14173593139648438, 0.1417523193359375, 0.14158198547363282, 0.14114646911621093, 0.1419815673828125, 0.14150582885742188, 0.14064297485351562, 0.14123846435546875, 0.14153520202636719, 0.14182736206054689, 0.14134757995605468, 0.14205075073242188, 0.14099513244628906, 0.14180880737304688, 0.14123049926757814, 0.14139027404785157, 0.14173799133300782, 0.14221241760253905, 0.14207862854003905, 0.14140010070800782, 0.14168803405761718, 0.1416363830566406, 0.14216581726074218, 0.14110943603515624, 0.14205751037597655, 0.14164784240722655, 0.1425489959716797, 0.14208204650878906, 0.14138983154296875, 0.14193862915039063, 0.1411932830810547, 0.14226605224609376, 0.14164781188964845, 0.1430839080810547, 0.14248345947265625, 0.1422458953857422]",tokens/s,7.076681757757774,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1070.51008,1620.967424,0.0,1218.445312,1206.173696,s,1,8.9400673828125,8.9400673828125,0.0,8.9400673828125,8.9400673828125,8.9400673828125,8.9400673828125,[8.9400673828125],,kWh,5.8148322166607614e-05,6.406967934025516e-06,1.9520015616003583e-05,8.407530571663671e-05,,MB,1443.422208,1916.665856,0.0,1501.560832,1463.359488,s,10,1.88896044921875,0.188896044921875,0.00042418022476985883,0.18892642974853516,0.1893941619873047,0.18943067779541015,0.18945989044189454,"[0.18800201416015624, 0.18946719360351563, 0.18889791870117187, 0.18903289794921874, 0.18919129943847657, 0.18895359802246095, 0.18879469299316406, 0.1883355255126953, 0.18938604736328124, 0.18889926147460936]",tokens/s,1355.2427744364809,kWh,5.700193120751697e-06,6.286310838211181e-07,3.7892070911917205e-06,1.0118031295764535e-05,tokens/kWh,25301364.713821653,MB,1468.35456,1918.763008,0.0,1501.560832,1463.362048,s,10,22.168346923828125,2.2168346923828124,0.005821755020750205,2.21626513671875,2.2263521728515623,2.2265076293945314,2.226631994628906,"[2.2266630859375, 2.218503662109375, 2.214466796875, 2.208841064453125, 2.208920166015625, 2.2188271484375, 2.21594921875, 2.2165810546875, 2.226317626953125, 
2.213277099609375]",tokens/s,28.41889844852757,kWh,6.456090616465925e-05,7.120980691015627e-06,3.1725043542411415e-05,0.0001034069303980863,tokens/kWh,609243.4980660243,,s,630,22.165678146362314,0.03518361610533699,0.00046236814490699103,0.03508961677551269,0.03548408164978028,0.03570396270751953,0.03664260120391846,"[0.03540188980102539, 0.03523311996459961, 0.03524630355834961, 0.035203201293945316, 0.035445056915283206, 0.03579449462890625, 0.03541263961791992, 0.035526462554931644, 0.03562639999389648, 0.035340415954589845, 0.03518230438232422, 0.035219905853271484, 0.03509503936767578, 0.03537919998168945, 0.03567001724243164, 0.03508838272094727, 0.03518054580688477, 0.03508351898193359, 0.03503551864624024, 0.035248512268066405, 0.03506585693359375, 0.03497574234008789, 0.03521308898925781, 0.034985633850097654, 0.03522003173828125, 0.035053569793701174, 0.03508019256591797, 0.03500236892700195, 0.03539558410644531, 0.035206497192382814, 0.03529180908203125, 0.03529318237304688, 0.035366912841796876, 0.03551027297973633, 0.0356096305847168, 0.03535356903076172, 0.035588096618652344, 0.03521535873413086, 0.03556147384643555, 0.035364864349365234, 0.03547750473022461, 0.035544384002685545, 0.035576480865478516, 0.03538742446899414, 0.03524198532104492, 0.03530678558349609, 0.03521200180053711, 0.035156993865966796, 0.03572019195556641, 0.0366728630065918, 0.035305118560791014, 0.035345409393310545, 0.03519612884521484, 0.03526326370239258, 0.03520512008666992, 0.03524198532104492, 0.03538499069213867, 0.035557727813720706, 0.03523152160644531, 0.03551395034790039, 0.03523811340332031, 0.03539190292358398, 0.035501792907714845, 0.03598387145996094, 0.03547721481323242, 0.035329761505126955, 0.03521200180053711, 0.03546028900146484, 0.03509942245483398, 0.03517033767700195, 0.03519692611694336, 0.03502057647705078, 0.03502102279663086, 0.03495731353759766, 0.03498566436767578, 0.03507846450805664, 0.03499724960327148, 0.035089408874511716, 0.035010112762451175, 0.03512569427490234, 0.035332096099853515, 0.035098079681396485, 0.03491052627563476, 0.03515824127197266, 0.03527180862426758, 0.035533695220947265, 0.03523788833618164, 0.03514918518066406, 0.03514588928222656, 0.03523632049560547, 0.035250175476074216, 0.035418113708496096, 0.03548364639282227, 0.035195903778076174, 0.03504844665527344, 0.038419681549072264, 0.035109119415283205, 0.03512374496459961, 0.03530547332763672, 0.03520060729980469, 0.03498400115966797, 0.03498428726196289, 0.034920448303222655, 0.0350467529296875, 0.034781185150146485, 0.03503724670410156, 0.03475462341308594, 0.0348554573059082, 0.03510224151611328, 0.03509491348266602, 0.03503513717651367, 0.03493487930297851, 0.034940254211425784, 0.03498425674438477, 0.03500799942016602, 0.034998783111572264, 0.035103073120117186, 0.03517174530029297, 0.03487596893310547, 0.035133472442626955, 0.035350528717041016, 0.03525632095336914, 0.035630977630615235, 0.03555136108398437, 0.03539148712158203, 0.03549388885498047, 0.035823070526123046, 0.03523193740844727, 0.035168094635009764, 0.03498348617553711, 0.03493920135498047, 0.035281536102294925, 0.03505561447143555, 0.035413089752197265, 0.035004863739013674, 0.0351662712097168, 0.0352481918334961, 0.03522585678100586, 0.03532803344726562, 0.03515193557739258, 0.03509968185424805, 0.03504636764526367, 0.03590758514404297, 0.03514291381835938, 0.03531852722167969, 0.03528428649902344, 0.03504198455810547, 0.034934463500976565, 0.03502931213378906, 0.03495481491088867, 0.03494137573242188, 0.035961982727050784, 
0.03500940704345703, 0.034930526733398436, 0.03512745666503906, 0.034928638458251955, 0.03528403091430664, 0.035046337127685546, 0.03503225708007812, 0.034949951171875, 0.035422206878662106, 0.03511414337158203, 0.0351148796081543, 0.03507299041748047, 0.035053569793701174, 0.03514547348022461, 0.0352545280456543, 0.035136577606201175, 0.035031936645507813, 0.03505897521972656, 0.03504585647583008, 0.0350047378540039, 0.035004383087158204, 0.035528736114501955, 0.03524607849121094, 0.035194881439208986, 0.03506953430175781, 0.03517209625244141, 0.03498617553710937, 0.034972129821777345, 0.0349117431640625, 0.03497830581665039, 0.03504742431640625, 0.034985183715820316, 0.0350318717956543, 0.034996192932128904, 0.03507593536376953, 0.03510492706298828, 0.03546495819091797, 0.03560857772827149, 0.035210784912109376, 0.03514243316650391, 0.03495507049560547, 0.03499363327026367, 0.03495174407958984, 0.03499647903442383, 0.034994174957275394, 0.034987552642822266, 0.03523049545288086, 0.0349447021484375, 0.03494009780883789, 0.034855743408203126, 0.03487948989868164, 0.03517440032958984, 0.034854080200195314, 0.034755390167236326, 0.035034175872802734, 0.03507500839233398, 0.03519046401977539, 0.03490233612060547, 0.03502899169921875, 0.03521535873413086, 0.03509862518310547, 0.035444576263427736, 0.03493289566040039, 0.034997726440429686, 0.03511145782470703, 0.035119102478027346, 0.03504127883911133, 0.03508838272094727, 0.035017921447753904, 0.03476972961425781, 0.03506175994873047, 0.03549318313598633, 0.03505769729614258, 0.035203166961669925, 0.03522787094116211, 0.03497814559936523, 0.03506585693359375, 0.03502284622192383, 0.034938880920410156, 0.03506502532958984, 0.0351137924194336, 0.03494035339355469, 0.03481836700439453, 0.03496780776977539, 0.034799457550048825, 0.03499433517456055, 0.03489923095703125, 0.034879711151123045, 0.035467231750488284, 0.035451423645019534, 0.03508428955078125, 0.03527679824829102, 0.034988033294677735, 0.03506585693359375, 0.035166206359863283, 0.03488972854614258, 0.03490553665161133, 0.03500019073486328, 0.03480031967163086, 0.035432449340820314, 0.036134048461914064, 0.035595104217529296, 0.035135486602783206, 0.03506380844116211, 0.03487744140625, 0.03487334442138672, 0.03486105728149414, 0.03495907211303711, 0.03487363052368164, 0.03475408172607422, 0.0348306884765625, 0.03517792129516602, 0.03574854278564453, 0.034838657379150394, 0.03498089599609375, 0.03500732803344726, 0.035210494995117185, 0.03496796798706055, 0.03520060729980469, 0.03489049530029297, 0.034830337524414064, 0.03473408126831055, 0.03503308868408203, 0.035125247955322264, 0.03546931076049804, 0.035004417419433595, 0.034958911895751954, 0.03475500869750976, 0.03486515045166016, 0.03506380844116211, 0.03480569458007812, 0.03490550231933594, 0.0347613754272461, 0.034729854583740234, 0.03493484878540039, 0.034764415740966795, 0.034724159240722655, 0.034738304138183594, 0.03477503967285156, 0.03469107055664063, 0.03498188781738281, 0.03546112060546875, 0.034729984283447264, 0.03489791870117188, 0.03490790557861328, 0.034966785430908205, 0.03488988876342773, 0.03491027069091797, 0.03483491134643555, 0.03491904067993164, 0.034989311218261716, 0.03481439971923828, 0.03511868667602539, 0.03498566436767578, 0.034835166931152343, 0.03484636688232422, 0.034730335235595704, 0.034920223236083986, 0.034877601623535155, 0.03560249710083008, 0.03933388900756836, 0.035386592864990234, 0.03504812622070313, 0.035471359252929685, 0.035426143646240235, 0.03495337677001953, 0.03534796905517578, 
0.03505779266357422, 0.035127521514892575, 0.03518889617919922, 0.03490982437133789, 0.03499388885498047, 0.03487161636352539, 0.037806175231933595, 0.03519510269165039, 0.03510831832885742, 0.03548188781738281, 0.03504361724853516, 0.035545024871826175, 0.03503865432739258, 0.034951808929443356, 0.03518259048461914, 0.03507577514648438, 0.03529052734375, 0.03498281478881836, 0.03500646209716797, 0.036141056060791016, 0.03502284622192383, 0.03487936019897461, 0.03502092742919922, 0.034985118865966794, 0.03527347183227539, 0.035168350219726564, 0.034953216552734374, 0.03502284622192383, 0.03526860809326172, 0.03519692611694336, 0.035079456329345705, 0.03527692794799805, 0.03521187210083008, 0.03512319946289062, 0.035060928344726565, 0.035078975677490236, 0.03579824066162109, 0.03511318588256836, 0.03519315338134766, 0.03512140655517578, 0.03502444839477539, 0.034922943115234376, 0.035076095581054685, 0.03508982467651367, 0.03544329452514648, 0.03549388885498047, 0.035310848236083985, 0.03525823974609375, 0.03514182281494141, 0.035070304870605466, 0.03495951843261719, 0.03489811325073242, 0.03505487823486328, 0.035693279266357424, 0.035227649688720705, 0.03502899169921875, 0.035155681610107424, 0.03543888092041016, 0.03523289489746094, 0.03571270370483398, 0.03583350372314453, 0.03592668914794922, 0.03592512130737305, 0.03541843032836914, 0.03525279998779297, 0.03500851058959961, 0.03493478393554687, 0.03502489471435547, 0.03497903823852539, 0.03521206283569336, 0.035227649688720705, 0.03505561447143555, 0.03486848068237305, 0.03496217727661133, 0.034858238220214846, 0.03493555068969727, 0.03542739105224609, 0.03519583892822266, 0.03518009567260742, 0.03512569427490234, 0.03524607849121094, 0.034964607238769534, 0.03524697494506836, 0.03520204925537109, 0.03500281524658203, 0.03530809783935547, 0.035415294647216794, 0.035606975555419924, 0.0351126708984375, 0.03515999984741211, 0.035203742980957034, 0.03498393630981445, 0.034961406707763674, 0.03500851058959961, 0.03497292709350586, 0.03642780685424805, 0.035097118377685546, 0.03494313430786133, 0.03506723022460938, 0.035145790100097656, 0.03487372970581055, 0.03502083206176758, 0.034912479400634765, 0.034969600677490234, 0.03536076736450195, 0.03523289489746094, 0.03500640106201172, 0.03532870483398438, 0.03503478240966797, 0.03528764724731445, 0.035015937805175784, 0.034818241119384766, 0.03496607971191406, 0.03496345520019531, 0.03498339080810547, 0.03526700973510742, 0.035069473266601564, 0.035245662689208986, 0.03506399917602539, 0.035054367065429685, 0.03504127883911133, 0.03506790542602539, 0.03529299163818359, 0.03512921524047852, 0.03489260864257813, 0.034856800079345704, 0.03502096176147461, 0.03509187316894531, 0.035065502166748044, 0.03496646499633789, 0.03508838272094727, 0.03498393630981445, 0.03528268814086914, 0.03541574478149414, 0.035055553436279294, 0.03493132781982422, 0.03499008178710938, 0.035037151336669924, 0.03486313629150391, 0.035889087677001955, 0.03500764846801758, 0.034963390350341794, 0.03498697662353516, 0.03501055908203125, 0.0349653434753418, 0.034987648010253905, 0.03496195220947266, 0.03501260757446289, 0.03490335845947266, 0.03495980834960938, 0.03493487930297851, 0.03484892654418945, 0.03514572906494141, 0.03490307235717773, 0.034914752960205075, 0.03498652648925781, 0.03522969436645508, 0.03501055908203125, 0.034860992431640626, 0.03537516784667969, 0.03503212738037109, 0.03498204803466797, 0.03508047866821289, 0.035240447998046875, 0.03492454528808594, 0.03486105728149414, 0.03487744140625, 
0.035422206878662106, 0.042000385284423826, 0.035776512145996094, 0.03525836944580078, 0.03545702362060547, 0.03493478393554687, 0.03514777755737305, 0.034895870208740236, 0.03494438552856445, 0.03507440185546875, 0.03508355331420898, 0.034923519134521484, 0.03487744140625, 0.035024799346923825, 0.03539363098144531, 0.03516339111328125, 0.03509088134765625, 0.03505942535400391, 0.035493247985839846, 0.03526105499267578, 0.035044384002685544, 0.035050464630126954, 0.035160064697265625, 0.03680390548706055, 0.03634236907958984, 0.03734947204589844, 0.03554841613769531, 0.03539763259887695, 0.035402496337890624, 0.035897502899169924, 0.03533343887329102, 0.03515347290039063, 0.035263454437255856, 0.035175872802734376, 0.035377696990966795, 0.035067935943603516, 0.03536265563964844, 0.0351479377746582, 0.03539295959472656, 0.03522208023071289, 0.03563315200805664, 0.035183712005615236, 0.035336864471435546, 0.03516140747070313, 0.035266559600830076, 0.03520403289794922, 0.03545635223388672, 0.03518291091918945, 0.035258495330810546, 0.035128608703613284, 0.035250911712646486, 0.035477664947509764, 0.035270401000976566, 0.03574736022949219, 0.03527145767211914, 0.035250175476074216, 0.035129150390625, 0.03530947113037109, 0.03507356643676758, 0.03503737640380859, 0.03526099014282227, 0.035141632080078124, 0.03518627166748047, 0.03523132705688477, 0.03512358474731445, 0.03512339019775391, 0.03517788696289063, 0.03519369506835938, 0.035274177551269534, 0.03537539291381836, 0.03527503967285156, 0.03521036911010742, 0.03537715148925781, 0.0352592658996582, 0.03523174285888672, 0.03508812713623047, 0.035143936157226566, 0.034947071075439456, 0.035116512298583986, 0.035057376861572266, 0.0351607666015625, 0.03559254455566406, 0.036144447326660153, 0.036307647705078126, 0.03528704071044922, 0.03634175872802734, 0.03500032043457031, 0.03496755218505859, 0.0347770881652832, 0.034902015686035154, 0.034955265045166016, 0.03536870574951172, 0.035487998962402345, 0.034875102996826175, 0.03496521759033203, 0.03522537612915039, 0.034972225189208984, 0.03496572875976563, 0.0349224967956543, 0.03499622344970703, 0.034917919158935544, 0.03484662246704102, 0.035985950469970704, 0.036568511962890626, 0.03511356735229492, 0.03502489471435547, 0.03495868682861328, 0.03490816116333008, 0.03490063858032227, 0.034871295928955076, 0.0349306869506836, 0.03505766296386719, 0.034891326904296874, 0.034849216461181644, 0.03498310470581055, 0.035076927185058594, 0.0349571533203125, 0.03495747375488281, 0.035037185668945314, 0.03486649703979492, 0.03504544067382812, 0.03521804809570313, 0.03503433609008789, 0.03498678588867187, 0.034874881744384766, 0.03487590408325195, 0.03489177703857422, 0.034994174957275394, 0.03490611267089844, 0.034858688354492184, 0.034997726440429686, 0.03495401763916016, 0.03511907196044922, 0.03499155044555664, 0.03494569778442383, 0.03488153457641602, 0.03504537582397461, 0.03521331024169922, 0.035127296447753906, 0.035130943298339844, 0.03517279815673828, 0.03523788833618164, 0.035552833557128904, 0.03522195053100586]",tokens/s,28.422320122129527,, 
4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,14066.999296,7843.217408,0.0,7440.695296,7427.899392,s,1,31.231966796875,31.231966796875,0.0,31.231966796875,31.231966796875,31.231966796875,31.231966796875,[31.231966796875],,kWh,0.0007061666502042195,7.788805583180023e-05,0.0002423688050059991,0.0010264235110420189,,MB,1350.385664,8424.128512,0.0,8000.63488,7884.32384,s,10,1.1541895675659182,0.11541895675659182,0.00020022225376749954,0.1154023208618164,0.11568985900878906,0.115712353515625,0.11573034912109376,"[0.1154122543334961, 0.11573484802246094, 0.11529398345947266, 0.11520636749267578, 0.11542854309082032, 0.11566233825683593, 0.11568486022949219, 0.11521775817871094, 0.1151562271118164, 0.11539238739013671]",tokens/s,2218.0065319762066,kWh,3.414993194476404e-06,3.766117789828602e-07,2.2670431573025854e-06,6.05864813076185e-06,tokens/kWh,42253650.39771819,MB,1375.58016,8468.168704,0.0,8042.57792,7975.158272,s,10,51.3239208984375,5.13239208984375,0.022834266588416485,5.135453857421875,5.155297900390624,5.159712426757812,5.163244047851562,"[5.15126611328125, 5.164126953125, 5.15431689453125, 5.13792236328125, 5.1329853515625, 5.1236728515625, 5.15022998046875, 5.098244140625, 5.0925244140625, 5.1186318359375]",tokens/s,12.274978001908263,kWh,0.0001498875521805339,1.6533103268862373e-05,7.819068787549327e-05,0.0002446113433248895,tokens/kWh,257551.42481812157,,s,630,51.3211213760376,0.0814620974222819,0.0008468689536812414,0.08136668777465819,0.08217213211059571,0.08270437393188476,0.08500755332946779,"[0.08232134246826171, 0.08190777587890626, 0.08162918090820312, 0.0818048324584961, 0.08196473693847656, 0.08208013153076171, 0.08137155151367187, 0.08180735778808594, 0.08144195556640625, 0.08125341033935547, 0.08138674926757812, 0.08134508514404297, 0.08162895965576172, 0.08179878234863282, 0.08211078643798828, 0.08148201751708985, 0.08187602996826172, 0.08241587066650391, 0.08219923400878906, 0.08147968292236328, 0.08137932586669921, 0.08113676452636719, 0.08128524780273437, 0.08140467071533203, 0.08143462371826173, 0.08187503814697265, 0.0815730209350586, 0.08226905822753906, 0.08174323272705078, 0.08236630249023437, 0.08592655944824219, 0.08239266967773437, 0.08124988555908202, 0.08122767639160157, 0.08107510375976562, 0.08130316925048828, 0.0814513931274414, 0.08237055969238281, 0.08206307220458985, 0.08409964752197266, 0.0816944351196289, 0.08146659088134765, 0.08238365173339844, 0.08163948822021484, 0.08128681945800781, 0.08118838500976562, 0.08101760101318359, 0.08099635314941406, 0.08100579071044922, 0.08146134185791015, 0.08183171081542968, 0.08154166412353515, 0.08153865814208984, 0.08182848358154297, 0.08215158081054688, 0.08170700836181641, 0.08147532653808594, 0.08279593658447265, 0.08169558715820313, 0.08141382598876953, 0.08117485046386719, 
0.08111283111572265, 0.08164505767822265, 0.08133222198486328, 0.08133884429931641, 0.08139734649658203, 0.08220899200439453, 0.08146739196777343, 0.08134278106689453, 0.081388671875, 0.08187955474853516, 0.08149858856201173, 0.08160256195068359, 0.08191299438476562, 0.08198230743408202, 0.08160460662841797, 0.08161280059814453, 0.08155136108398438, 0.08632524871826172, 0.08220467376708984, 0.08181318664550781, 0.08168275451660156, 0.08164675140380859, 0.08161366271972656, 0.08165372467041015, 0.08177053070068359, 0.08186851501464844, 0.08173596954345703, 0.08170832061767579, 0.0815869140625, 0.08224924468994141, 0.08167648315429688, 0.08147750091552734, 0.08134697723388672, 0.08657078552246093, 0.08168406677246094, 0.08186124420166016, 0.08211046600341797, 0.08225791931152343, 0.08245760345458984, 0.0824780502319336, 0.0823821792602539, 0.08211936187744141, 0.08183318328857422, 0.08207820892333985, 0.08215744018554688, 0.08213753509521485, 0.08234809875488282, 0.08209808349609375, 0.08185628509521484, 0.08412387084960937, 0.08168653106689452, 0.08220262145996093, 0.08144281768798828, 0.08194252777099609, 0.08138703918457031, 0.08155343627929687, 0.08167817687988281, 0.0815738525390625, 0.08179698944091797, 0.08132841491699219, 0.0814678726196289, 0.08178793334960938, 0.0819119644165039, 0.0814618911743164, 0.0815640640258789, 0.08131590270996093, 0.08126054382324219, 0.08170527648925781, 0.08141004943847656, 0.0815552978515625, 0.08154332733154297, 0.08175001525878907, 0.08175206756591796, 0.08189132690429687, 0.08222431945800782, 0.08199398040771484, 0.08243990325927734, 0.08174678039550781, 0.08130489349365234, 0.08138822174072266, 0.08184422302246094, 0.08198467254638672, 0.0814447021484375, 0.08142707061767578, 0.08144870758056641, 0.08151513671875, 0.08144863891601563, 0.08204844665527344, 0.08571916961669922, 0.08417298889160156, 0.08196259307861328, 0.08116732788085937, 0.08176204681396484, 0.08174578857421876, 0.0812610855102539, 0.08147507476806641, 0.08151392364501953, 0.08171737670898438, 0.08192899322509765, 0.08163715362548828, 0.0817318115234375, 0.08153907012939453, 0.08140185546875, 0.08152864074707031, 0.081497314453125, 0.08209833526611328, 0.08208879852294922, 0.08165923309326172, 0.0817834243774414, 0.08202227020263672, 0.08153504180908203, 0.08147071838378907, 0.08119789123535157, 0.0812072982788086, 0.08098643493652344, 0.08313113403320313, 0.0835830078125, 0.08197238159179687, 0.08215293121337891, 0.08128950500488281, 0.08151859283447266, 0.08182694244384765, 0.08170489501953125, 0.0816668472290039, 0.08195833587646484, 0.08196169281005859, 0.08151039886474609, 0.08150016021728515, 0.0819705581665039, 0.08193843078613282, 0.08136358642578125, 0.08152236938476562, 0.08171552276611328, 0.08166524505615234, 0.08157059478759765, 0.08166400146484375, 0.0813387222290039, 0.08127430725097656, 0.08113584136962891, 0.0811374740600586, 0.08174713897705078, 0.08130867004394532, 0.08135001373291016, 0.08121612548828125, 0.08135270690917969, 0.08169062042236327, 0.08161408233642578, 0.08179174041748047, 0.08154541015625, 0.08154255676269531, 0.08155996704101562, 0.08123299407958984, 0.08327875518798829, 0.08131584167480468, 0.08123590087890625, 0.08113362884521484, 0.08114739227294922, 0.08144716644287109, 0.08144716644287109, 0.08160665893554687, 0.08185036468505859, 0.08203593444824218, 0.08169171142578124, 0.08262012481689453, 0.08281446075439453, 0.08276633453369141, 0.08269004821777344, 0.08268125152587891, 0.08220646667480469, 0.08357564544677734, 0.08208179473876953, 
0.08133334350585937, 0.08131881713867188, 0.08138511657714843, 0.08115644836425781, 0.08100454711914062, 0.08087667083740234, 0.0808252182006836, 0.08127241516113282, 0.0811012191772461, 0.08139116668701171, 0.08097996520996094, 0.08129580688476562, 0.08131116485595703, 0.08107884979248046, 0.08083455657958985, 0.08091033935546875, 0.08079933166503907, 0.0810907211303711, 0.08078291320800782, 0.08104585266113282, 0.08132035064697266, 0.08157651519775391, 0.08157087707519531, 0.08158099365234375, 0.08117862701416016, 0.08411500549316406, 0.08124845123291016, 0.08181958770751953, 0.08131574249267579, 0.08118701171875, 0.08154134368896485, 0.08126617431640625, 0.08164812469482421, 0.08169840240478515, 0.08172777557373047, 0.08187273406982422, 0.08295247650146484, 0.08493260955810547, 0.08212480163574219, 0.08116223907470703, 0.08134041595458984, 0.08129519653320312, 0.08119721221923829, 0.08083388519287109, 0.08108099365234375, 0.08154895782470703, 0.08223369598388672, 0.08168243408203125, 0.08157743835449219, 0.08149436950683593, 0.08114399719238281, 0.0812769317626953, 0.08092057800292969, 0.08094303894042969, 0.08088172912597656, 0.08106803131103515, 0.08108236694335938, 0.08108646392822266, 0.08190739440917968, 0.08132367706298828, 0.08136361694335938, 0.08182374572753906, 0.08117247772216797, 0.08111843109130859, 0.0831984634399414, 0.08170111846923828, 0.08129251098632813, 0.08118319702148437, 0.08108067321777344, 0.08101251220703125, 0.08142665863037109, 0.08165510559082031, 0.08127686309814453, 0.08112380981445312, 0.08121987152099609, 0.08091238403320312, 0.08064409637451173, 0.08071894073486328, 0.08063273620605468, 0.08071817779541016, 0.08107318115234376, 0.08145369720458985, 0.08113970947265625, 0.08201017761230468, 0.08120178985595704, 0.0815670394897461, 0.08109343719482422, 0.080932861328125, 0.08063999938964844, 0.08051712036132813, 0.08063145446777344, 0.08066287994384766, 0.08070889282226562, 0.08109539031982421, 0.08115814208984375, 0.08188098907470703, 0.08143062591552734, 0.08156877136230468, 0.08081049346923828, 0.08081459045410157, 0.08103116607666015, 0.08117996978759766, 0.0849415054321289, 0.08127430725097656, 0.08105385589599609, 0.0809287338256836, 0.08123625946044923, 0.08146729278564453, 0.08207180786132813, 0.08219974517822265, 0.08119184112548829, 0.08129222106933594, 0.08091951751708984, 0.08097996520996094, 0.08092626953125, 0.08138591766357423, 0.08120524597167969, 0.08133222198486328, 0.0826263656616211, 0.08110857391357422, 0.0815642547607422, 0.08281088256835938, 0.08160982513427735, 0.08136182403564453, 0.08098729705810546, 0.08065315246582032, 0.08110079956054687, 0.08086236572265625, 0.08106275177001954, 0.08093081665039062, 0.0814551010131836, 0.08118271636962891, 0.08151174163818359, 0.08265910339355469, 0.08132889556884766, 0.08112963104248047, 0.08123574066162109, 0.08100067138671875, 0.0810618896484375, 0.08126850891113281, 0.0811890869140625, 0.08096272277832031, 0.08109065246582031, 0.08145382690429688, 0.08139571380615235, 0.08143174743652344, 0.08196300506591797, 0.08137081909179687, 0.08129567718505859, 0.08125596618652343, 0.0819552993774414, 0.08114380645751954, 0.08120524597167969, 0.08156979370117187, 0.08160240173339844, 0.08184377288818359, 0.0840792007446289, 0.08699903869628907, 0.08175001525878907, 0.0816080322265625, 0.08108303833007813, 0.08119705963134766, 0.08124610900878906, 0.08142038726806641, 0.08098611450195313, 0.08133945465087891, 0.08415532684326171, 0.0812747802734375, 0.08139923095703125, 0.08186649322509766, 
0.08166083526611329, 0.08215248107910156, 0.08125334167480469, 0.08132764434814453, 0.08116265869140625, 0.08125856018066406, 0.0810782699584961, 0.08131788635253906, 0.08167628479003906, 0.0811961898803711, 0.08174646759033204, 0.0817561264038086, 0.08151074981689453, 0.08181536102294922, 0.08365689849853515, 0.08216780853271484, 0.08189337921142578, 0.0812072982788086, 0.08105570983886719, 0.08120527648925781, 0.08271609497070312, 0.08119385528564453, 0.08143142700195312, 0.08152761840820312, 0.08163123321533203, 0.08189132690429687, 0.08136470031738281, 0.08107584381103515, 0.08121001434326172, 0.0810512924194336, 0.08098851013183593, 0.08118681335449218, 0.08154662322998046, 0.08110140991210937, 0.0814981460571289, 0.08232351684570313, 0.08503453063964844, 0.08158214569091797, 0.08190322875976562, 0.08158882904052735, 0.0812195816040039, 0.08120114898681641, 0.08126054382324219, 0.08142233276367188, 0.08107552337646484, 0.08138377380371094, 0.08151689910888672, 0.08138690948486328, 0.0811825942993164, 0.08165769958496094, 0.0809515838623047, 0.08113597106933594, 0.08553292846679687, 0.08135456085205078, 0.08183622741699219, 0.08177024078369141, 0.08139180755615234, 0.08138553619384765, 0.08156336212158204, 0.08240345764160156, 0.08122380828857421, 0.08144809722900391, 0.08222374725341797, 0.08048016357421875, 0.08054000091552735, 0.0803082275390625, 0.0805580825805664, 0.08026930999755859, 0.08139116668701171, 0.08060720062255859, 0.0810948486328125, 0.08069149017333985, 0.08025209808349609, 0.08007737731933594, 0.08037401580810546, 0.0803465576171875, 0.08038864135742188, 0.08049673461914063, 0.08043686676025391, 0.08030182647705078, 0.08039424133300781, 0.08069385528564453, 0.08080381011962891, 0.08014643096923828, 0.08007270050048829, 0.08012083435058594, 0.08004684448242187, 0.08041702270507813, 0.08051507568359376, 0.0801154556274414, 0.08074585723876954, 0.0809048614501953, 0.08027772521972656, 0.08075997161865234, 0.08020259094238281, 0.08148076629638672, 0.08132844543457031, 0.0806344985961914, 0.08027481842041016, 0.08012239837646484, 0.08031427001953124, 0.07987977600097657, 0.08101526641845704, 0.08053791809082031, 0.08052041625976562, 0.08022505950927734, 0.08356163024902344, 0.084087646484375, 0.0808155517578125, 0.08041939544677734, 0.08036147308349609, 0.08023228454589844, 0.08050198364257813, 0.08028800201416016, 0.0803416976928711, 0.08027107238769532, 0.08090857696533203, 0.08104959869384766, 0.08179302215576172, 0.0813682861328125, 0.08118761444091797, 0.08078067016601563, 0.08014707183837891, 0.08023782348632813, 0.08046463775634766, 0.08051305389404297, 0.0801374740600586, 0.08071417236328125, 0.08039657592773437, 0.08028105926513672, 0.08012620544433594, 0.08102326202392578, 0.08028470611572265, 0.0804483871459961, 0.0800564193725586, 0.08287641906738281, 0.0805191650390625, 0.08080384063720703, 0.08044544219970703, 0.08053126525878906, 0.08039202880859375, 0.0801527328491211, 0.08017635345458984, 0.08091545867919922, 0.08046307373046875, 0.08041343688964844, 0.08031027221679687, 0.08047206115722656, 0.08047401428222656, 0.0813650894165039, 0.08160451507568359, 0.08154303741455078, 0.08127101135253906, 0.08152422332763672, 0.08102349090576172, 0.0812353286743164, 0.08093488311767578, 0.08205606079101563, 0.0810841293334961, 0.08071372985839843, 0.08044550323486328, 0.0805552978515625, 0.08089823913574219, 0.08093341064453125, 0.0809896011352539, 0.08124620819091796, 0.08123769378662109, 0.0809393310546875, 0.08155104064941407, 0.0817462387084961, 
0.08064988708496093, 0.08082262420654297, 0.0805560302734375, 0.08400688171386719, 0.08125433349609375, 0.08162108612060547, 0.08144425964355469, 0.08181820678710937, 0.081438720703125, 0.08163430023193359, 0.08181613159179688, 0.08186271667480469, 0.08084105682373047, 0.08115609741210937, 0.08085302734375, 0.08071292877197266, 0.08100534057617187, 0.08160841369628906, 0.0815137939453125, 0.08216912078857422, 0.0811533432006836, 0.08118697357177734, 0.0810129623413086, 0.08103321838378906, 0.08069039916992188, 0.08079849243164063, 0.08117453002929688, 0.08071167755126953, 0.08065590667724609, 0.08108668518066406, 0.08131404876708985, 0.08095334625244141, 0.08087075042724609, 0.08091693115234375, 0.08119522857666016, 0.08088780975341797, 0.08085894775390624, 0.08071593475341797, 0.08096534729003907, 0.08101100921630859, 0.0810904312133789, 0.08085926055908203, 0.08092467498779297, 0.0811335678100586, 0.08090624237060547, 0.08067295837402344, 0.08078733062744141, 0.08135913848876954, 0.08374348449707031, 0.08193875122070313, 0.08096009826660157, 0.08091056060791016, 0.08057218933105469, 0.08171315002441407, 0.08117030334472657, 0.08154086303710938, 0.08220915222167968, 0.08116838073730469]",tokens/s,12.275647591249905,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1179.246592,2348.679168,0.0,1946.157056,1819.994112,s,1,10.36339453125,10.36339453125,0.0,10.36339453125,10.36339453125,10.36339453125,10.36339453125,[10.36339453125],,kWh,9.986422734162563e-05,1.1008655498902557e-05,3.411724951599138e-05,0.00014499013235651958,,MB,1493.54496,2589.851648,0.0,2174.746624,2099.204096,s,10,3.725802703857422,0.37258027038574215,0.0009209239677104277,0.37281675720214846,0.3735780120849609,0.37373337249755856,0.3738576608276367,"[0.37299371337890624, 0.37153475952148435, 0.37082427978515625, 0.37160650634765624, 0.3725549621582031, 0.3727164306640625, 0.37291708374023436, 0.3735434875488281, 0.37388873291015623, 0.3732227478027344]",tokens/s,687.1002582475891,kWh,1.1086388762037636e-05,1.2226389351860206e-06,7.314378279481333e-06,1.9623405976704987e-05,tokens/kWh,13045645.608305637,MB,1518.538752,2589.851648,0.0,2174.746624,2099.206656,s,10,30.331373535156246,3.0331373535156247,0.02039168930965082,3.0342186279296874,3.054112231445312,3.0606399780273437,3.0658621752929687,"[3.046565673828125, 3.067167724609375, 3.05266162109375, 3.029514404296875, 3.048822265625, 3.0389228515625, 3.01695361328125, 3.023489990234375, 3.008666015625, 2.998609375]",tokens/s,20.77057272957931,kWh,8.798827779420764e-05,9.705296334293901e-06,4.9625806161519796e-05,0.00014731938029002135,tokens/kWh,427642.30935518874,,s,630,30.32839582061767,0.048140310826377264,0.0008452838301605457,0.04804070281982421,0.048802370834350586,0.04916962661743164,0.051050263900756836,"[0.05087846374511719, 0.04905526351928711, 0.04890633773803711, 0.04884928131103516, 0.0535470085144043, 0.04907417678833008, 
0.049127422332763675, 0.04891827011108398, 0.04821241760253906, 0.047886081695556644, 0.04786745452880859, 0.04759807968139648, 0.04818329620361328, 0.04752348709106445, 0.047851871490478516, 0.0477388801574707, 0.04778496170043945, 0.048081153869628905, 0.04773519897460937, 0.047833438873291015, 0.04774278259277344, 0.047720638275146485, 0.048363521575927736, 0.04852243041992187, 0.04846675109863281, 0.048699390411376955, 0.048565601348876955, 0.04840105438232422, 0.048189441680908204, 0.04831151962280274, 0.047895328521728515, 0.04783212661743164, 0.04764531326293946, 0.048648448944091795, 0.0485601921081543, 0.048099327087402347, 0.04804748916625977, 0.048761470794677735, 0.04802764892578125, 0.04825907135009765, 0.04821401596069336, 0.04972771072387695, 0.04842473602294922, 0.04834112167358398, 0.04866035079956055, 0.048357566833496096, 0.048234302520751955, 0.04806041717529297, 0.048064510345458986, 0.048070335388183595, 0.04803152084350586, 0.0479865608215332, 0.04770883178710938, 0.04757411193847656, 0.048439361572265624, 0.04841587066650391, 0.04785737609863281, 0.047767551422119144, 0.04905472183227539, 0.047737056732177735, 0.04809603118896484, 0.04772771072387695, 0.04831119918823242, 0.051028385162353515, 0.04870124816894531, 0.04860947036743164, 0.048175167083740235, 0.048989566802978515, 0.04855174255371094, 0.0482457275390625, 0.04812777709960937, 0.048494590759277346, 0.04816214370727539, 0.04858524703979492, 0.04853878402709961, 0.04863689422607422, 0.04890009689331055, 0.04825737762451172, 0.04822800064086914, 0.04819910430908203, 0.047911457061767575, 0.04788633728027344, 0.04813343811035156, 0.048584705352783204, 0.04848713684082031, 0.04799283218383789, 0.04803100967407226, 0.04913580703735351, 0.04814604949951172, 0.048410846710205076, 0.04853747177124024, 0.04811859130859375, 0.048165054321289064, 0.04803715133666992, 0.048366111755371095, 0.04814233779907227, 0.04850688171386719, 0.04970700836181641, 0.048936958312988284, 0.048723968505859375, 0.048535552978515625, 0.04844287872314453, 0.048304351806640625, 0.04831875228881836, 0.04867071914672851, 0.04855091094970703, 0.04815359878540039, 0.04830739212036133, 0.04914054489135742, 0.048328128814697266, 0.04834777450561523, 0.04826214218139648, 0.0488969612121582, 0.04835110473632812, 0.04823014450073242, 0.0482553596496582, 0.048207839965820315, 0.057468097686767576, 0.050361183166503905, 0.049382625579833986, 0.04895209503173828, 0.04908473587036133, 0.04875350570678711, 0.048513694763183596, 0.04825494384765625, 0.048410751342773437, 0.05075353622436524, 0.04851945495605469, 0.04815024185180664, 0.048028766632080076, 0.048253631591796874, 0.048556449890136716, 0.04823020935058594, 0.04807884979248047, 0.04812787246704102, 0.04772671890258789, 0.04770025634765625, 0.04793075180053711, 0.048524703979492184, 0.048409217834472655, 0.04803219223022461, 0.0479918098449707, 0.04922457504272461, 0.04872192001342773, 0.04818739318847656, 0.047931072235107425, 0.04816428756713867, 0.04800400161743164, 0.048175071716308596, 0.048347007751464846, 0.04851667022705078, 0.048154529571533204, 0.047814334869384766, 0.04804441452026367, 0.04826713562011719, 0.04823641586303711, 0.051409759521484376, 0.048470367431640626, 0.04882601547241211, 0.048471553802490235, 0.048167423248291014, 0.04840828704833984, 0.04904729461669922, 0.048382495880126955, 0.04823040008544922, 0.04843241500854492, 0.04862796783447266, 0.04815305709838867, 0.048215072631835935, 0.04835017776489258, 0.04879974365234375, 0.04872771072387695, 
0.04914940643310547, 0.04915894317626953, 0.04875872039794922, 0.04876918411254883, 0.048393310546875, 0.04800831985473633, 0.048098239898681644, 0.04817327880859375, 0.048207969665527345, 0.048122112274169924, 0.04877340698242188, 0.04887664031982422, 0.048427230834960935, 0.04879430389404297, 0.04840652847290039, 0.04854947280883789, 0.048159008026123044, 0.05063865661621094, 0.04872726440429687, 0.04861027145385742, 0.04825907135009765, 0.04817715072631836, 0.04814012908935547, 0.04829763031005859, 0.049100513458251956, 0.04833769607543945, 0.0489246711730957, 0.04802150344848633, 0.048123905181884766, 0.048103199005126954, 0.048110942840576175, 0.047813503265380856, 0.04816479873657226, 0.048183135986328125, 0.04878707122802734, 0.04856383895874023, 0.04823548889160156, 0.04818534469604492, 0.04825497436523438, 0.04802150344848633, 0.04803523254394531, 0.04797296142578125, 0.04789452743530274, 0.04762364959716797, 0.047718017578125, 0.04841670227050781, 0.04822934341430664, 0.048318462371826174, 0.04813983917236328, 0.04799942398071289, 0.048140289306640625, 0.04781164932250977, 0.04767839813232422, 0.04859321594238281, 0.04779945755004883, 0.047611423492431644, 0.04765411376953125, 0.04752278518676758, 0.04772537612915039, 0.04762009429931641, 0.047710208892822265, 0.04752134323120117, 0.04776800155639648, 0.04903936004638672, 0.04768972778320312, 0.04775321578979492, 0.048347137451171876, 0.048955360412597654, 0.0478515510559082, 0.0478023681640625, 0.04769177627563476, 0.047777793884277345, 0.04779199981689453, 0.047691902160644534, 0.04765081787109375, 0.04754579162597656, 0.04748931121826172, 0.04749929428100586, 0.047632030487060543, 0.04774303817749023, 0.05069065475463867, 0.048639999389648435, 0.04853478240966797, 0.04854451370239258, 0.04846716690063477, 0.048564830780029294, 0.04850707244873047, 0.04890009689331055, 0.04864636611938476, 0.0486864013671875, 0.04798716735839844, 0.048256385803222654, 0.04810611343383789, 0.04828160095214844, 0.047734783172607424, 0.047709632873535156, 0.04822892761230469, 0.04795391845703125, 0.04792899322509766, 0.04773283386230469, 0.04808319854736328, 0.047992321014404295, 0.0482327995300293, 0.04848041534423828, 0.0478289909362793, 0.048025825500488284, 0.04798441696166992, 0.04844867324829102, 0.04834918212890625, 0.04902115249633789, 0.04790131378173828, 0.047915008544921874, 0.047683521270751955, 0.04783318328857422, 0.04772016143798828, 0.04796236801147461, 0.0476868782043457, 0.047610721588134765, 0.04783270263671875, 0.049100833892822264, 0.0487786865234375, 0.04842982482910156, 0.04806256103515625, 0.04818659210205078, 0.04921404647827148, 0.04810153579711914, 0.04832863998413086, 0.048148574829101565, 0.04964761734008789, 0.05005516815185547, 0.05024563217163086, 0.04848563385009766, 0.04843807983398438, 0.04856620788574219, 0.04838729476928711, 0.04817795181274414, 0.048330753326416016, 0.0480052490234375, 0.048000896453857425, 0.049723358154296876, 0.04868918228149414, 0.0482529296875, 0.048438751220703125, 0.05040332794189453, 0.0481607666015625, 0.04800486373901367, 0.04807513427734375, 0.04801113510131836, 0.04782460784912109, 0.04783747100830078, 0.048023551940917966, 0.0476910400390625, 0.04753420639038086, 0.047910846710205075, 0.047788063049316404, 0.04818188858032227, 0.04838592147827148, 0.04806054306030273, 0.048248096466064455, 0.04810764694213867, 0.04818175888061523, 0.04807894515991211, 0.048568672180175784, 0.04813520050048828, 0.047863712310791014, 0.048007743835449215, 0.04817116928100586, 0.048211967468261716, 
0.051920192718505856, 0.048670528411865234, 0.04869411087036133, 0.04864412689208984, 0.048625057220458984, 0.04874095916748047, 0.04859494400024414, 0.048674816131591796, 0.04823040008544922, 0.04828681564331055, 0.04832294464111328, 0.048286209106445314, 0.04842691040039063, 0.04810969543457031, 0.049178367614746095, 0.04824700927734375, 0.048887744903564456, 0.047802463531494144, 0.04786083221435547, 0.04792127990722656, 0.04769827270507813, 0.047832992553710936, 0.04795548629760742, 0.04788435363769531, 0.04768387222290039, 0.047755329132080075, 0.04790476989746094, 0.04780441665649414, 0.04796067047119141, 0.04778188705444336, 0.04840809631347656, 0.04820595169067383, 0.048038238525390624, 0.04819279861450195, 0.04806320190429687, 0.04797644805908203, 0.04791910552978516, 0.047982593536376954, 0.05020876693725586, 0.04835286331176758, 0.048095615386962894, 0.04845571136474609, 0.048086463928222654, 0.04800979232788086, 0.048345088958740234, 0.04789657592773437, 0.04791862487792969, 0.048245121002197265, 0.04842412948608398, 0.04841155242919922, 0.04861337661743164, 0.04868710327148437, 0.04820099258422852, 0.048417598724365234, 0.048070560455322264, 0.04822220611572266, 0.04804316711425781, 0.047740001678466794, 0.04744780731201172, 0.04732108688354492, 0.04732707214355469, 0.04791129684448242, 0.048361248016357425, 0.04849078369140625, 0.048572353363037106, 0.04846979141235352, 0.048762081146240234, 0.048167713165283205, 0.04779753494262695, 0.04966060638427734, 0.04764460754394531, 0.04762428665161133, 0.04768767929077149, 0.0474071044921875, 0.047360286712646485, 0.04798543930053711, 0.04748588943481445, 0.04720844650268555, 0.04715676879882812, 0.04703039932250976, 0.04715590286254883, 0.04722208023071289, 0.04794403076171875, 0.04754415893554687, 0.048046241760253905, 0.047247360229492184, 0.047364097595214844, 0.047755104064941406, 0.04750556945800781, 0.0473741455078125, 0.047712448120117185, 0.04736000061035156, 0.04782912063598633, 0.04782886505126953, 0.047124481201171874, 0.04730291366577148, 0.04759676742553711, 0.04735644912719727, 0.047468574523925784, 0.04751923370361328, 0.0471044807434082, 0.050974720001220705, 0.04823244857788086, 0.04779756927490234, 0.048995006561279295, 0.04801884841918945, 0.04797020721435547, 0.047645374298095705, 0.047670654296875, 0.04734835052490234, 0.047489025115966796, 0.0474521598815918, 0.04743135833740234, 0.0473193588256836, 0.0474152946472168, 0.04758883285522461, 0.047546207427978514, 0.04795257568359375, 0.048123390197753906, 0.04773721694946289, 0.04753420639038086, 0.047631393432617186, 0.04775215911865234, 0.04758687973022461, 0.04811734390258789, 0.048337249755859374, 0.05105920028686523, 0.04867071914672851, 0.047607040405273436, 0.04748569488525391, 0.047354145050048826, 0.04734672164916992, 0.047450527191162106, 0.04731727981567383, 0.04716543960571289, 0.047495166778564454, 0.04767264175415039, 0.04758393478393555, 0.04776345443725586, 0.04773068618774414, 0.048142654418945316, 0.04759651184082031, 0.04816790390014648, 0.04827484893798828, 0.048291423797607425, 0.04960537719726563, 0.048997665405273436, 0.0523633918762207, 0.04833113479614258, 0.04820934295654297, 0.04812262344360352, 0.04754819107055664, 0.04751571273803711, 0.048122112274169924, 0.04744800186157227, 0.04804393768310547, 0.04748239898681641, 0.047309375762939455, 0.047785888671875, 0.047666400909423826, 0.047583553314208986, 0.048075328826904296, 0.04749497604370117, 0.04766534423828125, 0.04978870391845703, 0.04767558288574219, 0.047495201110839845, 
0.04759971237182617, 0.04732307052612305, 0.047286209106445314, 0.047236225128173825, 0.04788054275512695, 0.04772409439086914, 0.04758940887451172, 0.04799388885498047, 0.04750739288330078, 0.047675392150878904, 0.04749456024169922, 0.04757059097290039, 0.047755680084228515, 0.04752220916748047, 0.04730278396606445, 0.04747478485107422, 0.04742092895507812, 0.04742345428466797, 0.050151870727539065, 0.047766624450683595, 0.04826204681396484, 0.04771430587768555, 0.047703422546386716, 0.047370880126953126, 0.04750950241088867, 0.04759084701538086, 0.04779475021362305, 0.04751955032348633, 0.04770563125610352, 0.047529953002929684, 0.047655616760253906, 0.04745830535888672, 0.04759756851196289, 0.047355777740478514, 0.04766249465942383, 0.0476464958190918, 0.047379390716552734, 0.047306751251220705, 0.04739596939086914, 0.047104896545410155, 0.04720028686523438, 0.04708963012695312, 0.047877281188964844, 0.04721731185913086, 0.047325374603271485, 0.04732928085327148, 0.047531520843505856, 0.04757350540161133, 0.047540191650390626, 0.04726992034912109, 0.0472209587097168, 0.04752566528320312, 0.047981727600097654, 0.047817569732666015, 0.0481525764465332, 0.04750044631958008, 0.0481165771484375, 0.04807884979248047, 0.04785100936889648, 0.05417206573486328, 0.050284160614013675, 0.047768383026123046, 0.04749107360839844, 0.04730195236206055, 0.04769862365722656, 0.04754431915283203, 0.04820377731323242, 0.04762572860717774, 0.047650718688964845, 0.04796067047119141, 0.0477913932800293, 0.04776214218139648, 0.04754179382324219, 0.04760009765625, 0.04758118438720703, 0.047263614654541014, 0.047190143585205076, 0.047089599609375, 0.04696995162963867, 0.04708652877807617, 0.04705814361572266, 0.047729473114013675, 0.047511550903320314, 0.04875468826293945, 0.04722687911987305, 0.047677566528320316, 0.04751551818847656, 0.04767513656616211, 0.04967654418945312, 0.047734783172607424, 0.047255455017089845, 0.04718719863891602, 0.04703254318237305, 0.04726396942138672, 0.04734524917602539, 0.04743158340454102, 0.047207584381103514, 0.04729232025146484, 0.04777558517456055, 0.04698102569580078, 0.04695859146118164, 0.04705100631713867, 0.04780220794677734, 0.04737542343139649, 0.047362655639648435, 0.04718656158447266, 0.0471956787109375, 0.047248798370361327, 0.047358753204345704, 0.048863231658935545, 0.047988510131835936, 0.047479007720947264, 0.04722406387329101, 0.047370399475097656, 0.04736185455322266, 0.047524639129638675, 0.048320510864257815, 0.04800307083129883, 0.047578529357910154, 0.04756886291503906, 0.04746316909790039, 0.04754419326782226, 0.04775526428222656]",tokens/s,20.772612034155696,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,26488.0128,13910.278144,0.0,13507.756032,13505.835008,s,1,53.273609375,53.273609375,0.0,53.273609375,53.273609375,53.273609375,53.273609375,[53.273609375],,kWh,0.0013481496333958905,0.0001487039198143759,0.0004644234270940184,0.001961276980304285,,MB,1371.398144,14803.664896,0.0,14380.171264,14187.445248,s,10,1.8204776611328126,0.18204776611328127,0.0004964463502543814,0.18191283416748047,0.18264312744140626,0.18274633026123047,0.18282889251708986,"[0.18184060668945312, 0.18262019348144531, 0.18163623046875, 0.18260643005371094, 0.1816216583251953, 0.1828495330810547, 0.182376708984375, 0.18154063415527344, 0.18140060424804688, 0.18198506164550782]",tokens/s,1406.2243413670956,kWh,5.38754152469692e-06,5.941542099601631e-07,3.581891754399592e-06,9.563587489056677e-06,tokens/kWh,26768197.634301253,MB,1396.46976,14868.676608,0.0,14443.085824,14355.886592,s,10,63.78037890625001,6.378037890625,0.027835208976177757,6.37039501953125,6.41710244140625,6.4190070312500005,6.420530703125,"[6.42091162109375, 6.39886865234375, 6.41667919921875, 6.40148095703125, 6.33780224609375, 6.359380859375, 6.3693330078125, 6.3494677734375, 6.37145703125, 6.35499755859375]",tokens/s,9.877645928162787,kWh,0.00018809045271571726,2.0747214212136673e-05,0.0001143148970074014,0.0003231525639352553,tokens/kWh,194954.35602554047,,s,630,63.777180549621605,0.10123361992003427,0.000969677036979659,0.10107561874389648,0.10219965667724609,0.10279846763610839,0.10533807144165039,"[0.10164749145507812, 0.10149468994140624, 0.10114556884765626, 0.1019731216430664, 0.10200563049316407, 0.10196377563476562, 0.10180515289306641, 0.10174966430664062, 0.10144528198242188, 0.10110192108154296, 0.10066329956054687, 0.10088409423828125, 0.10073535919189452, 0.10136297607421875, 0.10127843475341797, 0.10197737884521485, 0.10189065551757813, 0.10173369598388672, 0.10147257232666015, 0.10159053039550782, 0.10147977447509765, 0.10110736083984374, 0.10167024230957031, 0.10116572570800782, 0.10116710662841796, 0.10101299285888672, 0.10130278778076172, 0.10226892852783204, 0.1068787841796875, 0.10173049926757813, 0.10166681671142579, 0.10101961517333985, 0.10083740997314453, 0.10113433837890624, 0.10152754974365234, 0.10147430419921875, 0.1039462432861328, 0.10448025512695312, 0.10261529541015625, 0.10509040069580078, 0.10410028839111328, 0.10164876556396485, 0.10102700805664062, 0.10123980712890625, 0.10216365051269531, 0.1015571517944336, 0.10207123565673829, 0.10214614105224609, 0.10105513763427734, 0.10143869018554688, 0.10119235229492188, 0.10113664245605469, 0.1015720672607422, 0.10184566497802734, 0.10219945526123046, 0.10150383758544922, 0.10235600280761718, 0.10221456146240235, 0.10202428436279297, 0.10302236938476562, 0.10286892700195313, 0.10323286437988281, 0.10341426849365234, 0.10232012939453125, 0.10282787322998047, 0.10262486267089843, 0.10125574493408203, 0.10161145782470703, 0.1016115493774414, 0.10168265533447265, 0.10140115356445313, 0.10121126556396484, 0.10128800201416016, 0.10167378997802734, 0.10219929504394532, 0.10215948486328125, 0.101806396484375, 0.10187363433837891, 0.10055123138427734, 0.10074729919433593, 0.10116057586669921, 0.10089078521728516, 0.10158073425292968, 0.10132921600341797, 0.10314102172851562, 0.10202758026123047, 0.10158643341064454, 0.10242304229736328, 0.10162380981445313, 0.10120191955566406, 0.10209587097167969, 0.10157347106933594, 0.1018328628540039, 0.10256735992431641, 0.10157727813720703, 0.1026723861694336, 
0.101644287109375, 0.10107443237304688, 0.10125363159179687, 0.10181958770751953, 0.10240608215332031, 0.10144649505615234, 0.10182572937011719, 0.10222627258300782, 0.10195340728759765, 0.10127747344970703, 0.10146492767333984, 0.10165074920654296, 0.10191667175292969, 0.10115555572509766, 0.1015654067993164, 0.10137939453125, 0.10120464324951171, 0.10138121795654297, 0.10076665496826172, 0.10096460723876953, 0.10107469177246094, 0.10052390289306641, 0.10091737365722656, 0.10107814025878906, 0.10130111694335937, 0.10092982482910157, 0.10117091369628907, 0.10141212463378907, 0.10085830688476563, 0.10079261016845703, 0.10087340545654297, 0.10103072357177735, 0.10058751678466797, 0.10264985656738282, 0.10152674865722656, 0.10410089874267578, 0.10532144165039062, 0.10222262573242187, 0.10315980529785156, 0.10130345916748047, 0.10098111724853516, 0.10082303619384765, 0.10234022521972656, 0.10238857269287109, 0.10121421051025391, 0.10177529907226562, 0.10191673278808594, 0.10127565002441406, 0.10103411102294922, 0.10108710479736328, 0.10091519927978515, 0.10093718719482422, 0.10098537445068359, 0.10100054168701172, 0.1011014404296875, 0.10134403228759765, 0.10135072326660156, 0.10363545227050781, 0.1018369903564453, 0.10187980651855469, 0.10145177459716796, 0.101718017578125, 0.101607421875, 0.10159305572509765, 0.10149686431884766, 0.10176041412353516, 0.10176777648925782, 0.10166390228271484, 0.10217967987060547, 0.10220146942138672, 0.10320269012451172, 0.10246892547607422, 0.10267513275146484, 0.1021006088256836, 0.10169324493408204, 0.10433184051513672, 0.10172211456298828, 0.10171392059326172, 0.10137583923339843, 0.10176322937011718, 0.10214236450195313, 0.10169852447509765, 0.1014686050415039, 0.10115705871582031, 0.1013054428100586, 0.10276252746582032, 0.10198105621337891, 0.1016094741821289, 0.10184909057617188, 0.10148892974853516, 0.10158617401123046, 0.10224483489990234, 0.10199199676513672, 0.10157337951660156, 0.10249823760986328, 0.10103398132324219, 0.10134124755859375, 0.10187359619140625, 0.10192691040039062, 0.10188800048828126, 0.10210508728027344, 0.10224809265136718, 0.10144710540771484, 0.10110063934326172, 0.10090681457519532, 0.10085715484619141, 0.10068438720703125, 0.10154118347167969, 0.10155519866943359, 0.10150889587402344, 0.10161561584472656, 0.1014082260131836, 0.10152019500732422, 0.1011229476928711, 0.10118780517578126, 0.10140735626220704, 0.10101116943359376, 0.10159689331054687, 0.10184761810302734, 0.10159513854980469, 0.10123468780517578, 0.10111366271972656, 0.10169497680664062, 0.10166690826416015, 0.10098681640625, 0.10177584075927734, 0.10073516845703125, 0.1010544662475586, 0.10128998565673829, 0.10125721740722657, 0.10122467041015625, 0.1019411849975586, 0.10534486389160157, 0.10255903625488282, 0.10249286651611328, 0.1057791976928711, 0.10158412933349609, 0.10161833953857421, 0.10180960083007813, 0.10139427185058594, 0.10156114959716797, 0.10137388610839844, 0.10211443328857422, 0.10186793518066406, 0.10130691528320312, 0.10138563537597656, 0.10065900421142578, 0.10147510528564453, 0.10152960205078125, 0.1006929931640625, 0.10054994964599609, 0.10075289916992188, 0.1015374755859375, 0.10100377655029297, 0.10290790557861328, 0.10150093078613281, 0.10114064025878906, 0.10089075469970703, 0.10112000274658203, 0.10087961578369141, 0.10221855926513672, 0.1010953598022461, 0.10050355529785156, 0.10107654571533203, 0.10049753570556641, 0.10069344329833985, 0.1008174057006836, 0.10085414123535157, 0.10105840301513672, 0.10205609893798828, 
0.10051522827148437, 0.10007103729248047, 0.10016233825683593, 0.09985820770263672, 0.09992339324951172, 0.10035298919677735, 0.10027986907958984, 0.10079865264892578, 0.10043196868896484, 0.10058380889892578, 0.10109228515625, 0.10098531341552734, 0.10009638214111329, 0.09996492767333984, 0.09979801940917969, 0.10023625946044921, 0.10047081756591797, 0.10130009460449219, 0.10082316589355468, 0.10077129364013672, 0.10034146881103516, 0.1021529312133789, 0.09997731018066407, 0.10008751678466797, 0.10010454559326172, 0.1018116455078125, 0.10039756774902343, 0.10071858978271485, 0.10057673645019531, 0.1006509780883789, 0.10034345245361329, 0.10005299377441407, 0.10072489929199219, 0.09961923217773437, 0.10017247772216797, 0.10039087677001954, 0.10107180786132812, 0.10065382385253906, 0.10090496063232422, 0.10055474853515625, 0.10033766174316407, 0.09999462127685547, 0.10004486083984375, 0.09986361694335938, 0.10028230285644531, 0.10058745574951172, 0.10062815856933593, 0.10039900970458984, 0.10061222076416015, 0.10092144012451172, 0.1006263656616211, 0.10006329345703124, 0.10014742279052734, 0.10029750061035156, 0.1005823974609375, 0.10070767974853516, 0.10063734436035156, 0.10273321533203125, 0.10274416351318359, 0.1017257308959961, 0.1012573471069336, 0.10373030090332032, 0.10048220825195313, 0.10053711700439454, 0.10063667297363281, 0.10037785339355469, 0.10035238647460938, 0.10074486541748047, 0.10050313568115235, 0.10018294525146484, 0.10027552032470703, 0.10010652923583985, 0.10015785980224609, 0.1012326431274414, 0.10104185485839844, 0.10053459167480469, 0.1005506591796875, 0.10057698822021484, 0.100388671875, 0.1011143035888672, 0.10347449493408203, 0.10063932800292968, 0.10021929931640625, 0.10075516510009766, 0.10080844879150391, 0.10101785278320312, 0.10095645141601563, 0.10240995025634765, 0.10243507385253907, 0.10107059478759765, 0.10072380828857422, 0.10063555145263672, 0.10074694061279296, 0.10143775939941406, 0.10119308471679687, 0.10085820770263672, 0.1007209243774414, 0.10068377685546875, 0.10076290893554687, 0.1004244155883789, 0.10038697814941407, 0.0999559326171875, 0.10050182342529297, 0.10079615783691406, 0.100892578125, 0.10095622253417968, 0.1012291488647461, 0.10125676727294922, 0.10197875213623046, 0.1014510726928711, 0.10114691162109375, 0.10060002899169922, 0.101281982421875, 0.10095823669433594, 0.10110009765625, 0.10084172821044922, 0.10210601806640625, 0.1010185317993164, 0.10054867553710937, 0.1013163833618164, 0.10118675231933594, 0.10178195190429687, 0.10168982696533203, 0.10104771423339844, 0.10139299011230468, 0.10151538848876954, 0.10105228424072266, 0.101031005859375, 0.1009501724243164, 0.10067430114746094, 0.10077756500244141, 0.1006324462890625, 0.10137449645996094, 0.1004031982421875, 0.10100972747802735, 0.10022025299072265, 0.1002356185913086, 0.10063817596435547, 0.10025628662109375, 0.10008985900878906, 0.09986406707763672, 0.10067756652832031, 0.10114080047607422, 0.10041165161132813, 0.10078028869628906, 0.10078749084472656, 0.10040473937988281, 0.100864990234375, 0.1004070053100586, 0.10039324951171875, 0.10045353698730469, 0.10080137634277343, 0.10115657806396484, 0.10088658905029296, 0.10379199981689453, 0.10406793975830078, 0.10146598052978516, 0.10164031982421876, 0.10564812469482422, 0.10193305969238281, 0.10083936309814454, 0.10112611389160156, 0.10151945495605469, 0.10100736236572265, 0.10185523223876954, 0.10138419342041016, 0.10128304290771484, 0.10086809539794922, 0.10069487762451172, 0.10053421020507812, 0.1007042236328125, 
0.10027830505371094, 0.10043497467041015, 0.10064800262451172, 0.10113629150390625, 0.10185939025878907, 0.10143775939941406, 0.10073014068603516, 0.10103638458251953, 0.10149478149414062, 0.10150038146972656, 0.10070620727539062, 0.1007784652709961, 0.10113078308105469, 0.10056630706787109, 0.10018889617919922, 0.10109078216552735, 0.10067008209228516, 0.10065910339355469, 0.10045645141601563, 0.10089266967773437, 0.10110566711425781, 0.10112812805175782, 0.103993408203125, 0.10040060424804688, 0.1004467544555664, 0.10048921966552735, 0.10039910125732422, 0.10074931335449219, 0.10078646087646484, 0.10098659515380859, 0.10031228637695312, 0.10077859497070313, 0.10134297943115235, 0.10056134033203125, 0.10023935699462891, 0.10025526428222656, 0.1004139175415039, 0.10059161376953125, 0.10075341033935546, 0.10141030120849609, 0.10104473876953125, 0.10069401550292968, 0.10050726318359375, 0.10146025848388672, 0.10063238525390625, 0.10044185638427734, 0.10028086090087891, 0.10076774597167969, 0.10061619567871094, 0.10066492462158202, 0.10176486206054687, 0.1013889617919922, 0.10160128021240235, 0.10032921600341797, 0.1003399658203125, 0.10006880187988282, 0.10031718444824218, 0.10035472106933593, 0.10029987335205078, 0.1004408950805664, 0.10077993774414062, 0.1002742691040039, 0.10021260833740234, 0.10081228637695312, 0.10033942413330078, 0.10010262298583984, 0.10027053070068359, 0.10105661010742187, 0.10107667541503906, 0.10144528198242188, 0.10118243408203124, 0.10038272094726562, 0.10012876892089843, 0.1004444808959961, 0.1002627182006836, 0.10066828918457031, 0.10080425262451172, 0.10060221099853515, 0.10072409820556641, 0.10699225616455078, 0.10205110168457031, 0.10177523040771484, 0.10102665710449218, 0.10384393310546874, 0.10067670440673829, 0.10118125152587891, 0.10125004577636719, 0.10141046142578125, 0.10189830780029296, 0.10107958221435547, 0.10107878112792969, 0.10028339385986328, 0.10033452606201172, 0.10031472015380859, 0.10025849914550782, 0.10058668518066406, 0.10098365020751954, 0.10033139038085938, 0.10090646362304688, 0.10050153350830078, 0.10063091278076172, 0.1027499237060547, 0.10133926391601562, 0.10117366027832031, 0.10125081634521485, 0.10140467071533203, 0.10153778839111328, 0.10161459350585937, 0.10202191925048829, 0.1024268798828125, 0.10110358428955078, 0.10046406555175781, 0.10065385437011719, 0.10029622650146484, 0.10033081817626953, 0.10060076904296875, 0.10095820617675781, 0.10072809600830078, 0.10133296203613282, 0.1005739517211914, 0.10008716583251953, 0.10071887969970703, 0.10112445068359376, 0.10064076995849609, 0.1007738265991211, 0.1016517105102539, 0.1011383056640625, 0.10082572937011719, 0.10138246154785156, 0.10201497650146485, 0.10078822326660156, 0.10051583862304687, 0.10039199829101562, 0.10055923461914062, 0.10023686218261718, 0.10113536071777343, 0.10119782257080077, 0.10061971282958984, 0.10156829071044922, 0.10074505615234375, 0.10040777587890624, 0.10090758514404297, 0.10051165008544923, 0.10097641754150391, 0.10032355499267578, 0.10145536041259766, 0.10073446655273438, 0.100600830078125, 0.10064895629882813, 0.09980217742919922, 0.09978975677490234, 0.09980518341064454, 0.10005264282226563, 0.10005948638916015, 0.10079212951660156, 0.10077401733398438, 0.10171807861328125, 0.10103302764892579, 0.1008987808227539, 0.10047792053222657, 0.09986185455322266, 0.10036905670166016, 0.1003499526977539, 0.100523681640625, 0.10033001708984375, 0.10053929901123047, 0.10436479949951172, 0.10101990509033203, 0.10065910339355469, 0.10019337463378906, 
0.10003753662109376, 0.1000041275024414, 0.10051267242431641, 0.10063750457763672, 0.10156204986572266, 0.10087372589111328, 0.10555398559570313, 0.10424192047119141, 0.1014520034790039, 0.10045417785644531, 0.10035171508789062, 0.10586742401123046, 0.10079004669189454, 0.10056646728515625, 0.10070694732666016, 0.10055814361572266, 0.10078511810302734, 0.10046835327148437, 0.10001651000976562, 0.10040707397460938, 0.10034537506103515, 0.10026681518554688, 0.1004903335571289]",tokens/s,9.878141281423234,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,6526.050304,3728.605184,0.0,3326.083072,3249.416192,s,1,17.305298828125,17.305298828125,0.0,17.305298828125,17.305298828125,17.305298828125,17.305298828125,[17.305298828125],,kWh,0.00029594516212914636,3.2637697373544626e-05,0.00010046730259599679,0.00042905016209868776,,MB,1940.676608,4049.46944,0.0,3625.975808,3532.033024,s,10,0.8770157165527344,0.08770157165527345,0.0016537796944527958,0.08708961486816406,0.0884048599243164,0.09045150299072265,0.09208881744384766,"[0.08736131286621093, 0.08698825836181641, 0.08795005035400391, 0.08690732574462891, 0.0924981460571289, 0.08711548614501953, 0.08677187347412109, 0.08783792114257813, 0.08652159881591796, 0.0870637435913086]",tokens/s,2918.9898786107656,kWh,2.534329761350421e-06,2.7948988666495557e-07,1.642206773534572e-06,4.456026421549948e-06,tokens/kWh,57450287.71866102,MB,1953.681408,4114.481152,0.0,3688.890368,3607.643648,s,10,51.105751953125,5.110575195312499,0.014895957701192604,5.114392333984375,5.124807421875,5.129295556640625,5.132886064453125,"[5.11543359375, 5.11758642578125, 5.10783203125, 5.12381005859375, 5.0875693359375, 5.11399169921875, 5.13378369140625, 5.11479296875, 5.08115625, 5.1097958984375]",tokens/s,12.327379520368783,kWh,0.00014804865970990383,1.6330211783801587e-05,6.213764980586442e-05,0.00022651652129956985,tokens/kWh,278125.40841858514,,s,630,51.10262809753419,0.08111528269449869,0.0008990395406975577,0.08090079879760742,0.0818833381652832,0.08279611015319824,0.08426427558898926,"[0.08148540496826172, 0.0815481948852539, 0.08094477081298829, 0.08105232238769532, 0.0810636444091797, 0.0810618896484375, 0.08078659057617188, 0.08065689849853516, 0.08127884674072265, 0.08590383911132812, 0.08042281341552734, 0.08140204620361328, 0.08089807891845703, 0.08094681549072266, 0.0810355224609375, 0.08083602905273438, 0.0818589096069336, 0.08143484497070312, 0.08101446533203124, 0.08111750030517578, 0.08196630096435546, 0.08073919677734374, 0.08054752349853515, 0.08096585845947266, 0.08104886627197265, 0.08096236419677734, 0.08155977630615234, 0.08076448059082031, 0.08105792236328126, 0.08096358489990234, 0.08161686706542969, 0.08126882934570312, 0.08114569854736328, 0.08163132476806641, 0.0814336929321289, 0.08118399810791016, 0.08174784088134765, 0.0809933090209961, 0.08147622680664063, 0.08141766357421874, 0.08051757049560547, 
0.08084489440917969, 0.08057059478759766, 0.08079510498046875, 0.08064227294921875, 0.08069145965576172, 0.08171625518798828, 0.08048880004882812, 0.08061750030517578, 0.08292361450195312, 0.081070556640625, 0.08103721618652343, 0.08123519897460937, 0.08088256072998047, 0.0822632293701172, 0.08083715057373046, 0.08068243408203125, 0.08109539031982421, 0.08070569610595703, 0.0809221420288086, 0.08109919738769532, 0.0814195556640625, 0.08084700775146485, 0.08083193969726563, 0.08098242950439453, 0.08068838500976562, 0.08035609436035156, 0.08025907135009766, 0.08049049377441406, 0.0805041275024414, 0.08017171478271484, 0.07989453125, 0.08156361389160156, 0.08092867279052735, 0.08105382537841797, 0.08053964996337891, 0.08092047882080078, 0.08113478088378906, 0.08133026885986328, 0.08097238159179687, 0.08151900482177735, 0.08140806579589843, 0.08081097412109375, 0.08082077026367188, 0.08090025329589844, 0.08137737274169922, 0.08102706909179687, 0.08058863830566407, 0.0810634536743164, 0.08098592376708984, 0.08422688293457031, 0.08248115539550781, 0.08118476867675781, 0.08099465942382812, 0.08085059356689453, 0.08044322967529297, 0.08082998657226563, 0.0813839340209961, 0.08081769561767578, 0.08306489562988281, 0.08279708862304687, 0.08111676788330079, 0.0811585922241211, 0.08307266998291016, 0.08069766235351562, 0.08075052642822265, 0.08073017883300782, 0.08064157104492188, 0.08373868560791016, 0.08136454772949218, 0.081357666015625, 0.08081305694580078, 0.08069174194335937, 0.08077152252197266, 0.08147151947021485, 0.08062361907958984, 0.0809013442993164, 0.08130668640136719, 0.08354723358154297, 0.08149779510498047, 0.08157635498046875, 0.08127542114257813, 0.08174329376220703, 0.0822421112060547, 0.08084310150146484, 0.08116336059570313, 0.08121753692626953, 0.08159212493896484, 0.08088595581054688, 0.0815841293334961, 0.08084636688232422, 0.08077359771728515, 0.0814261474609375, 0.08173596954345703, 0.08125004577636719, 0.08077942657470703, 0.08078108978271484, 0.08077139282226563, 0.08287567901611329, 0.08328985595703126, 0.08248419189453125, 0.08084486389160156, 0.08073603057861328, 0.08053571319580079, 0.0805722885131836, 0.08104873657226562, 0.08150438690185546, 0.08091939544677734, 0.08079666900634766, 0.08060415649414063, 0.08088495635986329, 0.08187782287597656, 0.08130876922607422, 0.0810564193725586, 0.08072739410400391, 0.08050601959228515, 0.08048368072509765, 0.08036697387695313, 0.08172835540771485, 0.08130147552490234, 0.08118841552734375, 0.08143746948242188, 0.08108016204833984, 0.08112742614746093, 0.08105340576171875, 0.08051945495605468, 0.08033261108398437, 0.08025312042236328, 0.08040243530273437, 0.08022758483886719, 0.08181430053710938, 0.08156531524658203, 0.08102127838134765, 0.08083865356445312, 0.08097583770751954, 0.08121113586425781, 0.08208185577392578, 0.08104707336425782, 0.08061551666259766, 0.08084268951416015, 0.0814823989868164, 0.0810631332397461, 0.08073458862304687, 0.0811107177734375, 0.08073699188232422, 0.08076258850097656, 0.08106192016601563, 0.08035558319091797, 0.08049254608154296, 0.08046543884277343, 0.08038153839111328, 0.08020671844482422, 0.08065795135498047, 0.08058313751220703, 0.08168447875976563, 0.080653564453125, 0.08012467193603516, 0.08044879913330077, 0.08028848266601563, 0.08105561828613281, 0.08063404846191406, 0.08059648132324218, 0.08056678771972656, 0.080604736328125, 0.08144547271728515, 0.08147740936279296, 0.08116633605957031, 0.08167424011230469, 0.08175615692138671, 0.08102722930908203, 0.08080982208251954, 
0.08376729583740235, 0.08302944183349609, 0.0815474853515625, 0.08340697479248046, 0.08074262237548828, 0.08058493041992187, 0.08042425537109375, 0.08069967651367188, 0.08227040100097656, 0.08198758697509766, 0.08150940704345704, 0.08156412506103515, 0.08160944366455078, 0.08173750305175781, 0.08060704040527343, 0.08120105743408203, 0.08074473571777344, 0.08048822021484375, 0.09004704284667969, 0.08075552368164063, 0.08054774475097656, 0.0814642562866211, 0.080559326171875, 0.08066063690185547, 0.08218892669677734, 0.08068915557861328, 0.08045772552490234, 0.08289443206787109, 0.08279491424560546, 0.08188518524169922, 0.08075609588623046, 0.0822872314453125, 0.0808958740234375, 0.08144854736328125, 0.08111567687988282, 0.08096562957763671, 0.08075878143310547, 0.08087142181396484, 0.08115593719482422, 0.08131804656982422, 0.0807833251953125, 0.08054217529296875, 0.08062550354003906, 0.08074012756347657, 0.08104179382324218, 0.08077311706542968, 0.08082646179199218, 0.08088976287841797, 0.08088076782226562, 0.0805633316040039, 0.08041001892089844, 0.0806915512084961, 0.08077926635742187, 0.08098995208740234, 0.08038870239257813, 0.08046527862548829, 0.08056861114501954, 0.08102483367919922, 0.08059686279296875, 0.08058092498779297, 0.08028569793701172, 0.08172895812988282, 0.0814578857421875, 0.08214717102050781, 0.08169471740722656, 0.08161280059814453, 0.08092880249023438, 0.08089545440673829, 0.08070771026611329, 0.08149046325683594, 0.08117958068847657, 0.08080592346191406, 0.08056662750244141, 0.08053404998779297, 0.08076525115966797, 0.08144249725341797, 0.08014163208007813, 0.08027206420898438, 0.08048435211181641, 0.08017715454101562, 0.08162310028076172, 0.08057849884033202, 0.08105593872070313, 0.0806021728515625, 0.08027212524414062, 0.08040652465820312, 0.08071177673339844, 0.08129936218261719, 0.08106777954101563, 0.08035763549804688, 0.07996323394775391, 0.08067756652832031, 0.08000678253173828, 0.08021871948242187, 0.08075468444824219, 0.08070134735107422, 0.08032998657226563, 0.0805302734375, 0.080289794921875, 0.08012185668945312, 0.08117411041259766, 0.0811496353149414, 0.08035769653320313, 0.08031171417236328, 0.08037129974365234, 0.08051055908203125, 0.08123474884033204, 0.08055398559570312, 0.08019558715820313, 0.08082617950439454, 0.08484697723388672, 0.08031212615966797, 0.08084035491943359, 0.08061782073974609, 0.08051708984375, 0.08077494049072266, 0.08081433868408203, 0.08080793762207031, 0.08116838073730469, 0.0810847396850586, 0.08076675415039063, 0.08621398162841797, 0.08307321929931641, 0.08132233428955078, 0.08176028442382813, 0.08064409637451173, 0.08036966705322265, 0.08040038299560547, 0.08302973175048828, 0.08056451416015625, 0.08048576354980469, 0.08136563110351562, 0.0807710723876953, 0.08077945709228515, 0.08158188629150391, 0.08038969421386719, 0.08114947509765626, 0.08060604858398437, 0.08043840026855469, 0.08057308959960938, 0.08525174713134766, 0.08176223754882812, 0.08118915557861328, 0.0817260513305664, 0.0811756820678711, 0.08152134704589843, 0.08090214538574218, 0.08066671752929687, 0.08021161651611328, 0.08399523162841797, 0.08109337615966797, 0.08047532653808594, 0.08056982421875, 0.08099456024169922, 0.08184614562988281, 0.08058035278320312, 0.08113804626464843, 0.08051744079589844, 0.08072793579101563, 0.08132592010498046, 0.08068297576904297, 0.08123350524902344, 0.08072029113769531, 0.08037580871582031, 0.08016665649414062, 0.08047465515136719, 0.080580322265625, 0.08095708465576172, 0.08077922821044922, 0.0808128662109375, 
0.08126054382324219, 0.08049987030029297, 0.08059990692138672, 0.08064205169677735, 0.08068300628662109, 0.08023766326904297, 0.08040121459960937, 0.08097392272949219, 0.08115721893310547, 0.0809054718017578, 0.08137635040283203, 0.08071574401855469, 0.08182640075683593, 0.08119926452636719, 0.08110662078857422, 0.0811644515991211, 0.08139318084716797, 0.08136752319335938, 0.08127283477783204, 0.08124006652832032, 0.0812126693725586, 0.08160768127441406, 0.0817886734008789, 0.08124620819091796, 0.0812747802734375, 0.08121929931640624, 0.08123609924316406, 0.0814655990600586, 0.08158207702636719, 0.08188313293457031, 0.08136297607421875, 0.08271027374267578, 0.08128108978271484, 0.08259136199951173, 0.08192044830322266, 0.0821924819946289, 0.08404991912841797, 0.08247225952148438, 0.08167289733886719, 0.08309465789794922, 0.08231014251708985, 0.08158592224121093, 0.08145539093017579, 0.08176009368896485, 0.0815782699584961, 0.08196707153320312, 0.0819195556640625, 0.08212105560302735, 0.0813951644897461, 0.08117686462402343, 0.08100198364257813, 0.08142908477783203, 0.0825323486328125, 0.08115756988525391, 0.0807923812866211, 0.08156953430175781, 0.08122713470458984, 0.08186739349365234, 0.08178073883056641, 0.08236188507080078, 0.08350924682617188, 0.08130560302734376, 0.0809197769165039, 0.08166460418701171, 0.08141574096679688, 0.08091302490234376, 0.08090828704833984, 0.08129132843017578, 0.08111302185058594, 0.0827100830078125, 0.08168492889404297, 0.08248016357421875, 0.08505062103271484, 0.08231807708740234, 0.08127286529541015, 0.08161564636230469, 0.08109891510009766, 0.0808917465209961, 0.08058265686035156, 0.08051318359375, 0.08058998107910156, 0.08109756469726563, 0.08059490966796876, 0.08117046356201171, 0.08071091461181641, 0.0813139190673828, 0.0807430419921875, 0.081080322265625, 0.08074147033691406, 0.08041561889648438, 0.0802911376953125, 0.08310157012939454, 0.08068966674804688, 0.08085123443603516, 0.08050428771972656, 0.08122806549072266, 0.08230281829833984, 0.08021858978271484, 0.08021196746826172, 0.08056422424316406, 0.08427954864501953, 0.08078256225585938, 0.08032310485839844, 0.08046797180175781, 0.08116429138183594, 0.08076665496826171, 0.08082463836669922, 0.08067072296142579, 0.08141327667236328, 0.08147440338134766, 0.08070259094238282, 0.08048934173583984, 0.08102092742919922, 0.08035958099365234, 0.08017308807373047, 0.08020137786865235, 0.08038748931884766, 0.08033686065673828, 0.08019213104248046, 0.08051907348632813, 0.08099456024169922, 0.08110489654541016, 0.08416665649414062, 0.08104460906982422, 0.08049107360839844, 0.08023897552490235, 0.08032691192626953, 0.08016079711914062, 0.08048406219482422, 0.08069939422607422, 0.08029507446289062, 0.0804312286376953, 0.08138569641113282, 0.08159862518310547, 0.0810499496459961, 0.08035123443603516, 0.08092896270751954, 0.080504638671875, 0.08012300872802734, 0.08026201629638671, 0.08021759796142579, 0.08049462127685547, 0.08084143829345704, 0.0805823974609375, 0.08078031921386719, 0.08107081604003906, 0.0804427490234375, 0.08032115173339843, 0.08056438446044922, 0.08001868438720704, 0.08036994934082031, 0.08026566314697266, 0.0827720947265625, 0.08056214141845704, 0.08097740936279296, 0.08048000335693359, 0.0805282211303711, 0.08121952056884765, 0.08053932952880859, 0.08067526245117188, 0.080408447265625, 0.08126873779296875, 0.08032006072998046, 0.07986358642578124, 0.07995286560058594, 0.08072978973388673, 0.0812933120727539, 0.08059081268310547, 0.08076290893554687, 0.08116758728027344, 
0.08076719665527343, 0.08065001678466797, 0.08088150024414062, 0.08126969909667969, 0.08054911804199219, 0.08016153717041016, 0.08003174591064453, 0.08024607849121093, 0.08042976379394531, 0.08041680145263672, 0.08074156951904297, 0.08080258941650391, 0.08167327880859375, 0.08073107147216797, 0.08053874969482422, 0.0804351043701172, 0.08018099212646485, 0.08049177551269532, 0.08031158447265625, 0.08054806518554687, 0.08073072052001953, 0.08103900909423828, 0.08078147125244141, 0.0839612808227539, 0.08409139251708984, 0.08155126190185547, 0.08204902648925781, 0.08166191864013672, 0.08089942169189453, 0.0806219482421875, 0.08039778900146484, 0.0806769256591797, 0.08083535766601563, 0.08118067169189454, 0.0809933090209961, 0.08108335876464844, 0.08137014770507812, 0.08088674926757812, 0.08063356781005859, 0.08138166046142578, 0.08108758544921875, 0.0804927978515625, 0.08028431701660156, 0.08414844512939453, 0.08123990631103516, 0.08082630157470704, 0.08107782745361328, 0.08034963226318359, 0.08128307342529296, 0.08105587005615235, 0.08103103637695312, 0.08378108978271484, 0.0821785888671875, 0.08164351654052734, 0.08117453002929688, 0.08072374725341797, 0.08083683013916015, 0.08107008361816406, 0.08077677154541016, 0.08088371276855469, 0.08081247711181641, 0.08131187438964843, 0.08092387390136718, 0.0804051513671875, 0.08149327850341796, 0.08182447814941406, 0.08054768371582031, 0.08041283416748046, 0.08055958557128906, 0.08041731262207032, 0.08063804626464843, 0.08067913818359375, 0.08065180969238281, 0.08044969940185547, 0.08068096160888671, 0.08010726165771484, 0.08084060668945313, 0.08133596801757813, 0.0810869140625]",tokens/s,12.328133081484296,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,23940.902912,13042.057216,0.0,12639.535104,12621.66016,s,1,48.7922109375,48.7922109375,0.0,48.7922109375,48.7922109375,48.7922109375,48.7922109375,[48.7922109375],,kWh,0.001218012351120827,0.00013434876003615829,0.000418478112559989,0.0017708392237169744,,MB,1365.331968,13916.5696,0.0,13493.075968,13249.793024,s,10,1.6201024627685547,0.16201024627685548,0.0005028943328675747,0.16198939514160154,0.16257440643310547,0.16269108810424804,0.1627844334411621,"[0.16254847717285156, 0.1619168701171875, 0.16128125, 0.16119212341308595, 0.16171250915527344, 0.16204698181152344, 0.162544677734375, 0.1621199951171875, 0.16193180847167968, 0.16280776977539063]",tokens/s,1580.1469714608522,kWh,4.791256844194078e-06,5.283887978157189e-07,3.1662092724916696e-06,8.485854914501468e-06,tokens/kWh,30167850.214186657,MB,1390.522368,13918.666752,0.0,13493.075968,13389.080064,s,10,58.24422216796875,5.824422216796875,0.03523833504969869,5.816780517578126,5.854045947265625,5.8847771240234374,5.909362065429687,"[5.91550830078125, 5.847216796875, 5.82382861328125, 5.8110390625, 5.8313349609375, 5.82236767578125, 5.811193359375, 5.80757275390625, 5.79488818359375, 
5.7792724609375]",tokens/s,10.816523537444832,kWh,0.0001691215006620592,1.8654786400183648e-05,0.00010343540378650418,0.000291211690848747,tokens/kWh,216337.4685143451,,s,630,58.24093447113039,0.09244592773195297,0.001085733096124194,0.09218611145019531,0.09354929733276367,0.09422799339294434,0.09733385246276856,"[0.0937213134765625, 0.09424710083007813, 0.09278825378417968, 0.09289990234375, 0.09375936126708985, 0.093253662109375, 0.09383936309814453, 0.093517822265625, 0.09349529266357422, 0.09316278076171874, 0.092838623046875, 0.0929466552734375, 0.09768739318847657, 0.09560195159912109, 0.09324534606933593, 0.09373977661132812, 0.0939683837890625, 0.09425305938720703, 0.09304064178466796, 0.09440255737304687, 0.09354854583740234, 0.09311404418945313, 0.09380429077148437, 0.10101577758789063, 0.09346288299560547, 0.09321453094482422, 0.09364889526367187, 0.09405609893798828, 0.09365081787109375, 0.09390278625488281, 0.09396047973632812, 0.09341177368164062, 0.09325708770751953, 0.09315388488769531, 0.0978694076538086, 0.09338719940185547, 0.09302630615234375, 0.09382064056396484, 0.09347058868408203, 0.09324124908447265, 0.09329254150390626, 0.09386239624023437, 0.09289318084716797, 0.09293231964111329, 0.09299501037597656, 0.09781283569335937, 0.09342156982421874, 0.0932162857055664, 0.09436224365234375, 0.093763427734375, 0.09313270568847656, 0.09659814453125, 0.09424262237548828, 0.0935650863647461, 0.09338441467285157, 0.09279923248291015, 0.0932855987548828, 0.09323564910888672, 0.09333763122558594, 0.09408751678466797, 0.09362419128417969, 0.09316770935058594, 0.09374476623535156, 0.09279897308349609, 0.0933785629272461, 0.0930499496459961, 0.09395906829833985, 0.09257558441162109, 0.09278246307373048, 0.09300025939941406, 0.09376124572753906, 0.09332121276855469, 0.09246720123291016, 0.0957871322631836, 0.09291149139404296, 0.09227593231201171, 0.09241680145263671, 0.09283071899414062, 0.0924311065673828, 0.0924142074584961, 0.09226882934570313, 0.09242182159423828, 0.09373442840576172, 0.09250870513916015, 0.09270486450195313, 0.09274073791503906, 0.09267024230957031, 0.09241426849365235, 0.09266918182373046, 0.09255619049072265, 0.09353113555908203, 0.09263616180419922, 0.09290854644775391, 0.09585561370849609, 0.09182224273681641, 0.09205744171142578, 0.09176268768310547, 0.09191180419921875, 0.09254541015625, 0.09272051239013672, 0.0926583023071289, 0.09244255828857421, 0.09262416076660156, 0.09287251281738282, 0.09318089294433594, 0.09247917175292969, 0.09271078491210938, 0.09242578887939454, 0.09254182434082031, 0.09251808166503907, 0.09281327819824219, 0.09249827575683593, 0.0927001953125, 0.0927562255859375, 0.09308531188964844, 0.09224578857421875, 0.09246125030517578, 0.09275251007080078, 0.0936302719116211, 0.09257615661621094, 0.09277827453613281, 0.0928472671508789, 0.09271295928955078, 0.09254937744140625, 0.09270438385009766, 0.09270537567138672, 0.0918321304321289, 0.09591094207763672, 0.09192444610595703, 0.09281132507324219, 0.09279692840576172, 0.09291363525390625, 0.09245420837402343, 0.0923572769165039, 0.09218252563476563, 0.0921349105834961, 0.09235939025878906, 0.09234591674804687, 0.09255072021484376, 0.09220771026611328, 0.09295673370361328, 0.09259932708740234, 0.09240879821777344, 0.09227468872070313, 0.09275801849365234, 0.09277849578857422, 0.09186624145507813, 0.09211788940429687, 0.09223158264160156, 0.09182396697998046, 0.09214511871337891, 0.09311634826660156, 0.09250701141357422, 0.09245807647705079, 0.09237506866455078, 0.0944280014038086, 
0.09551007843017578, 0.09222598266601563, 0.09196662139892578, 0.09211897277832032, 0.0920626220703125, 0.09167686462402344, 0.09220630645751954, 0.0920950698852539, 0.09191046142578126, 0.0921347198486328, 0.0928828125, 0.0919516143798828, 0.09182537841796876, 0.09172252655029296, 0.09178521728515625, 0.09182208251953125, 0.0918702392578125, 0.09233302307128906, 0.09194525146484375, 0.09202658843994141, 0.09235897827148437, 0.09229074859619141, 0.09222978973388672, 0.0934767074584961, 0.09398886108398438, 0.09227833557128906, 0.0920986557006836, 0.09215420532226562, 0.09176054382324218, 0.09238508605957031, 0.09195343780517579, 0.09215526580810547, 0.09268672180175781, 0.09200844573974609, 0.09255935668945313, 0.09159884643554687, 0.09194204711914063, 0.09188438415527343, 0.09198159790039062, 0.09168303680419922, 0.09216432189941406, 0.09229904174804687, 0.09193395233154297, 0.09161190032958984, 0.09187318420410157, 0.09223174285888672, 0.0916910400390625, 0.09183334350585938, 0.0918661117553711, 0.09217228698730469, 0.09195929718017579, 0.09187315368652343, 0.0928584976196289, 0.09218867492675781, 0.09166591644287109, 0.09187129974365234, 0.09186348724365234, 0.09157017517089844, 0.09171148681640626, 0.09188556671142578, 0.09211698913574219, 0.09194496154785156, 0.09247484588623046, 0.0923628158569336, 0.09171337890625, 0.09157491302490234, 0.09175218963623047, 0.09421011352539063, 0.09173628997802734, 0.09195107269287109, 0.09185689544677735, 0.09200198364257813, 0.09218284606933594, 0.09187757110595703, 0.0925769271850586, 0.09175424194335938, 0.09157849884033203, 0.0918617935180664, 0.0957594223022461, 0.09146035003662109, 0.0940169906616211, 0.09265574645996094, 0.09206845092773437, 0.09211670684814453, 0.09229888153076173, 0.09313139343261718, 0.09223375701904298, 0.09251232147216797, 0.09209769439697266, 0.09199900817871094, 0.09208627319335938, 0.09213542175292969, 0.09235894775390625, 0.09198467254638672, 0.09205036926269532, 0.09741107177734375, 0.09203231811523438, 0.09209503936767578, 0.09196553802490234, 0.09219075012207031, 0.09190777587890625, 0.09187359619140625, 0.09156940460205078, 0.09220992279052734, 0.09548595428466797, 0.09614540863037109, 0.09529708862304688, 0.09275641632080078, 0.09253449249267579, 0.09218844604492188, 0.09219251251220703, 0.09250685119628907, 0.09244576263427734, 0.09241053009033204, 0.09216441345214844, 0.0927344970703125, 0.09214665222167968, 0.09225421142578125, 0.09183849334716797, 0.09216512298583984, 0.09201558685302734, 0.09250121307373046, 0.0920440673828125, 0.09285638427734375, 0.0920940170288086, 0.09234265899658203, 0.09260137939453125, 0.09258083343505859, 0.09460572814941406, 0.09262242889404297, 0.0921534423828125, 0.09259049224853516, 0.09240921783447266, 0.09193536376953125, 0.09254093170166015, 0.09262028503417968, 0.09225212860107422, 0.09319888305664062, 0.09219993591308594, 0.09267302703857422, 0.09260749053955078, 0.09222576141357422, 0.09257039642333985, 0.09270838165283203, 0.09243901062011718, 0.0924813461303711, 0.09208022308349609, 0.09288883209228516, 0.0936185302734375, 0.0920716781616211, 0.09198207855224609, 0.09249699401855468, 0.09227327728271484, 0.09223999786376953, 0.09233833312988281, 0.09238527679443359, 0.09236006164550781, 0.09267266845703125, 0.09259619140625, 0.09213404846191406, 0.09237071990966797, 0.09194953918457031, 0.09206281280517578, 0.09216095733642578, 0.09204508972167968, 0.09194076538085938, 0.09192457580566406, 0.0919283218383789, 0.09208060455322266, 0.09336399841308594, 0.09160492706298828, 
0.09183766174316406, 0.09390937805175781, 0.09217791748046875, 0.09248019409179688, 0.09257814025878906, 0.09193260955810546, 0.0921866226196289, 0.09314002990722656, 0.092608642578125, 0.09218057250976562, 0.09230409240722656, 0.09296806335449219, 0.09368412780761719, 0.09229366302490234, 0.09229714965820313, 0.09237615966796875, 0.09199504089355469, 0.09226207733154297, 0.09222691345214844, 0.09238626861572266, 0.09255961608886719, 0.0917523193359375, 0.09174124908447266, 0.09256633758544922, 0.09193881225585937, 0.09187551879882813, 0.09216944122314454, 0.09183309173583984, 0.0921250228881836, 0.09261670684814453, 0.09187020874023437, 0.0920362548828125, 0.09185078430175782, 0.09193778991699218, 0.09196614074707031, 0.0924019546508789, 0.09188130950927734, 0.0925549087524414, 0.09232569885253906, 0.09780278778076172, 0.09491683197021485, 0.09276290893554688, 0.09254505920410157, 0.09208454132080078, 0.09206646728515624, 0.09232793426513672, 0.09265462493896484, 0.09230025482177734, 0.09248886108398438, 0.09233026885986328, 0.09233670043945312, 0.09211996459960937, 0.09248870086669922, 0.09203094482421875, 0.09245388793945312, 0.09288294219970702, 0.09215180969238282, 0.09229926300048828, 0.09217228698730469, 0.09280716705322266, 0.09239552307128907, 0.09248767852783203, 0.09538127899169922, 0.09301181030273438, 0.09324172973632812, 0.0924172134399414, 0.09274559783935547, 0.09203398132324218, 0.09207929229736328, 0.09277523040771485, 0.09301516723632812, 0.09315385437011718, 0.09254729461669922, 0.09215395355224609, 0.09212313842773437, 0.09210675048828125, 0.09223583984375, 0.09260230255126953, 0.09205315399169922, 0.09199683380126954, 0.09228256225585937, 0.0958644790649414, 0.09279523468017578, 0.09227606201171876, 0.09181459045410156, 0.09146189117431641, 0.09160700988769531, 0.09170304107666015, 0.09190550231933593, 0.09182669067382812, 0.09188355255126954, 0.09224396514892579, 0.09234432220458984, 0.09235456085205078, 0.09235395050048828, 0.09158921813964843, 0.09140390777587891, 0.09155110168457031, 0.09163468933105469, 0.09180774688720703, 0.09184265899658203, 0.09132637023925781, 0.09244182586669922, 0.09160787200927735, 0.09143292999267578, 0.09125888061523438, 0.09085049438476563, 0.09251052856445313, 0.09157424163818359, 0.09151439666748047, 0.09157222747802735, 0.09218560028076171, 0.09207193756103516, 0.09203302764892578, 0.09156864166259765, 0.09171481323242188, 0.09115289306640625, 0.09549632263183594, 0.09189507293701171, 0.09149651336669921, 0.09172418975830078, 0.09211459350585938, 0.09232764434814453, 0.09194595336914063, 0.09216761779785156, 0.091716064453125, 0.09189580535888672, 0.09198966217041016, 0.09192214202880859, 0.0925120620727539, 0.09211721801757812, 0.09205769348144531, 0.09199391937255859, 0.09187728118896485, 0.09231244659423828, 0.09151478576660156, 0.09188111877441406, 0.09184934234619141, 0.0918148193359375, 0.09163228607177734, 0.09207823944091797, 0.09212662506103515, 0.09231330871582032, 0.0920558090209961, 0.09396211242675781, 0.09445433807373046, 0.09248092651367187, 0.09245366668701172, 0.09159881591796876, 0.09147977447509766, 0.09354402923583985, 0.09220124816894532, 0.09231404876708985, 0.0924037094116211, 0.09211897277832032, 0.0928625259399414, 0.09188556671142578, 0.09150422668457031, 0.09157254028320312, 0.0919000015258789, 0.09152515411376953, 0.09146774291992188, 0.0919552001953125, 0.09203916931152344, 0.09210809326171875, 0.09207465362548828, 0.09217167663574219, 0.09233881378173828, 0.09219107055664062, 0.0961337890625, 
0.09217536163330078, 0.09185008239746094, 0.09190057373046875, 0.09224329376220704, 0.09174012756347656, 0.0915401611328125, 0.09178521728515625, 0.09144102478027344, 0.0918817901611328, 0.09637667083740234, 0.09172582244873047, 0.09165824127197265, 0.09183174133300781, 0.09146015930175781, 0.09205779266357422, 0.09171868896484375, 0.09312233734130859, 0.09207516479492188, 0.09144303894042968, 0.09179888153076173, 0.09155241394042969, 0.09139209747314453, 0.091467041015625, 0.09139699554443359, 0.09168870544433594, 0.09169862365722656, 0.0918431396484375, 0.09252559661865234, 0.09308668518066407, 0.09182166290283203, 0.09181942749023438, 0.09181728363037109, 0.0912056655883789, 0.0918691177368164, 0.09167024230957031, 0.09187760162353516, 0.09165555572509766, 0.09186275482177735, 0.09180838775634766, 0.0921910400390625, 0.0911910400390625, 0.09112287902832031, 0.09279366302490234, 0.09169452667236327, 0.09172569274902344, 0.09169526672363282, 0.09152985382080078, 0.09157417297363281, 0.092657470703125, 0.0916851806640625, 0.09182195281982422, 0.0912088623046875, 0.09172374725341798, 0.09158454132080078, 0.09863414764404296, 0.09203862762451172, 0.09199708557128906, 0.09173117065429688, 0.09175897979736328, 0.09196797180175781, 0.09193910217285156, 0.09172553253173828, 0.09187324523925781, 0.09156307220458984, 0.09198713684082031, 0.0915167007446289, 0.09180368041992187, 0.09226207733154297, 0.09158070373535156, 0.0913407974243164, 0.09215545654296875, 0.09155619049072265, 0.09142707061767578, 0.09169910430908203, 0.09146070098876953, 0.09131244659423828, 0.09135939025878906, 0.09138137817382813, 0.09145244598388672, 0.09355606079101562, 0.09370368194580078, 0.09282649230957031, 0.09183433532714844, 0.09144089508056641, 0.09172512054443359, 0.09139417266845704, 0.09153008270263673, 0.09127321624755859, 0.09172787475585938, 0.09137664031982422, 0.09122057342529297, 0.0914354248046875, 0.09152518463134765, 0.09151881408691406, 0.09129366302490234, 0.09181919860839843, 0.09114128112792969, 0.09154332733154297, 0.09131414031982422, 0.09144841766357421, 0.09141343688964844, 0.09123395538330079, 0.09143974304199219, 0.09149987030029297, 0.09714479827880859, 0.09263081359863282, 0.09165213012695313, 0.09111980438232421, 0.09168093109130859, 0.09104946899414063, 0.09112857818603516, 0.09150054168701172, 0.09137107086181641, 0.09185453033447266, 0.09091353607177734, 0.09142662048339843, 0.09174861145019532, 0.09169094085693359, 0.09099244689941406, 0.09171292877197265, 0.091846435546875, 0.09164083099365235, 0.09240156555175781, 0.09197571563720704, 0.09269459533691406, 0.0915098876953125, 0.09137049865722656, 0.09160284423828124, 0.09124208068847656, 0.09182860565185547, 0.09183392333984375, 0.09178675079345704, 0.09157933044433594]",tokens/s,10.817134129471887,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,4232.462336,1943.928832,0.0,1541.40672,1525.63712,s,1,12.35570703125,12.35570703125,0.0,12.35570703125,12.35570703125,12.35570703125,12.35570703125,[12.35570703125],,kWh,0.00015669463710833042,1.7270043666757326e-05,5.2319208522014216e-05,0.00022628388929710197,,MB,2652.151808,2090.729472,0.0,1667.23584,1626.063872,s,10,0.804010269165039,0.0804010269165039,0.0007741366207162823,0.0805223503112793,0.08124668731689454,0.08144886322021484,0.0816106039428711,"[0.08165103912353516, 0.0812017593383789, 0.08098668670654297, 0.08047750091552734, 0.08056719970703125, 0.08064601898193359, 0.08015161895751953, 0.08003984069824219, 0.07916998291015626, 0.07911862182617188]",tokens/s,3184.038933555598,kWh,2.3561418247312006e-06,2.598409775124303e-07,1.2360717773871268e-06,3.852054579630758e-06,tokens/kWh,66458040.69176484,MB,2656.284672,2176.712704,0.0,1751.12192,1664.864768,s,10,48.7730068359375,4.8773006835937505,0.02059338334331693,4.883781982421875,4.8947681640625005,4.89657255859375,4.89801607421875,"[4.88473291015625, 4.87610205078125, 4.8828310546875, 4.8943671875, 4.87853955078125, 4.8871533203125, 4.898376953125, 4.84141162109375, 4.8354521484375, 4.8940400390625]",tokens/s,12.91698094643195,kWh,0.00014262664362859776,1.5732025553145447e-05,5.4088500260013306e-05,0.00021244716944175655,tokens/kWh,296544.31341939705,,s,630,48.76993956756596,0.07741260248819987,0.0010499755814703867,0.077283935546875,0.07828720474243164,0.07910481910705566,0.08121451965332031,"[0.07736166381835938, 0.07750809478759765, 0.077497314453125, 0.07784835052490234, 0.07719340515136719, 0.07690652465820312, 0.07750860595703125, 0.07716044616699219, 0.0772353286743164, 0.07711014556884765, 0.07741426849365235, 0.07766022491455078, 0.07733161926269531, 0.07742671966552735, 0.07770611572265625, 0.07769087982177734, 0.07749427032470703, 0.07718195343017578, 0.07987712097167969, 0.07832780456542969, 0.07795507049560547, 0.0792615966796875, 0.07741449737548828, 0.07775823974609375, 0.07839151763916016, 0.07777484893798828, 0.07754956817626953, 0.07741645050048829, 0.07788275146484375, 0.07783283233642578, 0.07723939514160157, 0.07698540496826171, 0.07716204833984375, 0.07723446655273437, 0.07721369934082031, 0.07725286102294922, 0.07716838073730468, 0.07772284698486329, 0.07777696228027343, 0.077652099609375, 0.07856742095947265, 0.07744367980957031, 0.0805212173461914, 0.07714816284179687, 0.07689215850830078, 0.07719026947021485, 0.07693401336669922, 0.07679590606689453, 0.07705203247070312, 0.07709219360351563, 0.07744092559814453, 0.07760140991210937, 0.07747929382324219, 0.07710374450683594, 0.07768045043945312, 0.07788870239257813, 0.07717324829101563, 0.07703807830810547, 0.0772933120727539, 0.07679001617431641, 0.0772216339111328, 0.07696409606933594, 0.0770334701538086, 0.07757465362548828, 0.07874301147460938, 0.07754297637939453, 0.0770956802368164, 0.07728128051757813, 0.07679385375976562, 0.07664345550537109, 0.07713881683349609, 0.0770288314819336, 0.07671651458740235, 0.07713401794433594, 0.07712111663818359, 0.07706857299804687, 0.07695565032958984, 0.07720902252197266, 0.07714669036865235, 0.07715225219726562, 0.07710514831542968, 0.07664812469482422, 0.07664876556396484, 0.07739417266845704, 0.07699225616455078, 0.07701503753662109, 0.07711321258544922, 0.07698445129394531, 0.07758425903320312, 0.07686566162109375, 0.077264892578125, 0.0779500503540039, 0.07771148681640624, 0.07731689453125, 0.07700511932373047, 0.07717036437988281, 
0.077264892578125, 0.07684710693359376, 0.07704780578613281, 0.07757324981689454, 0.07791280364990234, 0.07754755401611328, 0.07689395141601563, 0.07757453155517578, 0.07770499420166016, 0.07789590454101562, 0.07721600341796875, 0.07682969665527344, 0.07672492980957031, 0.07728339385986328, 0.07707186889648437, 0.0776995849609375, 0.07705513763427735, 0.07763139343261719, 0.07802534484863281, 0.0771259536743164, 0.07716864013671874, 0.07975424194335938, 0.07956527709960938, 0.07780358123779296, 0.07777532958984375, 0.07731609344482422, 0.0771518096923828, 0.07807561492919922, 0.0787523193359375, 0.07824806213378906, 0.07769318389892578, 0.0775331802368164, 0.0776437759399414, 0.07810643005371094, 0.07735747528076171, 0.07667593383789062, 0.0765551986694336, 0.07740560150146485, 0.07841238403320312, 0.07897046661376952, 0.07810883331298828, 0.07820928192138672, 0.07877632141113282, 0.07817139434814453, 0.07822364807128906, 0.0773628158569336, 0.07725552368164063, 0.07756390380859375, 0.07718297576904297, 0.07989427185058594, 0.0783403549194336, 0.07797491455078125, 0.07954847717285156, 0.07728800201416015, 0.07712329864501953, 0.07763362884521484, 0.0772999038696289, 0.0771747817993164, 0.07699590301513672, 0.07718572998046876, 0.0773017578125, 0.07703155517578125, 0.07671743774414062, 0.07707698822021485, 0.07806707000732421, 0.0770545883178711, 0.07689389038085938, 0.07723040008544922, 0.07764521789550781, 0.07731465911865235, 0.07734681701660157, 0.07716864013671874, 0.07678959655761719, 0.07705206298828125, 0.07746089935302734, 0.07672227478027344, 0.0768721923828125, 0.07648390197753906, 0.07705366516113281, 0.07662271881103516, 0.07694937896728515, 0.07730198669433594, 0.07700070190429688, 0.07707350158691406, 0.07763603210449219, 0.07773257446289063, 0.07911334228515625, 0.07818508911132813, 0.07722188568115235, 0.07722598266601563, 0.07701913452148437, 0.07756185913085938, 0.07696998596191407, 0.07754208374023437, 0.07829039764404297, 0.07889740753173828, 0.07796131134033203, 0.07729337310791015, 0.07742723083496093, 0.07776473236083985, 0.07739596557617187, 0.07685667419433594, 0.07750723266601563, 0.07760076904296875, 0.07790118408203126, 0.07769152069091798, 0.07763558197021485, 0.07801856231689454, 0.07828598022460938, 0.0774537582397461, 0.07767059326171875, 0.07719939422607422, 0.0770087661743164, 0.07708502197265625, 0.07685433959960937, 0.0765798110961914, 0.0774258575439453, 0.0770763168334961, 0.07726092529296875, 0.07724873352050782, 0.07718150329589844, 0.07715020751953125, 0.07734476470947266, 0.07748512268066406, 0.07735731506347657, 0.07706285095214843, 0.07731404876708985, 0.0768835220336914, 0.07687007904052734, 0.07778816223144532, 0.07710749053955078, 0.07722211456298828, 0.07782450866699218, 0.0772669448852539, 0.07739186859130859, 0.07772723388671875, 0.07810620880126953, 0.08168905639648437, 0.07785926055908203, 0.0772442855834961, 0.07733055877685546, 0.07753273773193359, 0.07755411529541016, 0.07731737518310547, 0.07732908630371094, 0.07721900939941406, 0.07717158508300781, 0.07750559997558594, 0.08081231689453125, 0.08104003143310547, 0.0792200927734375, 0.07751744079589844, 0.0780627212524414, 0.07811286163330078, 0.0777305908203125, 0.07775151824951172, 0.07786905670166015, 0.07792025756835938, 0.07756800079345703, 0.07770317077636718, 0.07771340942382812, 0.07713382720947265, 0.07671401977539062, 0.07664380645751953, 0.07694182586669922, 0.07702089691162109, 0.07730518341064453, 0.07750665283203124, 0.07723299407958985, 0.07725670623779297, 
0.07763526153564453, 0.07743929290771484, 0.07756169891357421, 0.07725199890136719, 0.07714585876464844, 0.0771402587890625, 0.07926399993896484, 0.07780745697021485, 0.0772775650024414, 0.07803292846679688, 0.0810289306640625, 0.07772406768798829, 0.07800822448730468, 0.07793414306640625, 0.07752758026123047, 0.07728562927246094, 0.07748786926269531, 0.07703244781494141, 0.07712242889404297, 0.07681446075439453, 0.07693312072753906, 0.07680361938476563, 0.07731657409667969, 0.0775331802368164, 0.07738572692871094, 0.07736319732666015, 0.07702448272705079, 0.07721849822998048, 0.07767664337158203, 0.07752217864990234, 0.07705010986328124, 0.0767615966796875, 0.07700070190429688, 0.07696284484863282, 0.07681942749023438, 0.07702700805664063, 0.07762566375732421, 0.07783837127685547, 0.07794684600830078, 0.07713996887207031, 0.07711949157714844, 0.0772010269165039, 0.07808153533935547, 0.07828339385986328, 0.07740569305419921, 0.07720217895507812, 0.07717273712158203, 0.0768306884765625, 0.07682870483398438, 0.07745935821533204, 0.07750518035888672, 0.07741840362548828, 0.07888310241699219, 0.07756771087646484, 0.07729958343505859, 0.07719129943847657, 0.07688217926025391, 0.07679148864746094, 0.07697004699707032, 0.07714201354980468, 0.07708646392822266, 0.07816828918457032, 0.07776464080810547, 0.07760691070556641, 0.07718463897705079, 0.07718745422363281, 0.0780083236694336, 0.0779161605834961, 0.07817362976074219, 0.07737401580810546, 0.07713177490234376, 0.07711436462402343, 0.07698038482666016, 0.07742960357666015, 0.07782329559326172, 0.07796723175048828, 0.07781868743896485, 0.07704911804199219, 0.07786345672607421, 0.07877241516113281, 0.07711494445800782, 0.07653421020507813, 0.07728447723388672, 0.0769359359741211, 0.07686771392822266, 0.0764559326171875, 0.07674390411376954, 0.07729593658447266, 0.07777024078369141, 0.0776580810546875, 0.07759564971923828, 0.07825612640380859, 0.07797760009765625, 0.07780147552490234, 0.07708057403564453, 0.07724143981933594, 0.0773264617919922, 0.07726515197753907, 0.07757865905761718, 0.07723635101318359, 0.0781107177734375, 0.07792230224609376, 0.0785316162109375, 0.07744406127929687, 0.07848896026611328, 0.08073075103759765, 0.07925965118408203, 0.07812831878662109, 0.07760160064697266, 0.07711743927001953, 0.0770169906616211, 0.07700828552246093, 0.07745814514160156, 0.0799169921875, 0.07846099090576172, 0.0784212188720703, 0.07760499572753907, 0.07791388702392578, 0.07727603149414063, 0.07714582061767578, 0.07714585876464844, 0.07760678100585937, 0.07732701110839844, 0.07684095764160156, 0.0776003189086914, 0.07732064056396484, 0.07831346893310547, 0.07751065826416016, 0.0777127685546875, 0.07755350494384766, 0.07713037109375, 0.07705801391601562, 0.07690873718261719, 0.08078336334228516, 0.07723814392089844, 0.07712985229492188, 0.07696998596191407, 0.07895654296875, 0.07770317077636718, 0.07780352020263671, 0.07755366516113281, 0.07781785583496094, 0.07757993316650391, 0.07771376037597656, 0.07726306915283203, 0.07727689361572265, 0.07717279815673828, 0.07815984344482421, 0.07728336334228515, 0.07719014739990235, 0.07726182556152343, 0.07761305236816406, 0.07788448333740235, 0.07798675537109374, 0.07828684997558594, 0.07909580993652343, 0.07726898956298828, 0.08068710327148437, 0.07725260925292969, 0.07738162994384766, 0.0773076171875, 0.07707408142089844, 0.07698086547851563, 0.0781496353149414, 0.07774124908447265, 0.07801305389404296, 0.07746934509277344, 0.07772828674316407, 0.07769670104980468, 0.07861254119873047, 0.078176513671875, 
0.07783219146728515, 0.07759667205810547, 0.0777433624267578, 0.07741311645507812, 0.07747583770751953, 0.0775331802368164, 0.07777894592285156, 0.08174748992919922, 0.07745174407958984, 0.07733200073242187, 0.07625933074951172, 0.07648713684082031, 0.0761258544921875, 0.07596463775634765, 0.07618163299560547, 0.07613187408447265, 0.07598941040039063, 0.07697618865966797, 0.07691264343261718, 0.07676723480224609, 0.07646979522705079, 0.08093449401855468, 0.07675174713134765, 0.07659225463867188, 0.07603020477294922, 0.07589952087402344, 0.07599088287353516, 0.07584076690673829, 0.0758034896850586, 0.07592352294921875, 0.07639222717285156, 0.07640700531005859, 0.07626547241210938, 0.07605190277099609, 0.08030457305908204, 0.0769148178100586, 0.07626668548583984, 0.07601859283447265, 0.07599298858642578, 0.07578214263916015, 0.076430908203125, 0.07599517059326172, 0.0759966049194336, 0.07624352264404297, 0.07603033447265625, 0.0782023696899414, 0.07670428466796875, 0.0764211196899414, 0.08154930877685547, 0.076293212890625, 0.07586064147949219, 0.07592781066894531, 0.07564288330078126, 0.07562854766845703, 0.07575347137451172, 0.07569612884521484, 0.07610777282714844, 0.0758661117553711, 0.07675049591064453, 0.0761760025024414, 0.0763511962890625, 0.08256265258789063, 0.08052162933349609, 0.07714006042480469, 0.07692803192138672, 0.07624079895019531, 0.07640573120117188, 0.07597196960449219, 0.07650303649902344, 0.07657571411132813, 0.07704576110839843, 0.07628774261474609, 0.0764664306640625, 0.07907670593261719, 0.07840764617919922, 0.0762550048828125, 0.07615580749511719, 0.07573680114746094, 0.07578377532958984, 0.07596511840820312, 0.075589599609375, 0.07589276885986328, 0.07584767913818359, 0.07635126495361329, 0.07618377685546875, 0.07635501098632813, 0.07637049865722656, 0.08079100799560547, 0.07653363037109374, 0.07836534118652344, 0.07749836730957031, 0.07645311737060546, 0.07647923278808594, 0.07594802856445312, 0.07911219024658203, 0.07648451232910156, 0.07698236846923828, 0.07642726135253906, 0.07631657409667969, 0.07687283325195313, 0.08128578948974609, 0.07657218933105468, 0.0761390380859375, 0.07609133148193359, 0.07670156860351562, 0.07586367797851562, 0.07621305847167968, 0.07643341064453125, 0.07618339538574219, 0.07656658935546876, 0.07640854644775391, 0.076312255859375, 0.07661446380615235, 0.07616703796386719, 0.08049571228027344, 0.07608617401123047, 0.07605657958984376, 0.07617536163330078, 0.075683837890625, 0.07571046447753907, 0.07610546875, 0.07589027404785156, 0.07640950775146485, 0.07631053161621094, 0.07649609375, 0.07592015838623047, 0.07731404876708985, 0.0809668197631836, 0.07677629089355469, 0.07617330932617188, 0.07755951690673828, 0.07632915496826172, 0.0764273910522461, 0.07647081756591798, 0.07698361968994141, 0.07665049743652344, 0.07744377899169921, 0.07664230346679687, 0.07614463806152344, 0.07650918579101562, 0.07643939208984375, 0.07618911743164063, 0.07639113616943359, 0.07646185302734375, 0.0760117416381836, 0.07792431640625, 0.07829244995117188, 0.07874729919433594, 0.07789862060546875, 0.0773686752319336, 0.07754412841796875, 0.07806566619873047, 0.07795916748046874, 0.07891795349121093, 0.077735107421875, 0.07707401275634766, 0.07705897521972656, 0.07681638336181641, 0.0769781723022461, 0.077253662109375, 0.07808057403564453, 0.07779984283447265, 0.07788339233398438, 0.07869644927978516, 0.07734067535400391, 0.07769283294677734, 0.07729776000976563, 0.0778583984375, 0.07750083160400391, 0.07719273376464844, 0.07749174499511718, 
0.07821202850341796, 0.07725193786621094, 0.07806800079345703, 0.07772166442871094, 0.0769268798828125, 0.0785821762084961, 0.08753971099853515, 0.0774471664428711, 0.07733020782470704, 0.0777113265991211, 0.07734912109375, 0.07796691131591797, 0.07733702087402344, 0.07748198699951171, 0.07736729431152344, 0.07799375915527344, 0.07749775695800781, 0.07897171020507812, 0.08140595245361328, 0.0784544677734375, 0.07820105743408202, 0.07782723236083984]",tokens/s,12.917793328966452,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1324.843008,13209.829376,0.0,12807.307264,12661.927936,s,1,25.991318359375,25.991318359375,0.0,25.991318359375,25.991318359375,25.991318359375,25.991318359375,[25.991318359375],,kWh,0.0005549466774832732,6.12078150970314e-05,0.0001892393180580043,0.0008053938106383089,,MB,1426.153472,15734.800384,0.0,15319.69536,14320.027648,s,10,28.385652343750003,2.8385652343750003,0.008557072022897365,2.839904052734375,2.8477900146484374,2.8484974243164065,2.8490633520507815,"[2.817494384765625, 2.83102001953125, 2.837189697265625, 2.838500244140625, 2.84007421875, 2.841717529296875, 2.843084716796875, 2.83973388671875, 2.8476328125, 2.849204833984375]",tokens/s,90.18640716790378,kWh,8.259546809208436e-05,9.110142081871719e-06,5.497918287220016e-05,0.00014668479304615623,tokens/kWh,1745238.8532153184,MB,1452.560384,15736.897536,0.0,15319.69536,14320.030208,s,10,136.85613867187502,13.685613867187502,0.021408079173102803,13.691218261718749,13.70692109375,13.71305234375,13.71795734375,"[13.64780859375, 13.659224609375, 13.66548828125, 13.6774609375, 13.690373046875, 13.6920634765625, 13.70555859375, 13.71918359375, 13.7019306640625, 13.697046875]",tokens/s,4.6033740693976615,kWh,0.00040041858573500416,4.416928857604746e-05,0.0002656402958454013,0.0007102281701564531,tokens/kWh,88703.88791551594,,s,630,136.8520502624511,0.21722547660706532,0.001749714675588267,0.21717462921142577,0.21837890625,0.21880101470947266,0.22709250579833987,"[0.22803578186035156, 0.21371987915039062, 0.21333197021484376, 0.21593046569824217, 0.21824946594238281, 0.21633372497558595, 0.21327743530273438, 0.21640602111816407, 0.21646131896972656, 0.21760604858398438, 0.21550819396972656, 0.21618319702148436, 0.2158428192138672, 0.21487657165527344, 0.21755705261230468, 0.21617050170898439, 0.21331968688964845, 0.21600393676757812, 0.21491778564453126, 0.21755699157714845, 0.21682176208496093, 0.2145771484375, 0.21628518676757813, 0.21560525512695314, 0.21719206237792968, 0.21643507385253907, 0.21741091918945313, 0.21684217834472655, 0.21606063842773438, 0.2165411834716797, 0.21516493225097658, 0.2177454071044922, 0.21677261352539062, 0.2170421142578125, 0.21584774780273439, 0.21709823608398438, 0.2156195831298828, 0.21583273315429688, 0.21776287841796876, 0.2168383026123047, 0.21739173889160157, 0.21630361938476564, 0.2169646759033203, 0.21756153869628905, 
0.21792279052734376, 0.21526812744140625, 0.21717315673828125, 0.21567164611816406, 0.21686224365234374, 0.2165452423095703, 0.21741004943847655, 0.21756927490234376, 0.21663075256347655, 0.21728048706054687, 0.21567059326171875, 0.21719322204589844, 0.21755903625488282, 0.21760000610351563, 0.21764054870605468, 0.21778668212890626, 0.21780694580078125, 0.21602304077148438, 0.21680230712890625, 0.23054704284667968, 0.21521040344238282, 0.21545269775390624, 0.2154498291015625, 0.219042236328125, 0.21698316955566407, 0.21351434326171875, 0.21545619201660157, 0.21629353332519533, 0.21785763549804688, 0.21699932861328125, 0.2161029052734375, 0.2149918670654297, 0.21596531677246095, 0.2169997100830078, 0.21616496276855468, 0.21565029907226563, 0.2153164825439453, 0.21577728271484375, 0.21701017761230468, 0.21559613037109376, 0.21642332458496094, 0.21575404357910155, 0.21699244689941405, 0.2169090576171875, 0.2157752685546875, 0.2152864990234375, 0.216985595703125, 0.21721072387695312, 0.21744041442871093, 0.21565846252441406, 0.21618031311035157, 0.21720480346679688, 0.21751808166503905, 0.21707315063476562, 0.21686566162109375, 0.2162296905517578, 0.21632429504394532, 0.21729280090332032, 0.21730508422851563, 0.21705113220214844, 0.21748121643066406, 0.21623532104492188, 0.21707951354980468, 0.21715615844726563, 0.21550845336914062, 0.2167120361328125, 0.21665187072753905, 0.2178560028076172, 0.21807923889160155, 0.2165022735595703, 0.2174150390625, 0.21760678100585937, 0.21761433410644532, 0.21577317810058594, 0.21630157470703126, 0.2179784698486328, 0.21661311340332032, 0.21697142028808594, 0.21670454406738282, 0.21751036071777344, 0.21614553833007813, 0.21710447692871093, 0.22655561828613283, 0.21569923400878907, 0.21568118286132812, 0.21542735290527343, 0.2189140167236328, 0.21697544860839843, 0.21437327575683593, 0.21537586975097656, 0.21594869995117189, 0.2176925506591797, 0.21536585998535157, 0.2173173828125, 0.2165964813232422, 0.21605990600585936, 0.21663320922851562, 0.21570343017578125, 0.2166266632080078, 0.2169188232421875, 0.21586274719238283, 0.2166966094970703, 0.21590042114257812, 0.21548902893066407, 0.215267333984375, 0.21680332946777345, 0.21694668579101561, 0.21519973754882812, 0.2166313018798828, 0.21689958190917968, 0.21699993896484376, 0.21701837158203124, 0.2170818634033203, 0.2160161895751953, 0.21649603271484374, 0.21706976318359375, 0.21710258483886719, 0.21678933715820312, 0.21580790710449219, 0.2181771240234375, 0.21741148376464844, 0.216646240234375, 0.2159508514404297, 0.2154132537841797, 0.21632940673828124, 0.21724038696289064, 0.21780274963378907, 0.2171658172607422, 0.21723738098144532, 0.21648297119140625, 0.21774435424804686, 0.21797843933105468, 0.21797932434082032, 0.21784716796875, 0.21710415649414064, 0.2166321563720703, 0.2182054443359375, 0.21720550537109376, 0.21616639709472657, 0.21683392333984375, 0.21813046264648436, 0.21789401245117188, 0.21827069091796875, 0.21712617492675781, 0.21813682556152345, 0.22728909301757813, 0.21494277954101562, 0.21553208923339845, 0.21560879516601564, 0.21830543518066406, 0.2174169616699219, 0.21651327514648439, 0.21595135498046875, 0.21563343811035157, 0.21601679992675782, 0.21738531494140625, 0.2162382049560547, 0.2154921875, 0.21716018676757812, 0.21785189819335937, 0.21657305908203126, 0.21594403076171875, 0.2164183349609375, 0.21625401306152345, 0.21687135314941405, 0.21619815063476563, 0.21634355163574218, 0.21776792907714843, 0.2164674530029297, 0.2168422393798828, 0.21582806396484375, 0.2168973388671875, 
0.2159232940673828, 0.21635276794433594, 0.2180458526611328, 0.21654179382324218, 0.21809458923339844, 0.21728973388671874, 0.2171835174560547, 0.21754733276367189, 0.21710044860839844, 0.2177103729248047, 0.216074462890625, 0.21721270751953126, 0.21797264099121094, 0.21544172668457032, 0.21688319396972655, 0.2172948455810547, 0.21656527709960938, 0.21810223388671876, 0.216077880859375, 0.21725228881835937, 0.2178744354248047, 0.21818553161621093, 0.2172929992675781, 0.2169917449951172, 0.21712896728515624, 0.2176162872314453, 0.21783151245117188, 0.21826106262207032, 0.2167279968261719, 0.21739244079589845, 0.21683209228515626, 0.21818988037109374, 0.218186279296875, 0.21692343139648437, 0.21719728088378906, 0.21803935241699218, 0.2266112060546875, 0.21568301391601563, 0.2164752655029297, 0.21506480407714842, 0.21982176208496093, 0.2169221496582031, 0.21631231689453126, 0.21658111572265626, 0.2171627502441406, 0.21729075622558594, 0.21630770874023436, 0.21554512023925781, 0.21569190979003905, 0.21791343688964843, 0.2170634307861328, 0.21646890258789062, 0.2159785919189453, 0.2182062072753906, 0.2169528350830078, 0.21787632751464844, 0.21705690002441405, 0.21530841064453124, 0.2177250518798828, 0.21537155151367188, 0.2171130828857422, 0.21743002319335938, 0.2170194854736328, 0.21758041381835938, 0.21542486572265626, 0.2158916778564453, 0.21554217529296876, 0.21711264038085937, 0.21699693298339845, 0.21736317443847655, 0.21736674499511718, 0.21756092834472657, 0.21697865295410157, 0.2170777587890625, 0.21749168395996094, 0.21748994445800782, 0.21771034240722656, 0.21709584045410157, 0.21778306579589843, 0.21788262939453126, 0.21755903625488282, 0.21784371948242187, 0.2176914520263672, 0.2181854705810547, 0.2181866912841797, 0.21828813171386718, 0.21769830322265624, 0.21790669250488282, 0.21738304138183595, 0.21844607543945313, 0.2172314910888672, 0.2183164825439453, 0.21794589233398437, 0.2179033966064453, 0.21828329467773439, 0.21763375854492187, 0.21570150756835937, 0.21682952880859374, 0.21764547729492187, 0.22862847900390626, 0.21549635314941407, 0.21508746337890625, 0.21531033325195312, 0.22042355346679687, 0.21721891784667968, 0.21514720153808595, 0.2164995880126953, 0.21557936096191407, 0.21815501403808593, 0.21569126892089843, 0.21591766357421874, 0.21598086547851564, 0.21561354064941407, 0.2180400390625, 0.21601718139648438, 0.21719859313964843, 0.21730918884277345, 0.21771058654785155, 0.21736968994140626, 0.2155647430419922, 0.21628361511230468, 0.21739651489257814, 0.21739926147460936, 0.21786061096191406, 0.21618508911132814, 0.21677253723144532, 0.21569526672363282, 0.21805072021484376, 0.21679823303222656, 0.21701936340332031, 0.2177736053466797, 0.21692784118652345, 0.21720562744140626, 0.21705503845214844, 0.2186691131591797, 0.21729087829589844, 0.21581951904296875, 0.2169310760498047, 0.21819801330566407, 0.21818572998046876, 0.216606689453125, 0.2175364532470703, 0.2156934051513672, 0.21819711303710937, 0.21886419677734376, 0.21849530029296876, 0.21750711059570313, 0.2177969207763672, 0.21867971801757813, 0.21785804748535156, 0.21800550842285157, 0.21675149536132812, 0.21817535400390625, 0.217868896484375, 0.21802365112304686, 0.21843763732910157, 0.21563343811035157, 0.2179122314453125, 0.21657334899902345, 0.21879458618164063, 0.21683926391601563, 0.2179019775390625, 0.228272705078125, 0.21547843933105468, 0.2156255340576172, 0.215152099609375, 0.21843836975097655, 0.21632205200195312, 0.216848388671875, 0.2157178955078125, 0.21728665161132812, 0.21764649963378907, 
0.21704150390625, 0.21569126892089843, 0.21575680541992187, 0.21780274963378907, 0.2171023406982422, 0.21621554565429688, 0.216783935546875, 0.217295654296875, 0.2164102783203125, 0.2158214111328125, 0.21764598083496095, 0.2173519287109375, 0.21715379333496093, 0.215510009765625, 0.21638450622558594, 0.21677395629882812, 0.2178135986328125, 0.21520188903808593, 0.2171937255859375, 0.2173509063720703, 0.21821836853027343, 0.21781721496582032, 0.21684124755859374, 0.21739414978027344, 0.21838543701171875, 0.21817648315429689, 0.21800959777832032, 0.21760963439941405, 0.216287841796875, 0.21847596740722655, 0.2190894775390625, 0.21629542541503907, 0.21737644958496094, 0.21750816345214843, 0.21939404296875, 0.2171466827392578, 0.21865338134765624, 0.2168359375, 0.21749900817871093, 0.21886647033691406, 0.2167553253173828, 0.218874755859375, 0.21839462280273436, 0.2188369903564453, 0.21816729736328125, 0.2189107208251953, 0.21806080627441407, 0.21874671936035156, 0.21862141418457032, 0.21722384643554687, 0.21909202575683595, 0.21821334838867187, 0.218281982421875, 0.23020236206054687, 0.2157900848388672, 0.21458723449707032, 0.21598854064941406, 0.22187657165527344, 0.21725715637207033, 0.21535165405273438, 0.21678504943847657, 0.21593939208984375, 0.2185984649658203, 0.21679200744628907, 0.2154659881591797, 0.21703648376464843, 0.21901747131347657, 0.21750790405273437, 0.2169845733642578, 0.2158191680908203, 0.21730284118652343, 0.21838262939453126, 0.21771463012695313, 0.21670895385742187, 0.21606626892089845, 0.2155089874267578, 0.21824496459960938, 0.21801712036132812, 0.215515869140625, 0.21619923400878907, 0.21558889770507814, 0.21688905334472655, 0.21848013305664063, 0.21675033569335939, 0.21781167602539062, 0.2181275177001953, 0.21768208312988283, 0.2174530487060547, 0.21779046630859375, 0.21743385314941407, 0.21805824279785158, 0.2180738525390625, 0.2177269744873047, 0.2166579132080078, 0.2183800048828125, 0.21825360107421876, 0.2174668731689453, 0.2186588134765625, 0.21893939208984375, 0.21797068786621093, 0.21810316467285157, 0.21772694396972656, 0.21823965454101563, 0.21926911926269532, 0.21843919372558593, 0.21804598999023436, 0.21770297241210937, 0.21861576843261718, 0.21789532470703124, 0.21819801330566407, 0.21853094482421875, 0.21918194580078126, 0.21840812683105468, 0.21818643188476564, 0.2187879638671875, 0.21856051635742188, 0.22738383483886718, 0.215267333984375, 0.21581350708007813, 0.21563381958007813, 0.2203981170654297, 0.2176104278564453, 0.21608038330078125, 0.21567079162597655, 0.21623603820800782, 0.2182386932373047, 0.2168568572998047, 0.21669810485839844, 0.21638829040527344, 0.21582771301269532, 0.21776467895507812, 0.21780274963378907, 0.21673983764648438, 0.21628518676757813, 0.2178682861328125, 0.21588172912597656, 0.21698690795898437, 0.21739593505859375, 0.21604547119140624, 0.2177347869873047, 0.21745689392089843, 0.21767555236816405, 0.2173321533203125, 0.217776123046875, 0.21767936706542967, 0.21727232360839843, 0.2183740539550781, 0.2157957763671875, 0.21611907958984375, 0.21795079040527343, 0.21809510803222656, 0.21752041625976562, 0.21742169189453125, 0.2172500762939453, 0.2182617645263672, 0.218107421875, 0.21718400573730468, 0.2173324432373047, 0.21777113342285156, 0.21649702453613281, 0.2180457305908203, 0.21819981384277343, 0.21738919067382811, 0.2183787841796875, 0.21714930725097656, 0.21832876586914063, 0.21742054748535156, 0.21848428344726561, 0.21575315856933594, 0.21705728149414064, 0.21787794494628906, 0.21640147399902343, 
0.21875187683105468, 0.2168730926513672, 0.2184110107421875, 0.21758976745605468, 0.21865037536621093, 0.21818598937988282, 0.2190474853515625, 0.22637773132324218, 0.21569699096679687, 0.21533027648925782, 0.21636396789550782, 0.21971469116210937, 0.2169496307373047, 0.21688851928710937, 0.2154995880126953, 0.21596153259277343, 0.21886282348632813, 0.2152006072998047, 0.21661080932617188, 0.21550799560546874, 0.21789385986328125, 0.21692416381835938, 0.2155089874267578, 0.2170386505126953, 0.21531369018554689, 0.21798495483398436, 0.21684323120117188, 0.21598165893554688, 0.21736898803710938, 0.21731123352050782, 0.21731324768066407, 0.2171761016845703, 0.21694464111328124, 0.2172190704345703, 0.21580595397949218, 0.21828402709960937, 0.2173662109375, 0.2176161346435547, 0.21758393859863281, 0.217272216796875, 0.21771913146972657, 0.2172252197265625, 0.2174913330078125, 0.2183880920410156, 0.2177274932861328, 0.2177576904296875, 0.21756211853027344, 0.2172791290283203, 0.21793417358398437, 0.21799264526367187, 0.21794192504882812, 0.21663929748535157, 0.21779869079589845, 0.2161703338623047, 0.21824201965332032, 0.21792767333984375, 0.21546394348144532, 0.21830184936523436, 0.21753097534179688, 0.2184601287841797, 0.21811386108398437, 0.21637142944335938, 0.21802505493164062, 0.21832920837402345, 0.21825001525878907, 0.2168865966796875, 0.21762322998046876, 0.21858714294433593, 0.2188062744140625, 0.2183739776611328]",tokens/s,4.60351159366486,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1169.805312,2585.657344,0.0,2183.135232,2081.564672,s,1,10.3506728515625,10.3506728515625,0.0,10.3506728515625,10.3506728515625,10.3506728515625,10.3506728515625,[10.3506728515625],,kWh,9.814126236240857e-05,1.0818493143363063e-05,3.3597804656004815e-05,0.00014255756016177644,,MB,1364.19328,3141.402624,0.0,2726.2976,2478.86848,s,10,3.6045412902832035,0.36045412902832036,0.0008999233954839674,0.3603119659423828,0.3613371368408203,0.36178706817626954,0.3621470132446289,"[0.3612020874023438, 0.35909786987304687, 0.35983514404296874, 0.35952764892578126, 0.3622369995117187, 0.3602056274414063, 0.360935791015625, 0.35984466552734373, 0.3612371520996094, 0.36041830444335937]",tokens/s,710.2151962861451,kWh,1.072306040729245e-05,1.182555884922125e-06,7.1166425980714476e-06,1.9022258890286025e-05,tokens/kWh,13457917.983164968,MB,1377.09568,3141.402624,0.0,2726.2976,2478.87104,s,10,25.39707446289062,2.539707446289063,0.01798704695222174,2.531724365234375,2.55820927734375,2.5706767822265624,2.5806507861328125,"[2.5509248046875, 2.532150390625, 2.529824462890625, 2.583144287109375, 2.555438720703125, 2.52100927734375, 2.541313232421875, 2.5292255859375, 2.522745361328125, 2.53129833984375]",tokens/s,24.80600672807946,kWh,7.440115300353756e-05,8.206544409324022e-06,4.5317066015528635e-05,0.00012792476342839024,tokens/kWh,492476.9709288238,,s,630,25.393227943420385,0.04030671102130225,0.0008293006179430763,0.04012748908996582,0.04079084777832031,0.04134579601287842,0.044695716438293454,"[0.042205631256103514, 0.04076134490966797, 0.04045209503173828, 0.0405667839050293, 0.04049027252197265, 0.04059363174438477, 0.0402949104309082, 0.04051715087890625, 0.04084521484375, 0.04091296005249023, 0.04079872131347656, 0.04051459121704101, 0.04004671859741211, 0.04003478240966797, 0.04006540679931641, 0.0402606086730957, 0.04003871917724609, 0.04005091094970703, 0.04004886245727539, 0.0400366096496582, 0.040052734375, 0.03994214248657227, 0.03982915115356445, 0.03989132690429688, 0.04020339202880859, 0.04018057632446289, 0.040079360961914064, 0.040243198394775394, 0.04014694213867188, 0.03994182586669922, 0.04027795028686523, 0.0404947509765625, 0.040473312377929685, 0.0439967041015625, 0.04064508819580078, 0.040279518127441405, 0.04039120101928711, 0.04023206329345703, 0.04011711883544922, 0.040082817077636716, 0.04075788879394531, 0.0444659538269043, 0.0407042236328125, 0.040497150421142575, 0.04024652862548828, 0.040632511138916014, 0.03998777770996094, 0.039995166778564455, 
0.04026607894897461, 0.04040480041503906, 0.04018387222290039, 0.04017561721801758, 0.04033929443359375, 0.04062428665161133, 0.04055654525756836, 0.04072652816772461, 0.04054355239868164, 0.0404384651184082, 0.04017353439331055, 0.04008348846435547, 0.04048227310180664, 0.04115039825439453, 0.04019033432006836, 0.04165222549438476, 0.04088217544555664, 0.04056063842773437, 0.040658432006835936, 0.040514049530029295, 0.040513534545898434, 0.04013382339477539, 0.040360767364501955, 0.04038582229614258, 0.0401743049621582, 0.04039452743530274, 0.04090041732788086, 0.04002812957763672, 0.03992211151123047, 0.03990528106689453, 0.039843647003173825, 0.03975596618652344, 0.039798782348632815, 0.039825408935546876, 0.03989503860473633, 0.040091102600097656, 0.03999334335327148, 0.04001232147216797, 0.039882366180419924, 0.03978278350830078, 0.039815521240234374, 0.04005148696899414, 0.040145633697509765, 0.03999951934814453, 0.04001599884033203, 0.04020764923095703, 0.040040321350097656, 0.039997535705566405, 0.040149185180664064, 0.04016595077514649, 0.041328609466552736, 0.04036374282836914, 0.04025084686279297, 0.04039561462402344, 0.04073814392089844, 0.040771839141845706, 0.040443359375, 0.04004345703125, 0.040043617248535154, 0.03991030502319336, 0.040011680603027344, 0.039946273803710936, 0.040003520965576175, 0.039741569519042966, 0.039880702972412106, 0.03990937423706055, 0.040120319366455076, 0.039984512329101565, 0.03993407821655273, 0.039895553588867184, 0.04009164810180664, 0.03985811233520508, 0.040265792846679686, 0.040431041717529294, 0.04004009628295899, 0.04029328155517578, 0.040424510955810546, 0.04029740905761719, 0.04167728042602539, 0.04023513412475586, 0.03987564849853516, 0.039717823028564456, 0.04027724838256836, 0.041237247467041015, 0.0397946891784668, 0.04006092834472656, 0.040048641204833986, 0.03974720001220703, 0.03991334533691406, 0.03983513641357422, 0.0398551025390625, 0.03986227035522461, 0.039890945434570314, 0.04014284896850586, 0.039948287963867186, 0.040005470275878904, 0.04032118225097656, 0.04038041687011719, 0.04043487930297852, 0.04001990509033203, 0.03984828948974609, 0.03974102401733398, 0.040046943664550784, 0.04026428985595703, 0.04010550308227539, 0.04004288101196289, 0.03988079833984375, 0.040062976837158204, 0.04055654525756836, 0.039933536529541014, 0.03977983856201172, 0.03988572692871094, 0.03982271957397461, 0.04030527877807617, 0.04068518447875977, 0.04066156768798828, 0.04056659317016602, 0.04049916839599609, 0.04043929672241211, 0.040403488159179685, 0.04012851333618164, 0.04002012634277344, 0.0397740478515625, 0.039794273376464843, 0.03982172775268555, 0.04033030319213867, 0.04026873779296875, 0.040691200256347655, 0.040232639312744144, 0.04047916793823242, 0.040012161254882814, 0.040011680603027344, 0.04014499282836914, 0.04023091125488281, 0.03993513488769531, 0.040045406341552736, 0.040062976837158204, 0.03986223983764649, 0.04006889724731445, 0.04002764892578125, 0.0407902717590332, 0.04163792037963867, 0.04091904067993164, 0.040775360107421874, 0.04061215972900391, 0.04482252883911133, 0.04060768127441406, 0.04030879974365234, 0.040474273681640624, 0.040274112701416016, 0.040365409851074216, 0.04028294372558594, 0.04035286331176758, 0.040325279235839846, 0.04064332962036133, 0.040748096466064455, 0.04084832000732422, 0.041277374267578125, 0.04945235061645508, 0.04468147277832031, 0.0408540153503418, 0.04177091217041016, 0.041095264434814455, 0.04084121704101563, 0.040564735412597655, 0.04063843154907226, 0.04027190399169922, 
0.04026163101196289, 0.040474624633789064, 0.040318977355957034, 0.04477532958984375, 0.04074095916748047, 0.04066025543212891, 0.040350433349609374, 0.04047372817993164, 0.040456798553466795, 0.04065734481811523, 0.04042940902709961, 0.040288257598876956, 0.04030047988891602, 0.040290367126464846, 0.04043366241455078, 0.04082688140869141, 0.041351009368896484, 0.04077987289428711, 0.04075321578979492, 0.04066678237915039, 0.04047443389892578, 0.04040313720703125, 0.040273727416992186, 0.04023251342773437, 0.04048112106323242, 0.040333953857421875, 0.040372161865234374, 0.04041120147705078, 0.04059852981567383, 0.045181407928466796, 0.0410588493347168, 0.040417278289794925, 0.04038595199584961, 0.04033500671386719, 0.04038956832885742, 0.040431072235107425, 0.0403656005859375, 0.04470153427124023, 0.04117238235473633, 0.04055526351928711, 0.040286209106445314, 0.04022806549072266, 0.040282913208007816, 0.04030841445922852, 0.0403438720703125, 0.0450437126159668, 0.04072243118286133, 0.04027571105957031, 0.04033715057373047, 0.0404299201965332, 0.040290016174316406, 0.040065471649169924, 0.040093505859375, 0.04033670425415039, 0.04046118545532226, 0.040226814270019534, 0.04064255905151367, 0.040210430145263674, 0.040746047973632814, 0.0402146224975586, 0.04012441635131836, 0.0401657600402832, 0.040197727203369144, 0.040284416198730466, 0.04037068939208984, 0.040212001800537106, 0.04024380874633789, 0.04033126449584961, 0.04039190292358399, 0.04015552139282227, 0.04321628952026367, 0.04195017623901367, 0.0402841911315918, 0.04029792022705078, 0.04050387191772461, 0.04024313735961914, 0.040373600006103516, 0.040302337646484374, 0.04024009704589844, 0.0401899528503418, 0.0403614387512207, 0.04020640182495117, 0.04011056137084961, 0.0400076789855957, 0.040079360961914064, 0.040052158355712894, 0.04006662368774414, 0.03996720123291016, 0.04002569580078125, 0.039998401641845704, 0.03999129486083984, 0.03984163284301758, 0.03999555206298828, 0.04009971237182617, 0.0400951042175293, 0.04050201416015625, 0.04479334259033203, 0.040718849182128904, 0.03993804931640625, 0.039772159576416014, 0.04157215881347656, 0.04033017730712891, 0.04022051239013672, 0.03964879989624023, 0.03992316818237305, 0.03998617553710938, 0.04088217544555664, 0.03984345626831055, 0.039857791900634765, 0.0407108154296875, 0.04055401611328125, 0.04005436706542969, 0.039599071502685546, 0.040109886169433596, 0.040392894744873044, 0.039847679138183593, 0.04082620620727539, 0.04019823837280274, 0.03975395202636719, 0.039766624450683595, 0.04003190231323242, 0.03974588775634766, 0.039591487884521485, 0.03979875183105469, 0.03978710556030273, 0.039809951782226564, 0.03969734573364258, 0.039817184448242185, 0.039669822692871094, 0.03952742385864258, 0.039895103454589846, 0.039527359008789065, 0.03982131195068359, 0.039777503967285154, 0.039588638305664066, 0.04046384048461914, 0.04133942413330078, 0.03963904190063477, 0.03997081756591797, 0.04017356872558594, 0.039739391326904294, 0.0395711669921875, 0.03967414474487305, 0.03964313507080078, 0.03977734375, 0.04058531188964844, 0.039678657531738284, 0.040546463012695315, 0.04047248077392578, 0.0401710090637207, 0.04002467346191406, 0.039829345703125, 0.03979894256591797, 0.03994393539428711, 0.03978675079345703, 0.03970563125610352, 0.0396376953125, 0.03971920013427734, 0.03960627365112305, 0.039766014099121096, 0.04065484619140625, 0.04095795059204101, 0.03969375991821289, 0.04157478332519531, 0.040664127349853516, 0.040205249786376955, 0.03986150360107422, 0.039747360229492185, 
0.03970057678222656, 0.03979763031005859, 0.039944095611572264, 0.040029598236083985, 0.04064444732666016, 0.03990972900390625, 0.040008190155029294, 0.03990249633789063, 0.04085628890991211, 0.04023091125488281, 0.040048641204833986, 0.039935104370117186, 0.04051385498046875, 0.04005331039428711, 0.040209983825683596, 0.04014271926879883, 0.040038974761962894, 0.03986988830566406, 0.040223297119140626, 0.04006707382202149, 0.040086593627929684, 0.03989100646972656, 0.04003519821166992, 0.03991756820678711, 0.041199615478515625, 0.040004894256591796, 0.03995107269287109, 0.0398287353515625, 0.04015516662597656, 0.03982204818725586, 0.039989246368408206, 0.03980220794677734, 0.04002588653564453, 0.040799102783203124, 0.04061798477172852, 0.03991551971435547, 0.040114177703857425, 0.04035379028320313, 0.04066870498657227, 0.04024777603149414, 0.040605438232421874, 0.04017996978759766, 0.040253440856933595, 0.044058624267578124, 0.04207561492919922, 0.04205612945556641, 0.040566879272460936, 0.04060291290283203, 0.04082255935668945, 0.04071833419799804, 0.0402006721496582, 0.03992214584350586, 0.04008284759521484, 0.04014662551879883, 0.04069014358520508, 0.04015763092041016, 0.04007699203491211, 0.040229183197021484, 0.041957374572753905, 0.0407347183227539, 0.04090591812133789, 0.040483646392822266, 0.04026572799682617, 0.04017766571044922, 0.03990441513061523, 0.039881568908691406, 0.03988860702514648, 0.039930145263671876, 0.0397127685546875, 0.04015097427368164, 0.039892704010009765, 0.03989129638671875, 0.039923713684082034, 0.03984780883789062, 0.0413697280883789, 0.03989427185058594, 0.03992243194580078, 0.039825408935546876, 0.04015241622924805, 0.03973392105102539, 0.039970592498779295, 0.039984481811523434, 0.04031900787353516, 0.04031571197509766, 0.04293020629882813, 0.040114177703857425, 0.04012646484375, 0.04002816009521484, 0.0398636474609375, 0.039921886444091795, 0.04029814529418945, 0.042267295837402345, 0.040097854614257813, 0.03976383972167969, 0.039724704742431644, 0.03982992172241211, 0.03987612915039063, 0.03977891159057617, 0.039939102172851564, 0.041264095306396485, 0.03990528106689453, 0.03993974304199219, 0.03979504013061524, 0.04003430557250977, 0.03966729736328125, 0.039846305847167966, 0.03985203170776367, 0.040103935241699216, 0.0402083854675293, 0.03996227264404297, 0.04013091278076172, 0.04000358581542969, 0.040032257080078126, 0.039881919860839846, 0.039761726379394534, 0.03972713470458984, 0.04042031860351562, 0.03984764862060547, 0.03964137649536133, 0.039712703704833985, 0.03959199905395508, 0.04149248123168945, 0.04037392044067383, 0.04070147323608399, 0.0405225601196289, 0.04429619216918945, 0.04020947265625, 0.03990214538574219, 0.03980492782592773, 0.03956121444702149, 0.03963904190063477, 0.039618560791015625, 0.03967123031616211, 0.039636703491210935, 0.03986486434936523, 0.039678272247314454, 0.0397591667175293, 0.03969491195678711, 0.03953881454467773, 0.039518241882324216, 0.041283550262451174, 0.03999334335327148, 0.03975167846679688, 0.03977612686157227, 0.03987615966796875, 0.03967148971557617, 0.03983244705200195, 0.03958784103393555, 0.04055372619628906, 0.03957017517089844, 0.04006006240844726, 0.04165923309326172, 0.040796031951904295, 0.04044198226928711, 0.040287776947021486, 0.04008569717407227, 0.04029167938232422, 0.039807937622070313, 0.03978035354614258, 0.039602176666259765, 0.03963904190063477, 0.03953868865966797, 0.039593856811523435, 0.03948128128051758, 0.03974294281005859, 0.04104880142211914, 0.04011411285400391, 
0.03974969482421875, 0.03969023895263672, 0.039718433380126955, 0.04011465454101563, 0.03958528137207031, 0.039647743225097655, 0.03981071853637695, 0.039809375762939456, 0.03978144073486328, 0.04058822250366211, 0.039860065460205076, 0.03980303955078125, 0.04005887985229492, 0.03995033645629883, 0.039626750946044925, 0.03971392059326172, 0.03955174255371094, 0.0417017936706543, 0.04067734527587891, 0.04037385559082031, 0.040667713165283205, 0.04067327880859375, 0.04064182281494141, 0.04036272048950195, 0.04011212921142578, 0.040285377502441405, 0.04006524658203125, 0.039998046875, 0.0400261116027832, 0.039841793060302735, 0.03992166519165039, 0.039892799377441404, 0.0398645133972168, 0.0397371826171875, 0.03972092819213867, 0.03974553680419922, 0.03987027359008789, 0.03981939315795899, 0.039806304931640626, 0.04104403305053711, 0.03998179244995117, 0.040197246551513674, 0.04005497741699219, 0.039813056945800784, 0.0398322868347168, 0.03981327819824219, 0.039908958435058595, 0.04021699142456055, 0.04041926574707031, 0.0404859504699707, 0.04032614517211914, 0.04061548614501953, 0.040171871185302736, 0.03997484970092773, 0.03992387390136719, 0.03998515319824219, 0.04031283187866211, 0.04008755111694336, 0.040153087615966795, 0.03978384017944336, 0.03989155197143555, 0.040334400177001954, 0.039936641693115234, 0.03983801651000977, 0.041401470184326175, 0.040027008056640626, 0.040012992858886716, 0.040772289276123044, 0.04001190567016601, 0.03998896026611328, 0.04008118438720703, 0.03974176025390625, 0.03989632034301758, 0.04006089782714844, 0.04009059143066406, 0.04032067108154297, 0.04008995056152344, 0.04050534439086914, 0.04028374481201172, 0.04026204681396484]",tokens/s,24.809764296359887,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, 
in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, 
rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1222.918144,8113.750016,0.0,7711.227904,7603.953664,s,1,18.52492578125,18.52492578125,0.0,18.52492578125,18.52492578125,18.52492578125,18.52492578125,[18.52492578125],,kWh,0.0003322100796583375,3.663787632951109e-05,0.00011262620121199607,0.00048147415719984463,,MB,1386.758144,9877.454848,0.0,9462.349824,8756.635648,s,10,15.755296997070314,1.575529699707031,0.006925942154352956,1.5773136596679689,1.5816152709960938,1.5823553649902344,1.582947440185547,"[1.55729931640625, 1.5715103759765625, 1.5744312744140625, 1.5746710205078125, 1.5757333984375, 1.57915869140625, 1.579052734375, 1.5788939208984376, 1.583095458984375, 1.5814508056640626]",tokens/s,162.48503601525448,kWh,4.5847560680836066e-05,5.056600147491604e-06,3.034202427360244e-05,8.124618510193011e-05,tokens/kWh,3150917.174496583,MB,1412.145152,9879.552,0.0,9462.349824,8756.638208,s,10,80.7916572265625,8.07916572265625,0.01575202309497257,8.083507812499999,8.0932962890625,8.095167919921876,8.096665224609376,"[8.04444189453125, 8.05692919921875, 8.07417041015625, 8.08251025390625, 8.08236279296875, 8.08450537109375, 8.08468359375, 8.09288037109375, 8.0921337890625, 8.09703955078125]",tokens/s,7.797834846156245,kWh,0.0002367631456604143,2.6116248928458303e-05,0.00015721218132519756,0.00042009157591407027,tokens/kWh,149967.30144592724,,s,630,80.78820293426513,0.1282352427528018,0.0016603304663934254,0.12806810760498047,0.12924994049072266,0.12952211990356446,0.13906864151000978,"[0.1383299865722656, 0.12857852172851564, 0.12726636505126954, 0.12677887725830078, 0.12779821014404297, 0.12680131530761718, 0.12680633544921874, 0.12673455810546874, 0.12701197052001953, 0.12683769226074218, 0.1269944305419922, 0.12689612579345702, 0.12855267333984374, 0.12746371459960937, 0.12689810943603516, 0.12730169677734374, 0.12693641662597657, 0.12676290893554687, 0.12672486114501952, 0.12700064086914062, 0.12699436950683593, 0.1275637435913086, 0.12722998046875, 0.12710912322998047, 0.12709881591796876, 0.12704803466796874, 0.1271904983520508, 0.1269598693847656, 0.12703948974609375, 0.12710454559326173, 0.1271526107788086, 0.1270497283935547, 0.12709478759765624, 0.12712745666503905, 0.1288623046875, 0.12826010131835938, 0.12718694305419923, 0.12716006469726562, 0.12725273895263672, 0.12937420654296874, 0.1279324188232422, 0.12730982208251954, 0.12722994995117187, 0.12746109008789064, 0.12749954986572265, 0.12751158142089844, 0.1275432662963867, 0.1289720916748047, 0.12786962890625, 0.12741571044921876, 0.12911471557617188, 0.12798566436767578, 0.12751197052001953, 0.1276343002319336, 0.1276371841430664, 0.12757609558105468, 0.12749231719970702, 0.12841862487792968, 0.12972335815429686, 0.12843008422851562, 0.12771737670898436, 0.12760829162597656, 0.1281315155029297, 
0.1385784912109375, 0.12844857788085937, 0.12710707092285156, 0.12685107421875, 0.12714361572265626, 0.12718307495117187, 0.126738525390625, 0.12691046142578125, 0.12855609130859375, 0.12749273681640624, 0.1270479965209961, 0.12691152191162108, 0.12679388427734375, 0.126849853515625, 0.12687359619140626, 0.12811705017089844, 0.1279152603149414, 0.12679440307617187, 0.12762246704101562, 0.12702342224121094, 0.12689603424072265, 0.1269271011352539, 0.12722994995117187, 0.128855712890625, 0.12732144165039064, 0.12723436737060548, 0.1272466583251953, 0.12712611389160156, 0.12774172973632814, 0.12844776916503906, 0.12725030517578126, 0.12892355346679688, 0.1276833953857422, 0.12723827362060547, 0.1274449920654297, 0.12719427490234375, 0.12807061767578126, 0.12967039489746093, 0.12739238739013672, 0.12994149780273437, 0.1281228790283203, 0.12725247955322266, 0.12739324951171874, 0.12737574768066406, 0.12748348999023437, 0.12726329803466796, 0.12819471740722657, 0.1291180419921875, 0.12843008422851562, 0.12750771331787109, 0.12750425720214845, 0.12746966552734376, 0.12762159729003905, 0.12736544036865236, 0.12916940307617186, 0.12928582763671875, 0.1282911376953125, 0.12786073303222656, 0.12814906311035157, 0.12793055725097657, 0.1277032928466797, 0.12769872283935546, 0.129571044921875, 0.14067340087890626, 0.1282599334716797, 0.12689826965332032, 0.12695139312744141, 0.1268614044189453, 0.1269534683227539, 0.1269304962158203, 0.1274363555908203, 0.128846435546875, 0.12754566192626954, 0.129038330078125, 0.12776668548583983, 0.12700860595703126, 0.12702515411376952, 0.127083740234375, 0.12697837066650391, 0.1278325424194336, 0.128291015625, 0.1270167694091797, 0.12876358032226562, 0.12748384094238283, 0.12918617248535155, 0.12787865447998048, 0.12716492462158202, 0.12721526336669922, 0.12710524749755858, 0.12717804718017578, 0.12884233093261718, 0.1278404769897461, 0.12873638916015626, 0.1276611557006836, 0.1272688980102539, 0.1292305908203125, 0.12776649475097657, 0.1272466278076172, 0.12729875183105469, 0.12815402221679686, 0.12867926025390625, 0.12753289794921874, 0.12821836853027344, 0.12983839416503906, 0.1277761917114258, 0.12745238494873046, 0.12730655670166016, 0.12860307312011718, 0.1284537353515625, 0.12820742797851561, 0.12876835632324218, 0.12784230041503905, 0.12842393493652343, 0.12876377868652344, 0.1276410903930664, 0.12861094665527345, 0.1288581085205078, 0.12766207885742187, 0.12762521362304688, 0.1275189437866211, 0.12931394958496092, 0.12944834899902344, 0.12814767456054688, 0.1277276153564453, 0.1292737274169922, 0.1287192687988281, 0.14069145202636718, 0.12839430236816407, 0.1269250259399414, 0.126851806640625, 0.12682444763183592, 0.12703116607666015, 0.1269813461303711, 0.12791286468505858, 0.12846080017089845, 0.1289071044921875, 0.1290078125, 0.12706813049316407, 0.12705792236328126, 0.12698534393310548, 0.1270497283935547, 0.12702512359619142, 0.12887709045410156, 0.12854208374023438, 0.12840652465820312, 0.12825759887695312, 0.12702457427978517, 0.12725552368164061, 0.12719641876220703, 0.1281582336425781, 0.12843238830566406, 0.12882266235351564, 0.12806413269042968, 0.12763340759277345, 0.12879612731933593, 0.12767491149902344, 0.1274363555908203, 0.1273936004638672, 0.1288239288330078, 0.12851097106933593, 0.12820506286621094, 0.12874009704589845, 0.12916120910644532, 0.12806089782714844, 0.12760438537597657, 0.12806166076660155, 0.12887628173828125, 0.12771011352539063, 0.1274628143310547, 0.1292007293701172, 0.12842799377441405, 0.1275976333618164, 
0.12751152038574218, 0.1295482940673828, 0.12822323608398437, 0.12750454711914064, 0.12849929809570312, 0.12865728759765624, 0.12917593383789064, 0.12817202758789062, 0.12788883209228516, 0.12930224609375, 0.12784317016601562, 0.1278730239868164, 0.12923699951171874, 0.12857133483886718, 0.12921040344238283, 0.12808543395996094, 0.1292724151611328, 0.13915280151367188, 0.1285530548095703, 0.12722841644287108, 0.12885568237304687, 0.12731005096435546, 0.1267930908203125, 0.12690716552734374, 0.12770508575439454, 0.12858160400390625, 0.1281304931640625, 0.12724419403076173, 0.12890170288085936, 0.1277359390258789, 0.12730982208251954, 0.12871241760253907, 0.1273695068359375, 0.12695756530761718, 0.12694937896728514, 0.12872496032714845, 0.12790589141845704, 0.12799788665771483, 0.12809957885742188, 0.12938726806640624, 0.1280239715576172, 0.12708515167236328, 0.12710297393798828, 0.12975039672851563, 0.12779078674316408, 0.12871775817871095, 0.12816546630859374, 0.12852674865722657, 0.12718899536132813, 0.12703529357910157, 0.12890713500976564, 0.1280242919921875, 0.12760543823242188, 0.1287897033691406, 0.12792691040039061, 0.1287926025390625, 0.1275268783569336, 0.12727699279785157, 0.12891154479980468, 0.12796002960205077, 0.1273394241333008, 0.12874659729003907, 0.12843338012695313, 0.13037330627441407, 0.12834141540527344, 0.12759510040283203, 0.12738764953613282, 0.12913424682617186, 0.12775689697265624, 0.12721536254882812, 0.12885606384277343, 0.12940681457519532, 0.12925558471679688, 0.12799180603027344, 0.1275719680786133, 0.1289707489013672, 0.12815939331054688, 0.1275047378540039, 0.12872499084472655, 0.12961695861816405, 0.13934323120117187, 0.12830000305175782, 0.12704768371582031, 0.12693644714355468, 0.1269678726196289, 0.12692447662353515, 0.12686630249023437, 0.12814454650878906, 0.12935459899902343, 0.12802662658691405, 0.12906495666503906, 0.12757606506347657, 0.12686131286621094, 0.1271050262451172, 0.12684083557128906, 0.12913253784179687, 0.1288642578125, 0.12802432250976561, 0.12885232543945313, 0.12755257415771484, 0.12702191925048828, 0.1271719970703125, 0.12717935943603514, 0.12886026000976564, 0.12822828674316405, 0.12877734375, 0.12818006896972656, 0.12857958984375, 0.12733030700683592, 0.1270804443359375, 0.12707350158691405, 0.12845960998535155, 0.12788662719726562, 0.12952584838867187, 0.12826025390625, 0.1288028106689453, 0.12778128051757812, 0.1272954864501953, 0.12781298828125, 0.12892428588867189, 0.12806553649902344, 0.12873231506347657, 0.12920509338378905, 0.1283225860595703, 0.12735177612304688, 0.12774400329589844, 0.12870860290527344, 0.12793036651611328, 0.1292493133544922, 0.12863894653320312, 0.1298145294189453, 0.12836370849609374, 0.12745791625976563, 0.12821522521972656, 0.12887644958496094, 0.12911318969726562, 0.12793138885498048, 0.12912745666503905, 0.12854165649414062, 0.12865286254882813, 0.12897734069824218, 0.1281249237060547, 0.1290260467529297, 0.13995225524902344, 0.1289833984375, 0.12696781158447265, 0.1268490219116211, 0.126742431640625, 0.12688803100585938, 0.12686707305908204, 0.12872947692871095, 0.12943580627441406, 0.12839669799804687, 0.12779055786132812, 0.12715721893310547, 0.1267281951904297, 0.12685244750976563, 0.1274353256225586, 0.1291510772705078, 0.12825599670410157, 0.12807781982421876, 0.1288089599609375, 0.1269903335571289, 0.126801025390625, 0.12701795196533203, 0.12855276489257814, 0.12840150451660157, 0.12847308349609374, 0.12943302917480468, 0.12782220458984375, 0.12693628692626954, 0.1269521942138672, 
0.12833609008789063, 0.12838092041015625, 0.12770508575439454, 0.12951756286621094, 0.129038330078125, 0.12821078491210938, 0.12716582489013672, 0.12725532531738282, 0.128890625, 0.12761280059814453, 0.12900186157226562, 0.12841165161132811, 0.12838079833984375, 0.12836428833007812, 0.12825836181640626, 0.12762847900390625, 0.12741859436035155, 0.129310791015625, 0.12798153686523436, 0.12903488159179688, 0.1286492156982422, 0.12857139587402344, 0.1292779846191406, 0.12839727783203125, 0.12755667114257813, 0.12752787017822265, 0.12927565002441407, 0.12992332458496095, 0.12826966857910158, 0.12910044860839845, 0.1286280059814453, 0.12912098693847657, 0.12827033996582032, 0.1284382781982422, 0.1402142791748047, 0.128509765625, 0.12702534484863282, 0.12852806091308594, 0.12696534729003905, 0.12681504058837892, 0.1266541442871094, 0.1286453399658203, 0.1285463104248047, 0.1298908233642578, 0.12775833892822266, 0.12691152191162108, 0.12664848327636719, 0.12682733154296874, 0.1277480926513672, 0.12803669738769533, 0.12945834350585939, 0.12922412109375, 0.1277339553833008, 0.12694512176513673, 0.1289753875732422, 0.12838911437988282, 0.1270208969116211, 0.12779952239990233, 0.1287487335205078, 0.12919679260253905, 0.12784230041503905, 0.12775628662109376, 0.12864306640625, 0.12736534118652343, 0.1270695037841797, 0.1284653778076172, 0.12846659851074219, 0.12953021240234375, 0.12829901123046875, 0.12818022155761719, 0.1288970184326172, 0.1275389404296875, 0.12791353607177736, 0.12888339233398438, 0.12851814270019532, 0.1284505615234375, 0.1279504623413086, 0.12895663452148437, 0.12760489654541016, 0.12800201416015625, 0.12990467834472658, 0.12829417419433595, 0.1291680908203125, 0.12825404357910156, 0.12839920043945313, 0.128878662109375, 0.12764387512207032, 0.12909750366210937, 0.12944998168945313, 0.12974627685546875, 0.12913510131835937, 0.12765548706054688, 0.1294457550048828, 0.128074462890625, 0.1278177261352539, 0.12895027160644532, 0.12909158325195313, 0.13886259460449218, 0.12831744384765625, 0.12697151947021484, 0.12847346496582032, 0.12722793579101563, 0.12741171264648438, 0.12797968292236328, 0.12806559753417968, 0.12838294982910156, 0.12742070770263672, 0.1286441955566406, 0.12938902282714843, 0.12764205169677734, 0.12677257537841796, 0.1269664306640625, 0.12843551635742187, 0.12755948638916015, 0.1285842590332031, 0.12826246643066405, 0.12838911437988282, 0.12728921508789062, 0.1271270751953125, 0.1285740509033203, 0.12818022155761719, 0.12860755920410155, 0.12807795715332032, 0.12868185424804687, 0.1279047393798828, 0.12712464141845703, 0.12872352600097656, 0.12948069763183595, 0.12792195129394532, 0.12709705352783204, 0.1280924835205078, 0.12819378662109376, 0.12855699157714845, 0.1277935028076172, 0.12871490478515624, 0.12953500366210938, 0.12796412658691406, 0.1271745910644531, 0.12797958374023438, 0.12853248596191405, 0.1293702392578125, 0.1281056365966797, 0.12902989196777342, 0.12917756652832033, 0.12841676330566407, 0.12930435180664063, 0.12794448089599608, 0.12772806549072266, 0.12899276733398438, 0.12787977600097655, 0.12965866088867187, 0.12846893310546875, 0.12947251892089845, 0.12916140747070312, 0.1282743682861328, 0.12961753845214843, 0.12822486877441405, 0.12816458129882813, 0.12889862060546875, 0.12879075622558595, 0.14012640380859376, 0.1293148193359375, 0.12689202880859374, 0.1282539520263672, 0.12719065856933592, 0.1268125457763672, 0.12679373168945313, 0.1285952606201172, 0.1298643798828125, 0.1279587173461914, 0.128077880859375, 0.12809446716308595, 
0.12705587005615235, 0.12693743896484375, 0.12713334655761718, 0.12929434204101561, 0.1281445770263672, 0.1293310089111328, 0.1277675552368164, 0.12854476928710937, 0.12749209594726563, 0.12705587005615235, 0.12831062316894531, 0.12829763793945312, 0.12874957275390625, 0.12953395080566407, 0.1285074920654297, 0.12790767669677736, 0.1282381134033203, 0.12712963104248046, 0.1272998046875, 0.12871980285644533, 0.12848214721679688, 0.12953395080566407, 0.12785254669189453, 0.12828230285644532, 0.12831776428222658, 0.12720902252197266, 0.12929055786132812, 0.12809986877441407, 0.12883413696289062, 0.12864617919921875, 0.12834288024902343, 0.12910604858398436, 0.12801242065429688, 0.12901362609863282, 0.12792841339111327, 0.12896847534179687, 0.1284915771484375, 0.12933290100097655, 0.12884771728515626, 0.12893431091308594, 0.12853263854980468, 0.1286298828125, 0.12853475952148438, 0.12868016052246095, 0.12850579833984374, 0.12884223937988282, 0.12944998168945313, 0.1278782730102539, 0.12887744140625, 0.12910386657714842, 0.12871437072753905]",tokens/s,7.798168261183029,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,12344.008704,6998.065152,0.0,6595.54304,6586.72896,s,1,28.701193359375,28.701193359375,0.0,28.701193359375,28.701193359375,28.701193359375,28.701193359375,[28.701193359375],,kWh,0.0006323349939083148,6.974417583136619e-05,0.00021698350691998214,0.0009190626766596632,,MB,1379.303424,7339.900928,0.0,6916.407296,6839.601152,s,10,1.1717490539550781,0.11717490539550782,0.00035224852047857175,0.11714131164550781,0.1175555290222168,0.11771899528503418,0.1178497682952881,"[0.11728502655029296, 0.11699759674072266, 0.11730847930908203, 0.11751920318603516, 0.11678982543945313, 0.11684864044189452, 0.11686112213134765, 0.11681001281738282, 0.11744668579101562, 0.11788246154785156]",tokens/s,2184.7681390132734,kWh,3.453780341323288e-06,3.8069024658865327e-07,2.2906815711059796e-06,6.1251521590179215e-06,tokens/kWh,41794880.08687214,MB,1404.3136,7383.94112,0.0,6958.350336,6910.273024,s,10,64.4125,6.441249999999999,0.025265869719783462,6.4350888671875,6.47456884765625,6.481800048828125,6.487585009765625,"[6.411396484375, 6.4220908203125, 6.4309697265625, 6.417234375, 6.41573291015625, 6.4392080078125, 6.4587724609375, 6.45510205078125, 6.48903125, 6.4729619140625]",tokens/s,9.780710265864546,kWh,0.00018832472261492744,2.0773202233328864e-05,8.752216805689415e-05,0.00029662009290515047,tokens/kWh,212392.89416629428,,s,630,64.409553817749,0.10223738701230002,0.0009621878296805495,0.10203643035888671,0.10318580474853516,0.10369823722839355,0.10568064811706546,"[0.10256742095947266, 0.10145433807373047, 0.10161151885986328, 0.10140370941162109, 0.10113664245605469, 0.10228396606445313, 0.10287052917480469, 0.10231244659423828, 0.10325606536865234, 0.10250621032714843, 0.10140643310546875, 0.10171657562255859, 0.10345990753173828, 0.10179875183105469, 0.10160963439941406, 0.10165395355224609, 0.102019775390625, 0.10137519836425782, 0.1013004150390625, 0.10172598266601562, 0.10157328033447266, 0.10115885162353516, 0.10163597106933593, 0.10103587341308594, 0.10109776306152343, 0.10113340759277344, 0.10127839660644532, 0.10110530853271485, 0.10153542327880859, 0.10233952331542968, 0.10186310577392578, 0.10232681274414063, 0.10188925170898437, 0.10127779388427735, 0.10131676483154296, 0.10124845123291015, 0.10148134613037109, 0.10140460968017578, 0.10157670593261718, 0.10202349090576172, 0.10135225677490234, 0.10471660614013673, 0.10486953735351562, 0.10207635498046876, 0.10199139404296875, 0.1014988784790039, 0.10151350402832031, 0.10103164672851563, 0.10154099273681641, 0.10186659240722656, 0.10135072326660156, 0.10209737396240234, 0.10135142517089844, 0.10417967987060547, 0.10108857727050781, 0.10078076934814453, 0.10074262237548828, 0.10095875549316406, 0.100976318359375, 0.10126892852783204, 0.10104102325439453, 0.10182860565185547, 0.10125926208496094, 0.10135763549804687, 0.10124691009521485, 0.10089472198486328, 0.10158080291748046, 0.10213375854492188, 0.10232637023925781, 0.10156825256347657, 0.10196720123291016, 0.10136857604980469, 0.10136377716064453, 0.10112726593017578, 0.10169795227050782, 0.10140902709960938, 0.10149298858642578, 0.10151936340332031, 0.102002685546875, 0.10234880065917969, 0.10385552215576171, 0.1019151382446289, 0.10136822509765625, 0.10106642913818359, 0.10132911682128906, 0.10136080169677734, 0.10129590606689454, 0.10129043579101563, 0.10170745849609375, 0.10193119812011718, 0.10228790283203125, 0.10169558715820312, 0.10188915252685547, 0.1017147216796875, 0.1014988784790039, 0.10158258819580078, 
0.1014233627319336, 0.10197196960449219, 0.10276048278808594, 0.10228118133544922, 0.10205343627929687, 0.10181394958496094, 0.10164710235595703, 0.10236287689208984, 0.10241458892822265, 0.1025269775390625, 0.10197113800048828, 0.10204611206054688, 0.10209260559082031, 0.10320751953125, 0.10216985321044922, 0.1016651840209961, 0.10169993591308593, 0.10157698822021484, 0.1018704605102539, 0.1024399642944336, 0.10210240173339843, 0.10216502380371094, 0.10588380432128906, 0.10299337768554688, 0.10200262451171875, 0.10139593505859375, 0.10155916595458984, 0.10217881774902343, 0.10205184173583984, 0.10224845123291015, 0.1022603530883789, 0.10322300720214844, 0.10318502044677734, 0.10196553802490234, 0.10179821014404297, 0.10174873352050781, 0.10241577911376953, 0.10162358093261718, 0.10184758758544922, 0.10253334045410156, 0.10165455627441407, 0.10502124786376953, 0.10798287963867187, 0.10256813049316406, 0.10177705383300781, 0.10134140777587891, 0.1018963851928711, 0.10147628784179688, 0.10151478576660156, 0.1016335678100586, 0.1015428466796875, 0.10333984375, 0.1018369903564453, 0.10446028900146484, 0.10140672302246094, 0.10121727752685547, 0.10179277038574219, 0.10168508911132812, 0.10161321258544923, 0.10139692687988282, 0.10148870086669921, 0.1027499237060547, 0.10184867095947266, 0.10324217224121093, 0.10230995178222656, 0.10257193756103515, 0.10199065399169922, 0.10203052520751953, 0.10194416046142578, 0.10200064086914062, 0.10263346862792969, 0.10243504333496094, 0.10172160339355468, 0.10121990203857421, 0.10105510711669922, 0.10188317108154298, 0.10196051025390625, 0.10177753448486328, 0.10182428741455078, 0.10165257263183594, 0.10213798522949219, 0.10133900451660156, 0.10145193481445312, 0.1011624984741211, 0.10123513793945313, 0.10111106872558594, 0.10137789154052734, 0.10124777221679687, 0.10153180694580079, 0.10139571380615234, 0.10176163482666016, 0.1023078384399414, 0.10250035095214843, 0.10186627197265626, 0.10173628997802735, 0.10197382354736328, 0.101718017578125, 0.10326755523681641, 0.10181916809082031, 0.10197209930419922, 0.10103584289550781, 0.10130963134765625, 0.10155907440185546, 0.10177152252197266, 0.10148659515380859, 0.10171990203857421, 0.10153330993652344, 0.10172029113769532, 0.10171139526367187, 0.1028511962890625, 0.10267443084716797, 0.10266191864013671, 0.10202323150634765, 0.10156976318359374, 0.10194828796386719, 0.10171145629882812, 0.10173193359375, 0.10169203186035156, 0.10203366088867187, 0.10166000366210938, 0.1014974365234375, 0.1016033935546875, 0.1015578842163086, 0.10136204528808594, 0.1015335693359375, 0.10133270263671874, 0.10125103759765625, 0.10113887786865235, 0.10175484466552734, 0.1019101791381836, 0.1044647674560547, 0.10163814544677735, 0.10187145233154297, 0.10151692962646484, 0.10166553497314453, 0.10129180908203125, 0.10136089324951172, 0.10130850982666016, 0.1038990707397461, 0.10302742767333985, 0.10187980651855469, 0.10144742584228515, 0.10151123046875, 0.10113436889648438, 0.10134457397460937, 0.10128793334960938, 0.10131951904296875, 0.10121830749511719, 0.10134732818603516, 0.10095791625976562, 0.1037305908203125, 0.1025770263671875, 0.10271334075927735, 0.10236137390136718, 0.10239875030517578, 0.10198521423339844, 0.10247856140136719, 0.10182240295410157, 0.10130022430419922, 0.10361555480957031, 0.1018328628540039, 0.10152630615234375, 0.10145318603515625, 0.1014788818359375, 0.10127133178710937, 0.10185472106933594, 0.10153254699707032, 0.10283740997314453, 0.10139651489257813, 0.10140531158447266, 0.1012628173828125, 
0.10138009643554688, 0.10145359802246094, 0.10286771392822265, 0.10252912139892578, 0.10251449584960938, 0.10272694396972656, 0.10192486572265624, 0.1012396469116211, 0.10125532531738281, 0.10132268524169921, 0.10129209899902344, 0.10133299255371093, 0.10160441589355469, 0.10138829040527343, 0.10158988952636719, 0.10176905822753907, 0.10219337463378907, 0.1026357421875, 0.10241001892089843, 0.10172415924072266, 0.10160505676269531, 0.10220780944824219, 0.10191449737548829, 0.10255987548828124, 0.10156851196289063, 0.1020231704711914, 0.10116620635986329, 0.10121920013427735, 0.10109449768066406, 0.10168822479248046, 0.10107289886474609, 0.10214736175537109, 0.10159801483154297, 0.10142835235595703, 0.10236803436279297, 0.10259046173095702, 0.10204876708984376, 0.10187673950195313, 0.10183679962158203, 0.10206204986572266, 0.10150278472900391, 0.10192326354980469, 0.10233833312988282, 0.10240614318847656, 0.10240819549560547, 0.10197325134277344, 0.1014681625366211, 0.10115161895751953, 0.10111065673828125, 0.10182592010498047, 0.10199922943115235, 0.10180403137207031, 0.10155382537841796, 0.10171631622314453, 0.10141900634765624, 0.10172621154785157, 0.10182892608642578, 0.10154534149169922, 0.10134464263916015, 0.1014683837890625, 0.10190512084960937, 0.10186710357666015, 0.10165721893310548, 0.10723919677734375, 0.10471571350097657, 0.1025215072631836, 0.10201692962646484, 0.10127078247070312, 0.10161542510986328, 0.10200886535644531, 0.10237407684326172, 0.1025063705444336, 0.10173680114746093, 0.10234265899658203, 0.10175878143310547, 0.10424339294433593, 0.10185699462890625, 0.10155452728271484, 0.10135340881347656, 0.1014988784790039, 0.10237449645996094, 0.10180496215820313, 0.10234611511230468, 0.10262799835205078, 0.1018545913696289, 0.10483891296386719, 0.10207318115234375, 0.10162700653076172, 0.10152845001220703, 0.1016258544921875, 0.10268016052246094, 0.10231033325195313, 0.10218649291992188, 0.10298198699951172, 0.10211341094970704, 0.10197811126708985, 0.10171501159667969, 0.10191744232177734, 0.10163833618164063, 0.10166486358642578, 0.10213529968261718, 0.10233430480957031, 0.1027220458984375, 0.1062441635131836, 0.10170191955566406, 0.10167049407958985, 0.1018482894897461, 0.10278937530517578, 0.1021938247680664, 0.10194124603271484, 0.10202521514892578, 0.10320313262939453, 0.10324140930175782, 0.10271295928955078, 0.10254374694824218, 0.1017968978881836, 0.10178643035888672, 0.1016538848876953, 0.10178633880615234, 0.10310015869140625, 0.10217938995361328, 0.10190003204345703, 0.10146534729003906, 0.1017474594116211, 0.10188502502441406, 0.10135030364990234, 0.10151248168945312, 0.10165936279296875, 0.10183174133300782, 0.10167814636230468, 0.10266969299316406, 0.10320521545410156, 0.10215805053710937, 0.10283023834228516, 0.10294512176513672, 0.10315564727783202, 0.10302979278564453, 0.10331187438964844, 0.10369891357421875, 0.10329251098632812, 0.10390374755859375, 0.1030567398071289, 0.10241014099121094, 0.10309417724609375, 0.10255436706542968, 0.10265513610839844, 0.10258927917480469, 0.10204291534423827, 0.10219993591308593, 0.10254463958740234, 0.10355715179443359, 0.10306841278076172, 0.10238572692871094, 0.10229347229003906, 0.10203958129882812, 0.10203695678710938, 0.10183529663085937, 0.10185702514648437, 0.1019415054321289, 0.10303282928466796, 0.10510540771484375, 0.102582275390625, 0.10211433410644531, 0.10175392150878906, 0.10139433288574219, 0.10253721618652344, 0.10149929809570313, 0.10499440002441406, 0.10209894561767578, 0.1023406753540039, 
0.10295699310302735, 0.1017733154296875, 0.10518326568603516, 0.10175279998779296, 0.10224079895019532, 0.10191635131835937, 0.10215427398681641, 0.10240406036376953, 0.10216441345214844, 0.1025547866821289, 0.10467193603515625, 0.10281561279296875, 0.10213542175292968, 0.10201776123046875, 0.10214604949951171, 0.10239794921875, 0.1028071060180664, 0.10260320281982421, 0.10298982238769532, 0.10213091278076172, 0.10206082916259765, 0.1018603515625, 0.10193401336669922, 0.10187142181396484, 0.10188006591796875, 0.10193087768554687, 0.10236723327636718, 0.10341388702392579, 0.10203750610351563, 0.10198419189453126, 0.10200275421142578, 0.10160537719726563, 0.10153533172607422, 0.10170188903808594, 0.10182057952880859, 0.10170114898681641, 0.10224463653564453, 0.1028138885498047, 0.10314752197265625, 0.10238921356201172, 0.10398732757568359, 0.10196009826660156, 0.10242457580566407, 0.10244265747070312, 0.1029287338256836, 0.10351583862304688, 0.10300038146972657, 0.10293657684326171, 0.10270636749267578, 0.10245811462402343, 0.10227433776855469, 0.101785888671875, 0.10187417602539063, 0.10241996765136718, 0.1025759048461914, 0.10246012878417969, 0.102576416015625, 0.10376982116699218, 0.10279305267333984, 0.10238127899169921, 0.10260272216796874, 0.10247209930419922, 0.10241849517822266, 0.10218086242675781, 0.10301414489746094, 0.10322515106201172, 0.10313568115234376, 0.10309030151367188, 0.10365135955810546, 0.10290447998046875, 0.102327392578125, 0.1023512954711914, 0.1030466537475586, 0.10253206634521485, 0.1027791976928711, 0.10342380523681641, 0.10620275115966797, 0.10231549072265625, 0.10285135650634766, 0.10320470428466796, 0.10239619445800781, 0.10245839691162109, 0.10277523040771484, 0.10269058990478516, 0.1034676513671875, 0.10771353912353515, 0.10314854431152344, 0.1023050537109375, 0.10265440368652344, 0.10252521514892578, 0.10201497650146485, 0.10285609436035156, 0.10292694091796875, 0.10225373077392579, 0.10282233428955079, 0.10283686065673828, 0.1024468765258789, 0.10381954956054687, 0.10369741058349609, 0.10280419158935547, 0.10261673736572266, 0.1026296615600586, 0.10250450897216797, 0.10245689392089843, 0.10347154998779297, 0.10343369293212891, 0.10319286346435547, 0.10428851318359375, 0.10284627532958984, 0.10308128356933594, 0.10264643096923828, 0.1026193618774414, 0.10304512023925781, 0.10269081878662109, 0.10383526611328125, 0.10285311889648438, 0.10199849700927735, 0.10290163421630859, 0.10277849578857422, 0.10291273498535156, 0.10297318267822266, 0.10294834899902344, 0.10273369598388672, 0.10291168212890625, 0.10312786865234375, 0.10275798034667968, 0.10263433837890625, 0.10299359893798828, 0.10260889434814453, 0.10291769409179688, 0.10308214569091798, 0.10254147338867188, 0.10332115173339844, 0.1034244155883789, 0.1026580810546875, 0.10291404724121093, 0.10249625396728515, 0.10240121459960938, 0.10364979553222656, 0.10296351623535156, 0.10419830322265625, 0.10266162872314454, 0.10268093109130859, 0.10257926177978516, 0.10176918029785156, 0.10211222076416016, 0.10208460998535156, 0.10168524932861328, 0.10278399658203125, 0.10249219512939453, 0.10203590393066406, 0.10285433959960938, 0.10313609313964844, 0.10243881225585938, 0.10241769409179688, 0.10268482971191406, 0.10238019561767578, 0.10236109161376954, 0.10291200256347656, 0.10189791870117187, 0.102385986328125, 0.1024422378540039, 0.10261170959472657, 0.10195913696289062, 0.10242921447753907, 0.1021317138671875, 0.10196173095703125, 0.10203955078125, 0.10262528228759765, 0.10268262481689452, 0.10263916778564453, 
0.10351651000976562, 0.10335222625732422, 0.10315385437011719, 0.10258636474609376, 0.1022845458984375, 0.10216464233398438, 0.10214460754394532, 0.10305328369140625, 0.10223619079589844, 0.1021603546142578, 0.11186557006835937, 0.10329325103759765, 0.10243196868896484, 0.10213660430908203, 0.10200873565673828, 0.1018921890258789, 0.10244879913330078, 0.10278233337402344, 0.1022699203491211, 0.10279526519775391, 0.1039974365234375, 0.10255753326416016]",tokens/s,9.781157649106305,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1220.886528,8113.750016,0.0,7711.227904,7603.953664,s,1,18.4128203125,18.4128203125,0.0,18.4128203125,18.4128203125,18.4128203125,18.4128203125,[18.4128203125],,kWh,0.00033239975979584716,3.665854995363107e-05,0.00011571259257001087,0.00048477090231948913,,MB,1371.111424,9877.454848,0.0,9462.349824,8756.504576,s,10,15.596762329101562,1.5596762329101561,0.005080788003923659,1.5618145751953125,1.5631335815429686,1.5633924743652343,1.563599588623047,"[1.5461129150390625, 1.5553743896484375, 1.5591842041015624, 1.5603385009765625, 1.5621392822265625, 1.5630760498046874, 1.5627899169921875, 1.5614898681640625, 1.5626058349609375, 1.5636513671875]",tokens/s,164.13662951209866,kWh,4.534203138500136e-05,4.999563004158226e-06,3.0117690760800288e-05,8.045928514995987e-05,tokens/kWh,3181733.4633643297,MB,1391.194112,9877.454848,0.0,9462.349824,8756.507136,s,10,76.93651123046875,7.693651123046875,0.008799582257569497,7.696724853515625,7.701958984375,7.703683837890625,7.705063720703125,"[7.67748291015625, 7.6830322265625, 7.68491943359375, 7.690185546875, 7.6953505859375, 7.70157568359375, 7.69809912109375, 7.69931591796875, 7.70114111328125, 7.70540869140625]",tokens/s,8.188569899053396,kWh,0.0002250537408816672,2.482647112548078e-05,0.0001494300639884011,0.00039931027599554906,tokens/kWh,157772.04792170748,,s,630,76.93265145874027,0.12211531977577815,0.0015868114767957707,0.12193806457519532,0.123113818359375,0.12361289329528809,0.13189941223144533,"[0.13201901245117187, 0.12042444610595703, 0.12040767669677735, 0.12039724731445313, 0.11966668701171874, 0.11998445129394532, 0.12065996551513672, 0.12296431732177734, 0.12445903778076171, 0.12168013000488281, 0.1216072006225586, 0.11988886260986328, 0.12126822662353516, 0.12078688049316406, 0.12117203521728516, 0.12255846405029297, 0.12268476867675782, 0.12154128265380859, 0.1211673583984375, 0.12083251190185547, 0.12156313323974609, 0.12087474822998047, 0.12075827026367188, 0.12187468719482422, 0.12198297882080078, 0.12233318328857422, 0.12159728240966797, 0.12088742065429688, 0.12164765167236329, 0.12083318328857422, 0.12114415740966797, 0.12154665374755859, 0.12177139282226562, 0.12262268829345703, 0.12142495727539063, 0.12183238220214844, 0.12115277099609376, 0.1223605728149414, 0.12107891082763672, 0.12222291564941407, 0.12170025634765624, 0.12192015838623046, 0.1223741455078125, 0.12205401611328125, 0.12150640106201172, 0.12191487884521485, 0.12154463958740234, 0.12196720123291016, 0.12215296173095704, 0.12186598205566407, 0.12188467407226562, 0.12262175750732422, 0.12246784210205078, 0.12209862518310546, 0.12167164611816406, 0.1225499496459961, 0.12269100952148437, 0.12211497497558593, 0.12258675384521485, 0.12283020782470704, 0.12259839630126954, 0.12207456207275391, 
0.12216377258300781, 0.13426864624023438, 0.12031116485595703, 0.12035689544677734, 0.12070572662353515, 0.12067244720458985, 0.12055465698242188, 0.12065264129638673, 0.12239814758300781, 0.12370540618896485, 0.12135874938964844, 0.12093449401855469, 0.12081561279296875, 0.12076441955566407, 0.12043405151367187, 0.12101609802246094, 0.12185890960693359, 0.12290220642089844, 0.1226079330444336, 0.12065996551513672, 0.12136851501464843, 0.12063750457763672, 0.12116537475585938, 0.12125843048095702, 0.12185536193847656, 0.12228057861328125, 0.12249267578125, 0.12163241577148437, 0.12130944061279297, 0.12149488067626953, 0.12094947052001953, 0.12189724731445313, 0.12173926544189453, 0.12242447662353516, 0.12220838165283203, 0.12164988708496094, 0.12134400177001953, 0.12203004455566406, 0.120942626953125, 0.1220321273803711, 0.12264572906494141, 0.12219676971435547, 0.12254003143310546, 0.1225134048461914, 0.12131533050537109, 0.12183757019042969, 0.12165325164794921, 0.12190854644775391, 0.1220467529296875, 0.12187814331054687, 0.12257769775390626, 0.12322611236572266, 0.12159337615966796, 0.12195471954345703, 0.12222422027587891, 0.12171721649169921, 0.12228403472900391, 0.12241004943847657, 0.12215187072753907, 0.1233469467163086, 0.12164899444580078, 0.12265436553955078, 0.12301567840576172, 0.12164822387695312, 0.133051513671875, 0.1204005126953125, 0.12065315246582031, 0.12045139312744141, 0.12068515014648437, 0.120461181640625, 0.12081574249267578, 0.12234451293945313, 0.12391446685791016, 0.12113699340820312, 0.1216684799194336, 0.12057993316650391, 0.12066627502441406, 0.120970947265625, 0.12187884521484375, 0.1225514907836914, 0.12262278747558594, 0.12189081573486328, 0.1216184310913086, 0.1207352294921875, 0.12136908721923828, 0.12101017761230469, 0.12087859344482423, 0.12192364501953125, 0.12219846343994141, 0.12270941162109375, 0.12160630035400391, 0.12079763031005859, 0.12169625854492187, 0.12107571411132813, 0.12170598602294921, 0.12176435089111329, 0.12229363250732422, 0.12342902374267578, 0.12172335815429687, 0.12147404479980468, 0.1215428466796875, 0.1219797134399414, 0.12186524963378906, 0.12176601409912109, 0.12247331237792969, 0.12277760314941406, 0.12173926544189453, 0.121923583984375, 0.12181676483154297, 0.12194847869873048, 0.12192912292480469, 0.12198973083496094, 0.1233141098022461, 0.12223404693603515, 0.12192038726806641, 0.12231468963623048, 0.122185791015625, 0.12159795379638672, 0.12196249389648438, 0.12233219146728516, 0.12268643188476562, 0.12288563537597656, 0.12216371154785156, 0.12257484436035156, 0.12194815826416015, 0.12189481353759765, 0.122056640625, 0.13091136169433593, 0.12072844696044922, 0.12124492645263672, 0.12060953521728515, 0.120802978515625, 0.12096498870849609, 0.12137315368652343, 0.12250447845458984, 0.12363235473632812, 0.12173085021972656, 0.12100630187988282, 0.12056291198730469, 0.12086883544921875, 0.12125238037109375, 0.12087529754638672, 0.1224230728149414, 0.12322019195556641, 0.12231814575195313, 0.12131603240966797, 0.1208499526977539, 0.12172335815429687, 0.12078284454345703, 0.1212047348022461, 0.12179631805419922, 0.12274873352050782, 0.12244409942626953, 0.12241731262207031, 0.12115763092041015, 0.12186009979248047, 0.12143756866455079, 0.12141577911376954, 0.12139574432373047, 0.12298230743408203, 0.12216095733642578, 0.12269324493408203, 0.12167030334472656, 0.12176579284667968, 0.12149945831298828, 0.12157305908203125, 0.12181922912597656, 0.12244537353515625, 0.12239052581787109, 0.12361980438232421, 0.12195458984375, 
0.12162032318115235, 0.12187433624267578, 0.12145299530029297, 0.12179046630859375, 0.12253388977050782, 0.12255017852783204, 0.1233081283569336, 0.12224307250976563, 0.12177814483642578, 0.12179251098632812, 0.12251744079589844, 0.1222327651977539, 0.12235308837890625, 0.12298496246337891, 0.12295113372802734, 0.12321631622314454, 0.12207081604003907, 0.12241532897949219, 0.1219587173461914, 0.13398629760742187, 0.1203589096069336, 0.12086067199707032, 0.12078034973144532, 0.12086873626708984, 0.12031238555908202, 0.12036463928222656, 0.12359721374511719, 0.12393062591552734, 0.12190924835205078, 0.12133139038085937, 0.12148153686523437, 0.12127133178710937, 0.12029027557373047, 0.12091331481933594, 0.12277616119384765, 0.12293119812011719, 0.12228307342529297, 0.12136544036865235, 0.12145804595947265, 0.12097344207763672, 0.12122982025146484, 0.12134114837646484, 0.1219400634765625, 0.12275276947021484, 0.12222077178955078, 0.12192841339111328, 0.12124915313720704, 0.12133843231201172, 0.12141165161132812, 0.12123881530761718, 0.12256483459472656, 0.12299900817871094, 0.12194230651855469, 0.12285497283935547, 0.12145906829833984, 0.12217078399658203, 0.12171440124511719, 0.12175574493408203, 0.12190803527832031, 0.12225859069824219, 0.12214771270751953, 0.12237324523925781, 0.12161475372314454, 0.12200150299072265, 0.12348863983154297, 0.12172697448730468, 0.12218163299560547, 0.12170240020751953, 0.1230633316040039, 0.12276016235351563, 0.12183757019042969, 0.12166329956054688, 0.12351433563232422, 0.1222066879272461, 0.12217552185058594, 0.12175382232666015, 0.1234606704711914, 0.12230073547363281, 0.12310182189941406, 0.12210995483398437, 0.12232230377197266, 0.12317529296875, 0.1332998046875, 0.1204658203125, 0.12032614135742188, 0.12079020690917969, 0.12051119995117188, 0.12076009368896484, 0.1207422103881836, 0.12289981079101563, 0.1240680923461914, 0.12245555114746094, 0.12174832153320313, 0.12129286193847656, 0.12145458984375, 0.12090470123291015, 0.12096768188476563, 0.12223334503173829, 0.12259638214111328, 0.12193612670898438, 0.12198780822753906, 0.12148934173583985, 0.12112397003173828, 0.12144060516357422, 0.1214736328125, 0.12165325164794921, 0.1222512664794922, 0.1221341094970703, 0.12243804931640626, 0.12164035034179688, 0.12155554962158203, 0.12246221160888672, 0.1219583969116211, 0.12197203063964844, 0.12229497528076172, 0.12289337921142578, 0.12197500610351562, 0.12166556549072266, 0.12254070281982422, 0.12213251495361328, 0.12227174377441406, 0.12196659088134766, 0.12241490936279296, 0.12250531005859375, 0.12343228912353515, 0.12205951690673827, 0.12218364715576172, 0.12194000244140625, 0.12328316497802734, 0.1223939208984375, 0.12216614532470703, 0.12280534362792969, 0.12242012786865235, 0.12261936187744141, 0.12189552307128906, 0.12376681518554687, 0.12219596862792968, 0.12191334533691406, 0.12283267211914063, 0.12307433319091797, 0.12222918701171875, 0.12369510650634766, 0.12209689331054688, 0.1224035873413086, 0.12202960205078126, 0.13204071044921875, 0.120274658203125, 0.12096470642089843, 0.1202808609008789, 0.12104182434082031, 0.1208846435546875, 0.1208776626586914, 0.12328537750244141, 0.12486463928222656, 0.12113100433349609, 0.12127436828613282, 0.12108595275878906, 0.12143987274169922, 0.12107814025878906, 0.12121046447753907, 0.12267759704589844, 0.12358179473876953, 0.12206563568115235, 0.12115283203125, 0.12106768035888672, 0.12111721801757812, 0.12079666900634765, 0.12145919799804687, 0.12240422058105468, 0.12249737548828125, 0.12209180450439452, 
0.12209766387939452, 0.12203987121582031, 0.12105773162841797, 0.12169401550292969, 0.12184547424316407, 0.12246377563476563, 0.12230137634277344, 0.12235980987548828, 0.12230860900878907, 0.12139670562744141, 0.12173519897460937, 0.12194445037841797, 0.12171830749511718, 0.12275961303710937, 0.12346514892578125, 0.12199712371826171, 0.12289282989501953, 0.12195059204101563, 0.12227584075927735, 0.12195430755615234, 0.12176137542724609, 0.12254045104980468, 0.12267247772216797, 0.12252015686035156, 0.1224909439086914, 0.12358451080322265, 0.12170649719238281, 0.12242025756835938, 0.12170121765136718, 0.12265586853027344, 0.12283187103271484, 0.12294915008544922, 0.12307708740234374, 0.12270960235595703, 0.12189920043945313, 0.12192342376708984, 0.1233411865234375, 0.13160659790039062, 0.12020941162109375, 0.12044287872314453, 0.12058009338378907, 0.12008841705322265, 0.12091203308105469, 0.12105318450927735, 0.12371343994140625, 0.12423302459716797, 0.12182943725585937, 0.12108873748779297, 0.1206456298828125, 0.12097331237792969, 0.12180684661865235, 0.12089282989501954, 0.12289427185058593, 0.12346966552734374, 0.12200428771972656, 0.1211822052001953, 0.12152012634277344, 0.12132742309570313, 0.12141715240478515, 0.12147535705566406, 0.12300540924072266, 0.12376592254638671, 0.12190739440917969, 0.12132809448242188, 0.12124179077148438, 0.12146189117431641, 0.12136537933349609, 0.1215055694580078, 0.12303548431396484, 0.12360444641113282, 0.12206992340087891, 0.12156313323974609, 0.12184732818603515, 0.12183535766601562, 0.12233757019042969, 0.12203247833251953, 0.12234476470947266, 0.1234664306640625, 0.1222749786376953, 0.1231860122680664, 0.12197216033935547, 0.12203657531738281, 0.12174313354492187, 0.12162675476074218, 0.1222147216796875, 0.12246201324462891, 0.12410284423828125, 0.12247654724121093, 0.12176383972167969, 0.12180480194091797, 0.1228410873413086, 0.12251955413818359, 0.12249292755126953, 0.12237174224853516, 0.12273040008544922, 0.12277772521972656, 0.12285164642333984, 0.12269283294677734, 0.12240771484375, 0.12247222137451172, 0.13114572143554687, 0.12017664337158203, 0.12101177978515625, 0.12024425506591797, 0.12084060668945312, 0.12034047698974609, 0.12096102142333984, 0.124636962890625, 0.124069091796875, 0.12147193908691406, 0.12113107299804687, 0.12091187286376953, 0.12131533050537109, 0.12060211181640625, 0.12126873779296875, 0.12335923004150391, 0.12337971496582031, 0.12249906921386719, 0.12177552032470704, 0.12084284973144531, 0.12233475494384766, 0.12081785583496094, 0.1218287353515625, 0.12285225677490234, 0.12265471649169922, 0.12243762969970703, 0.12208937835693359, 0.12171804809570312, 0.1217720947265625, 0.12154694366455078, 0.12177196502685547, 0.12262255859375, 0.12223200225830078, 0.12334780883789062, 0.12201299285888671, 0.12151878356933593, 0.12142320251464844, 0.12142454528808594, 0.1221591033935547, 0.12260966491699218, 0.12291180419921875, 0.12248973083496094, 0.12315580749511719, 0.12186287689208984, 0.12215500640869141, 0.12236595153808594, 0.12163999938964844, 0.12281542205810547, 0.12271206665039062, 0.12275302124023438, 0.12272640228271485, 0.12175769805908203, 0.12222054290771485, 0.12257484436035156, 0.12175494384765626, 0.12234310150146484, 0.12399491119384766, 0.12311164855957031, 0.12266905975341796, 0.12256441497802735, 0.12245804595947266, 0.12234162902832031, 0.12224822235107421, 0.133518310546875, 0.12049603271484376, 0.12112086486816406, 0.1201070098876953, 0.12156079864501954, 0.12051014709472656, 0.12144700622558594, 
0.12325068664550781, 0.12383641815185546, 0.12202349090576171, 0.1215185317993164, 0.12095487976074219, 0.12133513641357421, 0.1203776626586914, 0.12157084655761718, 0.12289734649658203, 0.12406771087646484, 0.12266496276855468, 0.12145836639404296, 0.12109401702880859, 0.12075846099853516, 0.12171881866455078, 0.12138912200927734, 0.12228419494628906, 0.12273868560791015, 0.12403437042236329, 0.12213318634033203, 0.12132339477539063, 0.12111475372314454, 0.12158771514892579, 0.12179865264892578, 0.12176521301269531, 0.1230137939453125, 0.12317852783203125, 0.12238896179199218, 0.12177932739257813, 0.12162137603759765, 0.12216831970214843, 0.12204544067382812, 0.12250931549072265, 0.12272230529785157, 0.12273049926757812, 0.12282822418212891, 0.12183174133300781, 0.12161049652099609, 0.12165229034423829, 0.12184646606445312, 0.12309276580810546, 0.12224543762207031, 0.12421731567382813, 0.1224659194946289, 0.12181561279296875, 0.12178636932373046, 0.12173235321044922, 0.12313667297363282, 0.12256060791015624, 0.12291072082519532, 0.1231333465576172, 0.1247811508178711, 0.12198707580566406, 0.12233052825927734, 0.12238294219970704, 0.12207670593261719]",tokens/s,8.188980726055899,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1172.127744,2585.657344,0.0,2183.135232,2081.564672,s,1,10.3592568359375,10.3592568359375,0.0,10.3592568359375,10.3592568359375,10.3592568359375,10.3592568359375,[10.3592568359375],,kWh,9.837715687914776e-05,1.0844451817010112e-05,3.2350859213992056e-05,0.00014157246791014994,,MB,1369.12896,3141.402624,0.0,2726.2976,2478.999552,s,10,3.716807281494141,0.3716807281494141,0.0009196405066977453,0.3714301452636719,0.3727471008300781,0.3731387634277344,0.37345209350585934,"[0.3705052185058594, 0.37098126220703126, 0.3720406188964844, 0.37247982788085937, 0.37083935546875, 0.3713574523925781, 0.3709102172851563, 0.37266006469726565, 0.37150283813476564, 0.3735304260253906]",tokens/s,688.7631792872754,kWh,1.109640045370545e-05,1.2229869595086598e-06,7.370077912519399e-06,1.968946532573351e-05,tokens/kWh,13001876.676936274,MB,1406.91456,3143.499776,0.0,2726.2976,2479.002112,s,10,29.94504833984375,2.994504833984375,0.01718762853255335,2.994223022460938,3.0151752685546875,3.0154058715820313,3.015590354003906,"[3.015636474609375, 2.99956689453125, 3.0151240234375, 3.00910791015625, 3.013547607421875, 2.979029296875, 2.988879150390625, 2.97709814453125, 2.9771953125, 2.969863525390625]",tokens/s,21.038536750723686,kWh,8.677038771754448e-05,9.571702320024342e-06,4.913507840267864e-05,0.00014547716844024746,tokens/kWh,433057.6452337007,,s,630,29.94193305969241,0.04752687787252759,0.0008434161715771577,0.04741120147705078,0.0480042293548584,0.04848182239532471,0.05161828590393067,"[0.05126460647583008, 0.04803039932250976, 0.04784896087646484, 0.04774166488647461, 0.04766217422485351, 0.047814849853515626, 0.04758515167236328, 
0.04774387359619141, 0.0476262092590332, 0.047548160552978516, 0.047486400604248045, 0.047438655853271484, 0.04758227157592773, 0.04800428771972656, 0.04823126220703125, 0.05063161468505859, 0.0492072639465332, 0.04770630264282227, 0.04805817413330078, 0.047718399047851565, 0.04752384185791016, 0.04751718521118164, 0.04764313507080078, 0.04751769638061523, 0.04764815902709961, 0.04747897720336914, 0.04756460952758789, 0.04755683135986328, 0.047774078369140625, 0.04758467102050781, 0.047704673767089846, 0.047647998809814456, 0.04752051162719727, 0.04760371017456055, 0.04802150344848633, 0.04807884979248047, 0.04780441665649414, 0.0485181770324707, 0.04831884765625, 0.04789683151245117, 0.047828704833984374, 0.04833254241943359, 0.04784975814819336, 0.04803670501708984, 0.047857471466064457, 0.04763641738891602, 0.04770352172851562, 0.04758697509765625, 0.04755567932128906, 0.04741507339477539, 0.04774111938476563, 0.047441726684570314, 0.047747039794921876, 0.04765462493896484, 0.0476060791015625, 0.047470592498779295, 0.04848025512695313, 0.04753724670410156, 0.04780121612548828, 0.047477855682373046, 0.04743219375610352, 0.047514049530029294, 0.04774470520019531, 0.05174319839477539, 0.04795651245117188, 0.04757913589477539, 0.0473436164855957, 0.04731638336181641, 0.0474865608215332, 0.047975582122802736, 0.04756185531616211, 0.04782262420654297, 0.047463359832763674, 0.04734342575073242, 0.047336769104003903, 0.047352703094482425, 0.04765081787109375, 0.04733542251586914, 0.04733542251586914, 0.047310783386230466, 0.04722284698486328, 0.04728851318359375, 0.0474186897277832, 0.047501823425292966, 0.04750969696044922, 0.04757648086547851, 0.04754179382324219, 0.047696544647216794, 0.04754431915283203, 0.04761212921142578, 0.04810137557983398, 0.04792121505737305, 0.047787967681884765, 0.04762419128417969, 0.04742531204223633, 0.04745033645629883, 0.04731404876708984, 0.04756556701660156, 0.04784726333618164, 0.04759900665283203, 0.047621055603027346, 0.04753401565551758, 0.04751769638061523, 0.04726598358154297, 0.047599777221679684, 0.04748825454711914, 0.047325599670410154, 0.047341567993164066, 0.047494430541992184, 0.04745084762573242, 0.04749107360839844, 0.04740332794189453, 0.04813337707519531, 0.04791545486450195, 0.048252830505371096, 0.04759091186523438, 0.04748963165283203, 0.047306751251220705, 0.047400287628173825, 0.04758915328979492, 0.0477658576965332, 0.04741318511962891, 0.047346271514892575, 0.04735728073120117, 0.047299297332763675, 0.04744502258300781, 0.054267936706542966, 0.048596927642822266, 0.04874844741821289, 0.04809305572509766, 0.047855903625488284, 0.047701953887939456, 0.04743596649169922, 0.04738163375854492, 0.04741811370849609, 0.04727177429199219, 0.04738268661499023, 0.04744704055786133, 0.0476368637084961, 0.04714355087280273, 0.04749926376342774, 0.04741120147705078, 0.04795500946044922, 0.0473056640625, 0.04758249664306641, 0.04785251235961914, 0.05003548812866211, 0.050358814239501955, 0.0480975341796875, 0.04780451202392578, 0.047733951568603515, 0.04774345779418945, 0.047399265289306644, 0.047424800872802736, 0.04746249771118164, 0.0474134407043457, 0.04750096130371094, 0.047311744689941405, 0.04732953643798828, 0.04732819366455078, 0.04728070449829101, 0.04725990295410156, 0.04725139236450195, 0.047321121215820314, 0.04737868881225586, 0.0473372802734375, 0.0473664321899414, 0.04788188934326172, 0.04816099166870117, 0.04781427383422852, 0.047530174255371094, 0.04769734573364258, 0.052558399200439455, 0.048502975463867185, 0.0476479377746582, 
0.04768377685546875, 0.047505279541015626, 0.047587070465087894, 0.04753286361694336, 0.04749430465698242, 0.04744819259643555, 0.047626976013183595, 0.04728575897216797, 0.04750387191772461, 0.04746652984619141, 0.047470558166503904, 0.0473620491027832, 0.047282176971435545, 0.0476151351928711, 0.05149257659912109, 0.04796031951904297, 0.04778521728515625, 0.04752668762207031, 0.04762112045288086, 0.04750233459472656, 0.0473900146484375, 0.04756755065917969, 0.04763238525390625, 0.047951873779296876, 0.04748294448852539, 0.047526912689208986, 0.048005279541015626, 0.04848310470581055, 0.04789059066772461, 0.04747043228149414, 0.04747411346435547, 0.0474711685180664, 0.047470592498779295, 0.04744806289672852, 0.047395198822021485, 0.04745382308959961, 0.04752585601806641, 0.04770819091796875, 0.04757052612304687, 0.04743004989624024, 0.04731523132324219, 0.04755836868286133, 0.04749107360839844, 0.04738844680786133, 0.04772867202758789, 0.048299358367919924, 0.048349822998046875, 0.04781897735595703, 0.049258750915527345, 0.04868508911132813, 0.04800422286987305, 0.04774358367919922, 0.04785971069335938, 0.04815052795410156, 0.04759961700439453, 0.047527935028076174, 0.047474048614501954, 0.04744374465942383, 0.04761788940429688, 0.04779043197631836, 0.04779075241088867, 0.04746854400634765, 0.04746035385131836, 0.04744972610473633, 0.04776383972167969, 0.04762038421630859, 0.04752560043334961, 0.0478532485961914, 0.047566303253173826, 0.048147647857666016, 0.048500415802001956, 0.047669376373291016, 0.04728019332885742, 0.04737206268310547, 0.04727603149414063, 0.047398529052734374, 0.04736793518066406, 0.05173299026489258, 0.04804959869384766, 0.047543968200683594, 0.047894847869873046, 0.0475120964050293, 0.04863107299804687, 0.04769011306762695, 0.04739311981201172, 0.04736614227294922, 0.04771004867553711, 0.047402816772460936, 0.04764092636108398, 0.04749027252197266, 0.04748748779296875, 0.047457759857177734, 0.04735388946533203, 0.04754716873168945, 0.04779417419433594, 0.0477891845703125, 0.04764092636108398, 0.047624961853027344, 0.047942558288574216, 0.04791814422607422, 0.047769569396972654, 0.04779196929931641, 0.04781852722167969, 0.047847232818603515, 0.04830607986450195, 0.04786022567749024, 0.04772454452514648, 0.047439617156982423, 0.047522048950195316, 0.04738662338256836, 0.047723838806152344, 0.0474376335144043, 0.047723648071289065, 0.0475656623840332, 0.04780636978149414, 0.04754947280883789, 0.04779436874389648, 0.04753395080566406, 0.047435806274414065, 0.04751545715332031, 0.04753823852539062, 0.04753408050537109, 0.047467521667480465, 0.05526572799682617, 0.04977104187011719, 0.04807478332519531, 0.04748857498168945, 0.04726214218139648, 0.04732070541381836, 0.04723686218261719, 0.04720828628540039, 0.04722358322143555, 0.04725145721435547, 0.047298206329345706, 0.04731119918823242, 0.04741120147705078, 0.04736819076538086, 0.04767852783203125, 0.048069854736328126, 0.04732428741455078, 0.05134572982788086, 0.04767571258544922, 0.047124736785888674, 0.047056926727294925, 0.04707084655761719, 0.04691801452636719, 0.04706918334960938, 0.04683366394042969, 0.0470568962097168, 0.04706304168701172, 0.04694572830200195, 0.04725612640380859, 0.04811740875244141, 0.04719036865234375, 0.04715871810913086, 0.04718345642089844, 0.04708451080322266, 0.04697292709350586, 0.05062489700317383, 0.047191680908203124, 0.047042560577392575, 0.04722867202758789, 0.047001857757568356, 0.04694563293457031, 0.04703087997436523, 0.04694633483886719, 0.047013919830322264, 
0.047008831024169924, 0.046849246978759765, 0.04696031951904297, 0.04742915344238281, 0.04692176055908203, 0.04692630386352539, 0.04732223892211914, 0.04725030517578125, 0.04709708786010742, 0.04703718566894531, 0.04753203201293945, 0.04725526428222656, 0.046999103546142576, 0.046957313537597654, 0.04708143997192383, 0.047026176452636716, 0.04767910385131836, 0.04967391967773437, 0.04733817672729492, 0.04692569732666016, 0.04691775894165039, 0.04692172622680664, 0.046870529174804686, 0.04692377471923828, 0.04717363357543945, 0.04708966445922851, 0.04704665756225586, 0.04722687911987305, 0.04739052963256836, 0.04728422546386719, 0.04701932907104492, 0.04702195358276367, 0.04701900863647461, 0.04704051208496094, 0.047075328826904295, 0.047314945220947265, 0.05149222564697266, 0.047655807495117185, 0.04759833526611328, 0.04731596755981445, 0.047360031127929685, 0.047376190185546875, 0.04752124786376953, 0.04760851287841797, 0.04744988632202148, 0.04763401412963867, 0.04715296173095703, 0.0476126708984375, 0.0475847053527832, 0.04767363357543945, 0.047136959075927735, 0.047204254150390625, 0.04697113418579101, 0.047177791595458984, 0.046973121643066405, 0.046973857879638675, 0.04713724899291992, 0.04709532928466797, 0.04715331268310547, 0.04693881607055664, 0.04720025634765625, 0.04706918334960938, 0.04724051284790039, 0.04722550582885742, 0.04738217544555664, 0.047272319793701174, 0.047613441467285154, 0.04758169555664062, 0.04836937713623047, 0.04786819076538086, 0.04751897430419922, 0.048066783905029296, 0.04735977554321289, 0.0471201286315918, 0.04719283294677734, 0.04723292922973633, 0.04712310409545899, 0.047075008392333986, 0.04710819244384765, 0.04712847900390625, 0.04717363357543945, 0.047554561614990234, 0.04710771179199219, 0.047117950439453125, 0.047247135162353515, 0.04780239868164062, 0.047548606872558595, 0.047892574310302735, 0.04717171096801758, 0.047262241363525394, 0.0480948486328125, 0.04788057708740234, 0.04715871810913086, 0.04713913726806641, 0.046975231170654295, 0.047202049255371095, 0.04707968139648438, 0.04761600112915039, 0.04805017471313477, 0.05119174575805664, 0.048723743438720706, 0.04751996612548828, 0.0473702392578125, 0.04747622299194336, 0.047255233764648436, 0.047526718139648434, 0.04756054306030273, 0.0471778564453125, 0.04725945663452148, 0.04718719863891602, 0.047032352447509765, 0.04709676742553711, 0.0469483528137207, 0.04713881683349609, 0.047261119842529294, 0.04731552124023437, 0.0471605110168457, 0.047061824798583986, 0.047249408721923826, 0.04705676651000976, 0.04707136154174805, 0.04742707061767578, 0.04757964706420898, 0.04711983871459961, 0.04717007827758789, 0.04700163269042969, 0.04688832092285156, 0.04699427032470703, 0.046866111755371094, 0.04691180801391601, 0.046798206329345705, 0.04704265594482422, 0.04688515090942383, 0.047414302825927734, 0.04684873580932617, 0.04706739044189453, 0.04699065780639648, 0.047081375122070314, 0.04729919815063476, 0.047023326873779296, 0.04693833541870117, 0.047005760192871095, 0.04696131134033203, 0.047527935028076174, 0.04768767929077149, 0.047287776947021486, 0.0470533447265625, 0.04730441665649414, 0.04721657562255859, 0.04716783905029297, 0.04689920043945312, 0.04693753433227539, 0.04693459320068359, 0.04699747085571289, 0.04694633483886719, 0.04692287826538086, 0.046986400604248045, 0.04694224166870117, 0.047492801666259764, 0.04768499374389648, 0.04767782211303711, 0.047171169281005856, 0.05166963195800781, 0.047675392150878904, 0.047022014617919924, 0.046988990783691405, 0.04693423843383789, 
0.046889118194580075, 0.046881824493408206, 0.04710908889770508, 0.04712623977661133, 0.0473001594543457, 0.04706364822387695, 0.04904512023925781, 0.04711849594116211, 0.04716572952270508, 0.047134113311767575, 0.04738115310668945, 0.04772147369384765, 0.0505354232788086, 0.04794515228271484, 0.047128734588623045, 0.047128288269042966, 0.047142688751220706, 0.047000286102294925, 0.046815425872802734, 0.04693916702270508, 0.04684400177001953, 0.046916481018066405, 0.04681523132324219, 0.04724326324462891, 0.047065086364746093, 0.04703587341308594, 0.04698575973510742, 0.04698044967651367, 0.04688528060913086, 0.04708582305908203, 0.04712563323974609, 0.04747568130493164, 0.04758927917480469, 0.046919326782226566, 0.047499198913574216, 0.04714652633666992, 0.04688175964355469, 0.04781046295166016, 0.047853153228759764, 0.04704707336425781, 0.04688185501098633, 0.0466063346862793, 0.046574527740478516, 0.04665958404541016, 0.046561279296875, 0.046749408721923826, 0.0465656623840332, 0.046728511810302735, 0.04667871856689453, 0.046598495483398436, 0.04831606292724609, 0.04672035217285156, 0.047360671997070315, 0.047089729309082035, 0.04727129745483399, 0.047282752990722654, 0.04716255950927734, 0.047043392181396484, 0.05098204803466797, 0.04723388671875, 0.04702566528320312, 0.04693875122070312, 0.04664723205566406, 0.046847007751464845, 0.04662492752075195, 0.046741886138916014, 0.046959102630615236, 0.04694822311401367, 0.046893054962158204, 0.05167539215087891, 0.047301887512207035, 0.047261856079101563, 0.04715484619140625, 0.04714108657836914, 0.04715158462524414, 0.04702844619750977, 0.046953502655029296, 0.046750465393066404, 0.04676198577880859, 0.04693360137939453, 0.04695286560058594, 0.04685004806518555, 0.04698931121826172, 0.04673334503173828, 0.047315040588378904, 0.046747520446777345, 0.046923454284667966, 0.04694998550415039, 0.04720918273925781, 0.04735315322875976, 0.04752361679077149, 0.0472911376953125, 0.047441631317138674, 0.047573406219482424, 0.047343807220458986, 0.04728380966186523, 0.0473606071472168, 0.04703408050537109, 0.04688479995727539, 0.04674150466918945, 0.04693964767456055, 0.04683622360229492, 0.04691558456420898, 0.04697436904907227, 0.04695068740844727, 0.04752211380004883, 0.04692377471923828, 0.04698457717895508, 0.04714368057250977, 0.04675337600708008, 0.04684767913818359, 0.04698527908325195, 0.046744415283203125, 0.04667548751831055, 0.04698080062866211, 0.04685990524291992, 0.04705539321899414, 0.04677030563354492, 0.04668764877319336, 0.04665628814697265, 0.046653438568115234]",tokens/s,21.040725685413463,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return 
forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1078.972416,1400.766464,0.0,998.244352,995.459072,s,1,8.9691708984375,8.9691708984375,0.0,8.9691708984375,8.9691708984375,8.9691708984375,8.9691708984375,[8.9691708984375],,kWh,5.934822658333209e-05,6.539252200111781e-06,1.9478904472000624e-05,8.536638325544449e-05,,MB,1425.526784,1627.25888,0.0,1212.153856,1174.003712,s,10,1.960198013305664,0.1960198013305664,0.0006111303209426651,0.19587601470947263,0.19681426849365233,0.19685065383911132,0.19687976211547853,"[0.19551455688476563, 0.19639337158203124, 0.19578834533691405, 0.19688703918457032, 0.19497222900390626, 0.19668917846679687, 0.19596368408203124, 0.19561126708984375, 0.19557215881347656, 0.19680618286132812]",tokens/s,1305.9905084195213,kWh,5.895468667666592e-06,6.501654034719021e-07,3.927975364599878e-06,1.0473609435738372e-05,tokens/kWh,24442385.556832865,MB,1451.1104,1627.25888,0.0,1212.153856,1174.006272,s,10,23.056530761718754,2.3056530761718754,0.010584921999149744,2.30316259765625,2.3113807861328124,2.322688684082031,2.3317350024414063,"[2.300117919921875, 2.298115478515625, 2.33399658203125, 2.296547607421875, 2.308393798828125, 2.306207275390625, 2.308490966796875, 2.29817529296875, 2.308867919921875, 2.297617919921875]",tokens/s,27.32414544541984,kWh,6.711316673775233e-05,7.401867076383218e-06,3.226180358720077e-05,0.00010677683740133634,tokens/kWh,590015.6020093131,,s,630,23.054078517913837,0.036593775425260024,0.0005247885299330771,0.03646443367004394,0.03703644294738769,0.03743591995239258,0.03820559158325196,"[0.0369332160949707, 0.03674153518676758, 0.036468734741210936, 0.036429439544677734, 0.036417438507080076, 0.036853790283203125, 0.036852161407470704, 0.03694387054443359, 0.03696640014648438, 0.03672883224487305, 0.03644211196899414, 0.036224414825439456, 0.03623177719116211, 0.036124671936035156, 0.03617990493774414, 0.03629062271118164, 0.03655654525756836, 0.03678028869628906, 0.03625772857666015, 0.03626176071166992, 0.03625961685180664, 0.03616400146484375, 0.03628844833374024, 0.036405311584472654, 0.03632332611083984, 0.03637968063354492, 0.03636323165893555, 0.03631840133666992, 0.036665374755859376, 0.0367828483581543, 0.038258304595947264, 0.036546974182128905, 0.03739648056030274, 0.03803955078125, 0.03667055892944336, 0.0376800308227539, 0.036569087982177735, 0.036529312133789064, 0.036300640106201175, 0.03618304061889648, 0.03613695907592773, 0.036318336486816406, 0.03625868988037109, 0.036300609588623044, 0.036249790191650394, 0.03709132766723633, 0.03636633682250977, 0.036225025177001956, 0.03628188705444336, 0.03629641723632813, 0.03642240142822266, 0.0361756477355957, 0.03628054428100586, 0.03631289672851563, 0.036149215698242185, 0.036046913146972656, 0.03615513610839844, 0.03607593536376953, 0.03611795043945312, 
0.036190784454345704, 0.03662438583374023, 0.03649039840698242, 0.03651670455932617, 0.03634995269775391, 0.03638623809814453, 0.036264511108398435, 0.03640115356445312, 0.03634995269775391, 0.036512928009033205, 0.03659001541137695, 0.03632988739013672, 0.036280319213867186, 0.03630080032348633, 0.03630246353149414, 0.03636444854736328, 0.036346080780029294, 0.03632121658325195, 0.03650112152099609, 0.036374977111816406, 0.03638886260986328, 0.03663052749633789, 0.03660748672485352, 0.036276287078857425, 0.037314624786376954, 0.03663091278076172, 0.03645849609375, 0.03659161758422851, 0.03653388977050781, 0.036507102966308595, 0.036514720916748046, 0.03639910507202149, 0.036435966491699216, 0.036567039489746093, 0.0365404167175293, 0.036634048461914065, 0.03654860687255859, 0.03716668701171875, 0.03671244812011719, 0.03636899185180664, 0.03645171356201172, 0.036578113555908204, 0.0365997428894043, 0.036681983947753904, 0.03646464157104492, 0.03638272094726563, 0.03642777633666992, 0.03650764846801758, 0.036334815979003905, 0.03635433578491211, 0.03628083038330078, 0.036972095489501956, 0.03686819076538086, 0.03646025466918945, 0.03721075057983399, 0.03627008056640625, 0.03621804809570312, 0.03623097610473633, 0.036160511016845705, 0.03624288177490234, 0.03628704071044922, 0.03623440170288086, 0.03631600189208985, 0.0362977294921875, 0.03627302551269531, 0.03629676818847656, 0.036671550750732425, 0.036792320251464845, 0.036603904724121096, 0.03644211196899414, 0.03646777725219726, 0.03629673767089844, 0.03654339218139648, 0.03764614486694336, 0.03814384078979492, 0.0381822738647461, 0.037730560302734376, 0.03742287826538086, 0.037591007232666014, 0.0376739501953125, 0.03721011352539062, 0.03694764709472656, 0.036843841552734374, 0.03687014389038086, 0.0366486701965332, 0.03653020858764648, 0.036570846557617186, 0.03672115325927734, 0.036714527130126955, 0.03680460739135742, 0.03662057495117187, 0.03662614440917969, 0.03702345657348633, 0.03800502395629883, 0.03809020614624024, 0.03818755340576172, 0.03813580703735352, 0.038212959289550784, 0.03772467041015625, 0.03802067184448242, 0.03761936187744141, 0.03851116943359375, 0.03789043045043945, 0.03718041610717773, 0.03679743957519531, 0.03672598266601562, 0.0364183349609375, 0.03668931198120117, 0.03651590347290039, 0.0364733772277832, 0.036378623962402344, 0.036429824829101565, 0.036443454742431644, 0.03649203109741211, 0.036671424865722654, 0.03651295852661133, 0.03688735961914062, 0.0370994873046875, 0.03683536148071289, 0.036765697479248044, 0.03653017425537109, 0.036808353424072265, 0.03696265411376953, 0.036741119384765625, 0.036628257751464846, 0.03659183883666992, 0.03667763137817383, 0.037187488555908206, 0.03717708969116211, 0.03707635116577149, 0.03728591918945313, 0.0369661750793457, 0.03668822479248047, 0.036614078521728516, 0.036559326171875, 0.03630080032348633, 0.03642313766479492, 0.036243999481201175, 0.036216224670410156, 0.036759998321533205, 0.0364931526184082, 0.036647232055664065, 0.03657113647460938, 0.03642777633666992, 0.036280319213867186, 0.03638060760498047, 0.03646265411376953, 0.03628012847900391, 0.03633580780029297, 0.036265567779541014, 0.03701558303833008, 0.036673824310302736, 0.03656211090087891, 0.03624185562133789, 0.03633404922485352, 0.03633356857299805, 0.03632735824584961, 0.03630070495605469, 0.036268192291259764, 0.03629244613647461, 0.03660815811157227, 0.03645977783203125, 0.0362850227355957, 0.036587711334228515, 0.0362608642578125, 0.036375518798828124, 0.03654182434082031, 
0.03651443099975586, 0.03651484680175781, 0.03630166244506836, 0.03640496063232422, 0.03642204666137695, 0.036453567504882815, 0.03637097549438477, 0.036282112121582034, 0.03638924789428711, 0.036493473052978516, 0.0363680305480957, 0.03681299209594727, 0.036679840087890624, 0.0365588493347168, 0.036308223724365235, 0.036203262329101565, 0.03628441619873047, 0.03632128143310547, 0.03632672119140625, 0.03629305648803711, 0.03624576187133789, 0.03631718444824219, 0.03638272094726563, 0.036337665557861325, 0.03668172836303711, 0.03636838531494141, 0.036942623138427735, 0.03671039962768555, 0.03631657409667969, 0.03639561462402344, 0.03689798355102539, 0.036993408203125, 0.03633750534057617, 0.037754878997802735, 0.03783126449584961, 0.03747225570678711, 0.036913150787353514, 0.036523231506347655, 0.03667228698730469, 0.03653411102294922, 0.03649740982055664, 0.036407455444335934, 0.03662643051147461, 0.0364194221496582, 0.03636822509765625, 0.036415103912353516, 0.03675820922851562, 0.03636019134521484, 0.0364031982421875, 0.03627932739257812, 0.03639718246459961, 0.03671327972412109, 0.03699919891357422, 0.03658956909179688, 0.036452350616455076, 0.037764289855957034, 0.036651840209960936, 0.036483070373535154, 0.03655811309814453, 0.036783905029296876, 0.0366003532409668, 0.036800609588623044, 0.036555072784423825, 0.036503166198730466, 0.03629641723632813, 0.036483230590820315, 0.03635865783691406, 0.036599617004394534, 0.03637472152709961, 0.036235198974609376, 0.03643807983398437, 0.036734718322753906, 0.036442367553710935, 0.037330623626708984, 0.03634201431274414, 0.03654038238525391, 0.03648694229125977, 0.03630931091308594, 0.03641548919677735, 0.03631718444824219, 0.03687628936767578, 0.0365219841003418, 0.03645849609375, 0.03652556610107422, 0.03665151977539063, 0.03648819351196289, 0.03640422439575195, 0.03738623809814453, 0.036462337493896484, 0.03648627090454101, 0.03647366333007813, 0.036423614501953125, 0.03628464126586914, 0.03648675155639648, 0.0362820816040039, 0.03663942337036133, 0.036449630737304686, 0.036432193756103515, 0.0364271354675293, 0.03644720077514649, 0.03631923294067383, 0.036337440490722656, 0.036520160675048825, 0.03729817581176758, 0.03765177536010742, 0.03668966293334961, 0.03656083297729492, 0.036518177032470706, 0.03675564956665039, 0.03654838562011719, 0.03644697570800781, 0.03718345642089844, 0.036542495727539065, 0.03641139221191406, 0.036601505279541015, 0.036528480529785155, 0.03700672149658203, 0.03642534255981445, 0.0363983039855957, 0.03656179046630859, 0.03626432037353516, 0.03648518371582031, 0.036297183990478515, 0.03653839874267578, 0.03653219223022461, 0.03648227310180664, 0.03639990234375, 0.03653468704223633, 0.03643724822998047, 0.03657353591918945, 0.03633356857299805, 0.03666739273071289, 0.03676979064941406, 0.03645836639404297, 0.03656307220458985, 0.04001811218261719, 0.036905918121337894, 0.03668851089477539, 0.03638265609741211, 0.03707052612304688, 0.03663846588134766, 0.0363853759765625, 0.0364813117980957, 0.036431873321533206, 0.03632128143310547, 0.03644348907470703, 0.03643247985839844, 0.03632339096069336, 0.036501407623291016, 0.036415584564208986, 0.03660550308227539, 0.03643436813354492, 0.03659667205810547, 0.036591167449951174, 0.03664681625366211, 0.03681539154052734, 0.03642777633666992, 0.04417244720458984, 0.038841182708740235, 0.03953785705566406, 0.03727142333984375, 0.03744659042358398, 0.037262657165527346, 0.03683417510986328, 0.0364071044921875, 0.03616476821899414, 0.03614169692993164, 0.03616995239257813, 
0.036206592559814454, 0.03617587280273438, 0.03643392181396484, 0.0363397102355957, 0.03653152084350586, 0.03627251052856445, 0.03627040100097656, 0.036104095458984374, 0.036261825561523436, 0.03650166320800781, 0.03659507369995117, 0.036278911590576175, 0.03619430541992188, 0.03628441619873047, 0.03639420700073242, 0.03631008148193359, 0.03649097442626953, 0.036280319213867186, 0.036613376617431644, 0.03611315155029297, 0.03658956909179688, 0.03655036926269531, 0.03635433578491211, 0.036208641052246096, 0.036190208435058595, 0.036222366333007815, 0.03652044677734375, 0.03623535919189453, 0.03644140625, 0.036192928314208984, 0.03615951919555664, 0.03644825744628906, 0.036245502471923825, 0.036517887115478515, 0.03627145767211914, 0.03618204879760742, 0.03631760025024414, 0.03642118453979492, 0.03631785583496094, 0.03623263931274414, 0.03627884674072265, 0.03621887969970703, 0.03736371231079102, 0.03658956909179688, 0.03672636795043945, 0.03619251251220703, 0.0362204475402832, 0.037548030853271484, 0.03691312026977539, 0.03653750228881836, 0.03646265411376953, 0.03648390579223633, 0.036229118347167966, 0.03653580856323242, 0.0362696647644043, 0.036430751800537106, 0.036578945159912106, 0.03630527877807617, 0.036485118865966795, 0.036531681060791014, 0.03655734252929688, 0.03651583862304687, 0.036519935607910156, 0.036345409393310546, 0.03626643371582031, 0.03634380722045898, 0.03660095977783203, 0.03684767913818359, 0.036279102325439454, 0.036626113891601565, 0.036503646850585936, 0.03672006225585937, 0.036571937561035155, 0.03646422576904297, 0.036265567779541014, 0.03618035125732422, 0.036219329833984376, 0.036429824829101565, 0.036345855712890625, 0.03616563034057617, 0.03640876770019531, 0.03638643264770508, 0.036385726928710935, 0.03636617660522461, 0.03632668685913086, 0.036238208770751956, 0.036364288330078126, 0.03621433639526367, 0.03640908813476563, 0.03642761611938477, 0.036450366973876956, 0.0364285774230957, 0.03709952163696289, 0.037689342498779296, 0.03671039962768555, 0.03662847900390625, 0.036601856231689454, 0.03634995269775391, 0.03685171127319336, 0.03637177658081055, 0.03637113571166992, 0.03627590560913086, 0.03635232162475586, 0.03636019134521484, 0.03636019134521484, 0.03641139221191406, 0.03638886260986328, 0.0361451530456543, 0.036185089111328124, 0.03630118560791016, 0.03701408004760742, 0.036910720825195316, 0.036783905029296876, 0.03664502334594726, 0.036450336456298825, 0.03636070251464844, 0.03664070510864258, 0.03722655868530273, 0.0372918701171875, 0.037044384002685546, 0.03714569473266602, 0.03681577682495117, 0.03656073760986328, 0.03651190567016602, 0.03669001770019531, 0.03645350265502929, 0.036504032135009766, 0.03640335845947266, 0.03631734466552734, 0.036345855712890625, 0.03703603363037109, 0.03658137512207031, 0.03652403259277344, 0.036669185638427734, 0.036649215698242185, 0.036466686248779294, 0.036603904724121096, 0.03671571350097656, 0.036633087158203126, 0.03797433471679688, 0.03646646499633789, 0.03648329544067383, 0.03681280136108398, 0.03662643051147461, 0.03642777633666992, 0.036585567474365234, 0.0365874252319336, 0.036490848541259766, 0.036961727142333985, 0.036563934326171874, 0.03657718276977539, 0.03655254364013672, 0.03653657531738281, 0.03643587112426758, 0.03637052917480469, 0.03674867248535156, 0.03634854507446289, 0.03630627059936523, 0.03673987197875977, 0.03652928161621094, 0.03653235244750976, 0.03644684982299805, 0.036413440704345705, 0.03648662567138672, 0.036714622497558594, 0.036432289123535154, 0.036462593078613284, 
0.036402942657470704, 0.036406654357910156, 0.03737484741210938, 0.036601856231689454, 0.03668582534790039, 0.03652608108520508, 0.03704012680053711, 0.03687168121337891, 0.0364958724975586, 0.036485118865966795, 0.036628223419189455, 0.03646182250976562, 0.03640627288818359, 0.036865345001220705, 0.036553409576416014, 0.036450302124023434, 0.03640422439575195, 0.03642665481567383, 0.03640060806274414, 0.03639769744873047, 0.03644745635986328, 0.03636905670166016, 0.03635955047607422, 0.036389278411865233, 0.03651414489746094, 0.03672063827514648, 0.03636956787109375, 0.036265823364257814, 0.03633776092529297, 0.03644508743286133, 0.03675545501708984, 0.03710083389282227, 0.03653705596923828, 0.036478015899658205, 0.03649631881713867, 0.03629260635375976, 0.0361998405456543, 0.03632803344726562, 0.03636553573608398, 0.036274143218994144, 0.03638355255126953, 0.03637247848510742, 0.03637452697753906, 0.03620220947265625, 0.036450111389160156, 0.03662691116333008, 0.036543903350830076, 0.036413665771484374, 0.03637833786010742, 0.03635676956176758, 0.03627609634399414, 0.036329025268554686, 0.036430400848388673, 0.03628646469116211, 0.03646054458618164, 0.036245502471923825, 0.03632332611083984, 0.03707030487060547, 0.03698947143554687, 0.037144126892089846, 0.03662015914916992, 0.036430400848388673, 0.03621068954467774, 0.03627190399169922, 0.036243518829345706, 0.03622630310058594, 0.03616796875, 0.0361396484375, 0.03647283172607422]",tokens/s,27.327051892812296,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, 
S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1070.915584,903.74144,0.0,501.219328,495.906816,s,1,8.14180078125,8.14180078125,0.0,8.14180078125,8.14180078125,8.14180078125,8.14180078125,[8.14180078125],,kWh,3.237335922914705e-05,3.5639003502244545e-06,1.0485841722029932e-05,4.642310130140144e-05,,MB,1362.292736,1067.319296,0.0,652.214272,602.88,s,10,0.6126129646301269,0.06126129646301269,0.000305294694587954,0.06126756858825684,0.06166249237060547,0.06168998222351074,0.06171197410583496,"[0.0616563835144043, 0.06134137725830078, 0.06155472183227539, 0.06090719985961914, 0.06112188720703125, 0.061717472076416015, 0.06105344009399414, 0.06126668930053711, 0.060725345611572265, 0.061268447875976566]",tokens/s,4178.821128190837,kWh,1.8598643592620629e-06,2.0510970341326353e-07,1.2291835735796999e-06,3.294157636255026e-06,tokens/kWh,77713342.30715032,MB,1401.106432,1081.99936,0.0,664.797184,611.073536,s,10,22.45428076171875,2.245428076171875,0.010569635837998102,2.24448974609375,2.2583637939453127,2.261645520019531,2.2642709008789064,"[2.257634521484375, 2.254486083984375, 2.26492724609375, 2.247927001953125, 2.245221435546875, 2.243758056640625, 2.23429296875, 2.228927734375, 2.240291259765625, 2.236814453125]",tokens/s,28.057010896294546,kWh,6.448376297907865e-05,7.112384224968399e-06,2.482942060662164e-05,9.642556781066867e-05,tokens/kWh,653353.6844055752,,s,630,22.448528137207038,0.035632584344773066,0.0005452552027973131,0.035535182952880856,0.03596945991516114,0.03620936222076416,0.037856841011047374,"[0.03516739273071289, 0.03565654373168945, 0.03591312026977539, 0.03582246398925781, 0.03563254547119141, 0.03565110397338867, 0.03562985610961914, 0.03565142440795899, 0.0354892463684082, 0.035631809234619144, 0.03559465789794922, 0.03542694473266601, 0.03553526306152344, 0.03546988677978516, 0.03553279876708984, 0.03585433578491211, 0.03600409698486328, 0.035785663604736326, 0.035943233489990234, 0.03588336181640625, 0.03617148971557617, 0.03557129669189453, 0.035627681732177736, 0.03554067230224609, 0.03555942535400391, 0.03611395263671875, 0.04376009750366211, 0.036741119384765625, 0.03701760101318359, 0.03562704086303711, 0.03571414566040039, 0.03566668701171875, 0.035606494903564455, 0.0356701774597168, 0.03584204864501953, 0.03564313507080078, 0.035491550445556644, 0.035582271575927735, 0.03568048095703125, 0.035631103515625, 0.0355464973449707, 0.03541056060791015, 0.035552833557128904, 0.035657310485839845, 0.035590145111083986, 0.035592830657958985, 0.03546268844604492, 0.035588798522949217, 0.035668128967285155, 0.03557875061035156, 0.035570655822753906, 0.03549942398071289, 0.03556208038330078, 0.035864192962646486, 0.03573183822631836, 
0.03595030212402344, 0.03563139343261719, 0.03563935852050781, 0.03557331085205078, 0.03582828903198242, 0.03566915130615234, 0.035609600067138675, 0.03568809509277344, 0.03525881576538086, 0.0356558723449707, 0.03559231948852539, 0.0353524169921875, 0.035520511627197264, 0.035362144470214844, 0.035537025451660154, 0.03532838439941406, 0.03546908950805664, 0.03542678451538086, 0.035436607360839846, 0.035454975128173825, 0.035429439544677734, 0.035496063232421875, 0.03532803344726562, 0.03554793548583984, 0.03538950347900391, 0.03563529586791992, 0.035491680145263674, 0.03669401550292969, 0.03540790557861328, 0.0355552978515625, 0.03542835235595703, 0.03568364715576172, 0.0359733772277832, 0.03792892837524414, 0.03578927993774414, 0.03600735855102539, 0.036006175994873046, 0.03568873596191406, 0.03558147048950195, 0.03552025604248047, 0.03549875259399414, 0.035460575103759766, 0.035740062713623046, 0.03578275299072266, 0.03555942535400391, 0.035708160400390626, 0.03572124862670899, 0.03554755020141601, 0.03560006332397461, 0.03667827224731445, 0.03572851181030273, 0.03563814544677734, 0.0360140495300293, 0.03621686553955078, 0.03815423965454102, 0.035659774780273434, 0.03595248031616211, 0.03560809707641602, 0.035611263275146486, 0.03579289627075195, 0.035856670379638675, 0.035606239318847654, 0.035969024658203126, 0.03618406295776367, 0.03669811248779297, 0.03608367919921875, 0.03577164840698242, 0.035676799774169925, 0.03556777572631836, 0.035757568359375, 0.03607807922363281, 0.03567193603515625, 0.03598963165283203, 0.035811328887939455, 0.03584422302246094, 0.035983230590820314, 0.03572022247314453, 0.03573648071289062, 0.03612268829345703, 0.03702374267578125, 0.03605299377441406, 0.03892428970336914, 0.035915775299072264, 0.035743873596191404, 0.03610771179199219, 0.03575833511352539, 0.0356058235168457, 0.03551027297973633, 0.03564419174194336, 0.03572243118286133, 0.035644607543945314, 0.03593807983398437, 0.03576732635498047, 0.03588188934326172, 0.03569049453735352, 0.03560764694213867, 0.03574700927734375, 0.03585209655761719, 0.035954048156738284, 0.03608195114135742, 0.035882686614990236, 0.0357639045715332, 0.03579379272460938, 0.036103710174560544, 0.03587126541137695, 0.03598124694824219, 0.03617411041259765, 0.03626774215698242, 0.03623731231689453, 0.0358928337097168, 0.03575875091552734, 0.03583081436157227, 0.03634972763061523, 0.0364471664428711, 0.03602841567993164, 0.03576422500610352, 0.03576816177368164, 0.035700897216796874, 0.03594771194458008, 0.03583878326416016, 0.035959007263183594, 0.03575708770751953, 0.0356280632019043, 0.035753631591796876, 0.035896480560302736, 0.035746177673339846, 0.036020767211914065, 0.035939903259277345, 0.03601238250732422, 0.035852382659912106, 0.03597926330566406, 0.03584000015258789, 0.03569836807250976, 0.03580960083007813, 0.0353416633605957, 0.03569526290893555, 0.035525760650634765, 0.035602943420410156, 0.0357072639465332, 0.03651980972290039, 0.03579916763305664, 0.03557324981689453, 0.03545328140258789, 0.035438144683837894, 0.035363327026367186, 0.03551446533203125, 0.03555123138427734, 0.0357289924621582, 0.035684768676757815, 0.03565142440795899, 0.035726913452148436, 0.035719329833984376, 0.036644481658935545, 0.03577936172485351, 0.035493824005126955, 0.03549398422241211, 0.03553510284423828, 0.03583926391601563, 0.03555376052856445, 0.03565116882324219, 0.03588131332397461, 0.03564755249023437, 0.03567817687988281, 0.03560796737670899, 0.03549795150756836, 0.03550678253173828, 0.03539769744873047, 
0.03614044952392578, 0.03599225616455078, 0.03585424041748047, 0.03565750503540039, 0.03554864120483398, 0.0355747184753418, 0.0355055046081543, 0.03553113555908203, 0.035573760986328126, 0.03584124755859375, 0.035998592376708986, 0.035862529754638675, 0.03566387176513672, 0.035579681396484375, 0.03555868911743164, 0.03561983871459961, 0.03565084838867188, 0.03558195114135742, 0.035650337219238284, 0.03564121627807617, 0.03553200149536133, 0.03579983901977539, 0.03564291381835937, 0.03543657684326172, 0.03549433517456055, 0.035538944244384765, 0.03563724899291992, 0.03564662551879883, 0.03581423950195312, 0.035975166320800785, 0.03542457580566406, 0.035744129180908205, 0.03541196823120117, 0.03548303985595703, 0.03538793563842774, 0.03584320068359375, 0.03545548629760742, 0.03582611083984375, 0.03553484725952148, 0.03538438415527344, 0.03550508880615234, 0.03544473648071289, 0.03561180877685547, 0.035426624298095705, 0.03538915252685547, 0.03533427047729492, 0.03528364944458008, 0.03550374221801758, 0.035494270324707033, 0.035687744140625, 0.03557660675048828, 0.035806209564208984, 0.03527926254272461, 0.03543513488769531, 0.03549350357055664, 0.035727455139160154, 0.035708255767822265, 0.03557839965820313, 0.03596316909790039, 0.03538127899169922, 0.035416160583496094, 0.03529545593261719, 0.035382560729980465, 0.03519120025634766, 0.035319072723388675, 0.03559516906738281, 0.03566748809814453, 0.03540742492675781, 0.035713729858398435, 0.03540991973876953, 0.03559628677368164, 0.03555327987670898, 0.035389438629150394, 0.0354951057434082, 0.035531585693359374, 0.035340286254882815, 0.03632041549682617, 0.037497215270996094, 0.03741574478149414, 0.03715651321411133, 0.03550172805786133, 0.0352044792175293, 0.03666019058227539, 0.03556560134887695, 0.03566175842285156, 0.03558639907836914, 0.03548508834838867, 0.03543497467041016, 0.035440673828125, 0.03521859359741211, 0.03527948760986328, 0.03632361602783203, 0.03547721481323242, 0.035504127502441404, 0.035673568725585934, 0.03554329681396484, 0.03570223999023438, 0.03530611038208008, 0.03555142211914063, 0.03539168167114258, 0.0355491828918457, 0.035511775970458986, 0.03540617752075195, 0.035514366149902346, 0.035559009552001954, 0.03538985443115234, 0.03552012634277344, 0.03551824188232422, 0.035590816497802734, 0.03572320175170898, 0.03602441787719726, 0.03557190322875976, 0.0355788803100586, 0.03548387145996094, 0.03581388854980469, 0.035651649475097656, 0.03548153686523438, 0.03548720169067383, 0.03547600173950195, 0.035555328369140625, 0.03595065689086914, 0.03544841766357422, 0.0356662712097168, 0.035493728637695315, 0.03559766387939453, 0.035412033081054686, 0.035429119110107425, 0.0356231689453125, 0.035406879425048825, 0.035393856048583985, 0.03550864028930664, 0.03587891387939453, 0.035399681091308595, 0.03550822448730469, 0.03548390579223633, 0.03541372680664062, 0.03553488159179687, 0.03563724899291992, 0.0364279670715332, 0.03768035125732422, 0.036200191497802736, 0.03551932907104492, 0.03558793640136719, 0.035433727264404295, 0.03539190292358398, 0.035737953186035155, 0.035446945190429686, 0.035358463287353516, 0.0356104621887207, 0.03542704010009766, 0.035544769287109375, 0.03586150360107422, 0.03559894561767578, 0.035565502166748045, 0.03552508926391602, 0.03543657684326172, 0.03509872055053711, 0.03562092971801758, 0.03552134323120117, 0.039163902282714845, 0.03572431945800781, 0.03539206314086914, 0.035221920013427735, 0.03534985733032227, 0.03526518249511719, 0.03542630386352539, 0.03549148941040039, 
0.03537254333496094, 0.035205665588378905, 0.03544710540771484, 0.035320926666259765, 0.03555039978027344, 0.03540963363647461, 0.03535257720947266, 0.035448833465576174, 0.03543027114868164, 0.03527433776855469, 0.035428897857666015, 0.03547145462036133, 0.03541084671020508, 0.0354601936340332, 0.035253246307373046, 0.03527065658569336, 0.03549699020385742, 0.035335071563720705, 0.035496929168701175, 0.035394783020019534, 0.035154720306396485, 0.035299327850341795, 0.03540291213989258, 0.035203487396240234, 0.03551071929931641, 0.03533359909057617, 0.035469249725341795, 0.03543510437011719, 0.03544473648071289, 0.035221664428710935, 0.0353870735168457, 0.03528239822387695, 0.03522604751586914, 0.03542204666137695, 0.03534924697875977, 0.035380897521972654, 0.03541219329833984, 0.03554217529296875, 0.03546099090576172, 0.03537587356567383, 0.03551372909545898, 0.03550271987915039, 0.035389438629150394, 0.035544639587402345, 0.035285438537597656, 0.035299327850341795, 0.035415489196777346, 0.035373184204101564, 0.03598137664794922, 0.035398017883300784, 0.03542147064208984, 0.03522947311401367, 0.03507020950317383, 0.03534038543701172, 0.035571617126464845, 0.03534643173217773, 0.03541401672363281, 0.035319297790527344, 0.03510140609741211, 0.035201118469238284, 0.03541779327392578, 0.03515014266967773, 0.0353583984375, 0.03528681564331055, 0.035310142517089846, 0.03533708953857422, 0.03549673461914062, 0.035350528717041016, 0.03614310455322266, 0.03555942535400391, 0.03533776092529297, 0.035258846282958986, 0.035299198150634765, 0.035598304748535155, 0.03521142578125, 0.0351723518371582, 0.03510681533813476, 0.035250175476074216, 0.035092384338378906, 0.03541411209106445, 0.035264511108398434, 0.03522780990600586, 0.035112800598144533, 0.0353546257019043, 0.03515625762939453, 0.03539324951171875, 0.035464481353759764, 0.03513935852050781, 0.035243167877197265, 0.03523152160644531, 0.0351418571472168, 0.035277984619140626, 0.03631196975708008, 0.035304672241210935, 0.03584080123901367, 0.03526847839355469, 0.035636543273925785, 0.035936798095703125, 0.03551641464233399, 0.03540582275390625, 0.03547715377807617, 0.035324256896972654, 0.035510337829589844, 0.035753761291503906, 0.03515180969238281, 0.03545548629760742, 0.03546217727661133, 0.0352342414855957, 0.03566412734985352, 0.035389438629150394, 0.03515379333496094, 0.03536703872680664, 0.035174079895019535, 0.03519327926635742, 0.03534377670288086, 0.035244033813476565, 0.03539558410644531, 0.03530137634277344, 0.0353199348449707, 0.03548899078369141, 0.03546166229248047, 0.03529916763305664, 0.03531296157836914, 0.03533049774169922, 0.035476318359375, 0.03547679901123047, 0.03547702407836914, 0.035420097351074216, 0.0355563850402832, 0.035644737243652344, 0.03541420745849609, 0.03567795181274414, 0.03543308639526367, 0.0353073616027832, 0.03549321746826172, 0.03548652648925781, 0.03548108673095703, 0.035575870513916016, 0.035639743804931644, 0.03555363082885742, 0.03585843276977539, 0.035530399322509766, 0.03534643173217773, 0.0364760627746582, 0.03538415908813477, 0.03555942535400391, 0.03542835235595703, 0.03534960174560547, 0.03558083343505859, 0.03573376083374023, 0.03535676956176758, 0.03543759918212891, 0.03534297561645508, 0.0352911376953125, 0.03559532928466797, 0.03560543823242188, 0.0357314567565918, 0.03812473678588867, 0.035705665588378906, 0.03555347061157227, 0.0355997428894043, 0.035547584533691404, 0.0357130241394043, 0.035592254638671876, 0.035409854888916015, 0.035482654571533205, 0.03550054550170898, 
0.03550870513916016, 0.03551824188232422, 0.03560432052612305, 0.03539759826660156, 0.03564166259765625, 0.035383392333984375, 0.0354994888305664, 0.03536540985107422, 0.035423423767089846, 0.035545600891113284, 0.03560003280639648, 0.0352061767578125, 0.03550921630859375, 0.035595584869384765, 0.03553116989135742, 0.036340160369873045, 0.03657926559448242, 0.03961967849731445, 0.035963520050048825, 0.03570457458496094, 0.03550991821289062, 0.03521014404296875, 0.035468734741210935, 0.035324127197265624, 0.03543804931640625, 0.03523011016845703, 0.035278526306152344, 0.035487518310546876, 0.035324607849121094, 0.035238208770751955, 0.03525004959106445, 0.03526144027709961, 0.03529347229003906, 0.035257057189941404, 0.035261566162109376, 0.03549401473999023, 0.035402496337890624, 0.03557187271118164, 0.035440383911132814, 0.03538159942626953, 0.03523110580444336, 0.035342208862304686, 0.03539785766601562, 0.035391456604003904, 0.035509761810302735, 0.03541279983520508, 0.03526985549926758, 0.035293598175048825, 0.03521305465698242, 0.03525923156738281, 0.03557558441162109, 0.03540351867675781, 0.03537152099609375, 0.035396671295166014, 0.03549446487426758, 0.03527897644042969, 0.03533315277099609, 0.03527302551269531, 0.03550636672973633, 0.03529059219360352, 0.03524710464477539, 0.035438625335693356, 0.035224864959716794, 0.03536352157592773, 0.03544240188598633, 0.035537185668945315, 0.03531603240966797, 0.03538297653198242, 0.03516521453857422, 0.03542729568481445, 0.03555763244628906, 0.03539737701416015, 0.03568025588989258, 0.035676223754882816]",tokens/s,28.06420074177667,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,864.722944,561.905664,0.0,159.383552,142.313472,s,1,7.31664794921875,7.31664794921875,0.0,7.31664794921875,7.31664794921875,7.31664794921875,7.31664794921875,[7.31664794921875],,kWh,1.3752497383347873e-05,1.5099028171656185e-06,4.732503785992437e-06,1.999490398650593e-05,,MB,1285.419008,635.305984,0.0,220.20096,185.324544,s,19,0.20845401477813721,0.010971263935691433,0.00019792755877006405,0.010882623672485351,0.011210566139221192,0.0112919713973999,0.01159404872894287,"[0.011200703620910645, 0.010878687858581542, 0.010929920196533204, 0.010880864143371582, 0.010996735572814942, 0.010869888305664063, 0.010848095893859863, 0.011669568061828613, 0.010865471839904784, 0.011008831977844238, 0.010859423637390137, 0.010893823623657227, 0.010854432106018067, 0.010871392250061035, 0.01086473560333252, 0.011250016212463378, 0.010882623672485351, 0.010940896034240723, 0.010887904167175293]",tokens/s,23333.68347535487,kWh,3.2363195273462276e-07,3.5690945112395764e-08,1.6334732210418277e-07,5.226702199512013e-07,tokens/kWh,489792588.57315654,MB,1298.972672,639.500288,0.0,224.395264,185.327104,s,19,9.95060107421875,0.5237158460115131,0.0032827093571487246,0.5228843994140625,0.5280344604492188,0.528894921875,0.531578837890625,"[0.5322498168945312, 0.5285221557617188, 0.5243967895507813, 0.5250255126953125, 0.5279125366210937, 0.52364306640625, 0.5201376342773437, 0.52427294921875, 0.5194375, 0.5228843994140625, 0.519981201171875, 0.5218004760742188, 0.5222012329101563, 0.5207174072265625, 0.5252326049804688, 0.520295654296875, 0.5222452392578125, 0.5226543579101562, 0.5269905395507812]",tokens/s,120.29424062646186,kWh,1.535126922884526e-05,1.6927327975267244e-06,5.580926279160777e-06,2.2624928305532757e-05,tokens/kWh,2784539.2104333793,,s,1197,9.94207020950319,0.00830582306558327,0.00016756898697132985,0.008274080276489258,0.008383891105651856,0.008465491104125977,0.009133852157592772,"[0.009010175704956054, 0.009241344451904297, 0.008521984100341797, 0.008849056243896484, 0.011032928466796875, 0.008367903709411621, 0.008412704467773438, 0.008352800369262696, 0.009291423797607421, 0.009922112464904785, 0.009171392440795898, 0.008361824035644531, 0.008312416076660157, 0.008446528434753417, 0.008272159576416015, 0.00840880012512207, 0.008267775535583496, 0.00828540802001953, 0.008268128395080567, 0.008266336441040039, 0.008304479598999024, 0.008271712303161621, 0.008294560432434083, 0.00828825569152832, 0.008285823822021485, 0.008241536140441895, 0.008274016380310058, 
0.008263392448425293, 0.00833456039428711, 0.008290783882141113, 0.008264191627502441, 0.008362144470214843, 0.00829971218109131, 0.008293024063110352, 0.008313152313232422, 0.008283424377441406, 0.00824771213531494, 0.008292351722717285, 0.008325375556945801, 0.00825705623626709, 0.008290528297424316, 0.008230560302734375, 0.008268128395080567, 0.00829030418395996, 0.008255488395690918, 0.00825164794921875, 0.008232704162597656, 0.008253439903259278, 0.00825331211090088, 0.00833244800567627, 0.008424256324768066, 0.008376480102539063, 0.008274080276489258, 0.008257375717163087, 0.008310784339904785, 0.008292032241821288, 0.008310239791870117, 0.008313695907592773, 0.00828172779083252, 0.00852620792388916, 0.008361632347106934, 0.00828656005859375, 0.008284159660339355, 0.008292256355285644, 0.008503487586975098, 0.008374496459960938, 0.008377471923828124, 0.008329888343811036, 0.008374272346496582, 0.008324224472045899, 0.008403840065002442, 0.008370176315307617, 0.008347647666931152, 0.008462335586547852, 0.00840499210357666, 0.00834563159942627, 0.008286175727844238, 0.008325023651123047, 0.008295552253723144, 0.008355072021484375, 0.008336640357971192, 0.008347552299499511, 0.00833516788482666, 0.008356608390808106, 0.008376319885253907, 0.008503168106079102, 0.00835801601409912, 0.00858521556854248, 0.008353792190551757, 0.008339455604553223, 0.00841113567352295, 0.008392352104187012, 0.008333855628967286, 0.008303584098815918, 0.008338272094726562, 0.008364031791687012, 0.008372223854064942, 0.008488800048828125, 0.00841744041442871, 0.008345952033996582, 0.008402591705322265, 0.008353792190551757, 0.008482048034667969, 0.008317567825317384, 0.008277600288391113, 0.00830668830871582, 0.008384544372558594, 0.008343040466308594, 0.008460576057434083, 0.008448736190795898, 0.008574975967407226, 0.008316703796386719, 0.008360159873962403, 0.008491104125976562, 0.008320128440856934, 0.00834598445892334, 0.008439647674560546, 0.008342080116271973, 0.008425472259521484, 0.008468000411987305, 0.008311264038085938, 0.008343551635742187, 0.00846828842163086, 0.008348128318786622, 0.00836083221435547, 0.008522591590881347, 0.00829849624633789, 0.008339455604553223, 0.008322239875793457, 0.00829475212097168, 0.008276448249816895, 0.00829849624633789, 0.008316927909851075, 0.008367584228515626, 0.008280672073364258, 0.00831657600402832, 0.008348031997680664, 0.008296128273010253, 0.008298720359802246, 0.008280351638793945, 0.008265439987182616, 0.008351743698120117, 0.008299936294555664, 0.008303199768066406, 0.008265055656433106, 0.00848265552520752, 0.008251999855041504, 0.008306719779968262, 0.008310976028442383, 0.008373567581176758, 0.008286911964416504, 0.008253439903259278, 0.008298720359802246, 0.008322848320007323, 0.008320608139038087, 0.008282527923583985, 0.008263680458068847, 0.00841539192199707, 0.008337120056152343, 0.008273056030273437, 0.008324064254760743, 0.008295488357543946, 0.00832592010498047, 0.008262111663818359, 0.00828384017944336, 0.008316224098205566, 0.008356863975524903, 0.008438464164733886, 0.008272895812988281, 0.008351872444152832, 0.008320896148681641, 0.008317184448242188, 0.008296192169189453, 0.008376319885253907, 0.008370176315307617, 0.008408703804016114, 0.00829478359222412, 0.008377632141113282, 0.008366111755371095, 0.008286399841308594, 0.008328895568847656, 0.008327136039733887, 0.008302592277526855, 0.008319135665893554, 0.008278688430786134, 0.008276288032531737, 0.008311936378479004, 0.008293312072753906, 0.008291135787963868, 0.008236255645751953, 
0.008336159706115722, 0.008475935935974122, 0.008421728134155274, 0.0083439359664917, 0.008259584426879883, 0.00828940773010254, 0.008260607719421387, 0.008304544448852539, 0.008240768432617188, 0.008294560432434083, 0.008263872146606446, 0.008304863929748536, 0.008226143836975097, 0.008275903701782227, 0.008486559867858887, 0.008403807640075684, 0.008303903579711914, 0.008464863777160644, 0.008239359855651855, 0.008304512023925781, 0.008279359817504883, 0.008303423881530762, 0.008259008407592774, 0.00826790428161621, 0.008333375930786133, 0.008272447586059571, 0.00839475154876709, 0.0082774715423584, 0.008372575759887695, 0.008326911926269531, 0.008277824401855469, 0.008274080276489258, 0.00828054428100586, 0.008261664390563965, 0.008249119758605957, 0.008268863677978515, 0.008370783805847168, 0.008323200225830078, 0.008422656059265137, 0.008250080108642577, 0.00833347225189209, 0.008502367973327637, 0.008291071891784667, 0.008278559684753418, 0.008386015892028809, 0.008310432434082032, 0.00884489631652832, 0.00860262393951416, 0.008287712097167969, 0.008317791938781738, 0.008439231872558593, 0.008311039924621582, 0.008327487945556641, 0.008281279563903808, 0.008318719863891601, 0.008274880409240723, 0.008282208442687988, 0.008240863800048829, 0.008267168045043946, 0.008354463577270509, 0.008238304138183593, 0.008297183990478515, 0.008274815559387206, 0.008275039672851562, 0.008243200302124023, 0.00825222396850586, 0.008290240287780762, 0.00829030418395996, 0.008273280143737793, 0.008239583969116211, 0.008267135620117187, 0.008289055824279785, 0.00833897590637207, 0.008333696365356445, 0.008228960037231446, 0.008345151901245117, 0.008292575836181641, 0.008626655578613281, 0.00966537570953369, 0.010040224075317383, 0.008355744361877441, 0.008376416206359863, 0.008361984252929687, 0.008348959922790528, 0.008327775955200196, 0.008437248229980468, 0.008356479644775391, 0.00833135986328125, 0.008333087921142578, 0.00837235164642334, 0.008294400215148925, 0.008294688224792481, 0.00831827163696289, 0.008319392204284667, 0.008390656471252441, 0.008319295883178712, 0.008330944061279297, 0.008390656471252441, 0.00831004810333252, 0.008298751831054687, 0.008307168006896973, 0.008372223854064942, 0.00827564811706543, 0.008367520332336426, 0.008258560180664062, 0.008282015800476075, 0.008318400382995606, 0.008321599960327148, 0.008291775703430176, 0.008330880165100097, 0.008259807586669922, 0.008297087669372559, 0.00836780834197998, 0.008287775993347167, 0.008335712432861329, 0.008288384437561035, 0.008275551795959473, 0.008407872200012207, 0.008361984252929687, 0.008325087547302245, 0.008409119606018067, 0.008300640106201173, 0.008380640029907226, 0.008294207572937012, 0.008303744316101074, 0.008249343872070313, 0.008278016090393067, 0.008251392364501953, 0.008293888092041016, 0.008294912338256836, 0.008253376007080078, 0.008337471961975098, 0.0085830717086792, 0.008269951820373536, 0.008498656272888183, 0.008318623542785645, 0.00828873634338379, 0.008716671943664552, 0.008281760215759278, 0.008343903541564942, 0.008239104270935058, 0.008326592445373536, 0.008237728118896484, 0.008320927619934082, 0.008314496040344238, 0.008349504470825195, 0.00833795166015625, 0.008261183738708495, 0.008259807586669922, 0.008239359855651855, 0.008243200302124023, 0.0082608642578125, 0.008229056358337402, 0.00843017578125, 0.00826483154296875, 0.008287360191345215, 0.008383968353271484, 0.008306591987609864, 0.008304703712463378, 0.008317215919494629, 0.008300543785095215, 0.008421504020690918, 0.008343232154846192, 
0.008246463775634766, 0.008260576248168946, 0.00837183952331543, 0.00827228832244873, 0.008226816177368163, 0.00827187156677246, 0.008248736381530761, 0.00848303985595703, 0.008263968467712402, 0.00830019187927246, 0.008309184074401855, 0.00826905632019043, 0.008299263954162597, 0.008236576080322265, 0.008319456100463868, 0.008371456146240235, 0.008272640228271485, 0.008218496322631837, 0.008257984161376953, 0.008222399711608886, 0.008199775695800781, 0.008231328010559083, 0.008254752159118653, 0.008253600120544434, 0.008299072265625, 0.008197855949401856, 0.008323360443115234, 0.008255488395690918, 0.008243200302124023, 0.008249343872070313, 0.008210176467895508, 0.00823040008544922, 0.008329983711242675, 0.008195775985717774, 0.008255647659301759, 0.008208224296569824, 0.008229087829589844, 0.008228960037231446, 0.008194047927856446, 0.008212479591369629, 0.008224512100219726, 0.00823526382446289, 0.008238752365112305, 0.0082041597366333, 0.008232768058776856, 0.008194720268249512, 0.008239104270935058, 0.008252896308898926, 0.008259455680847167, 0.008336319923400878, 0.00824841594696045, 0.008276543617248534, 0.008233023643493651, 0.008294528007507324, 0.008267871856689453, 0.008245023727416992, 0.008265727996826172, 0.008281472206115723, 0.008264320373535156, 0.008304160118103027, 0.008239359855651855, 0.00823305606842041, 0.008254624366760254, 0.008250335693359374, 0.008285247802734375, 0.008260064125061035, 0.008223199844360351, 0.008222016334533691, 0.008207039833068848, 0.008232959747314453, 0.008245311737060546, 0.008251615524291992, 0.008238847732543946, 0.008380384445190429, 0.008345536231994629, 0.00828217601776123, 0.008259584426879883, 0.008235008239746093, 0.00823852825164795, 0.00820691204071045, 0.008226559638977051, 0.008198687553405762, 0.008228575706481934, 0.008237055778503418, 0.008232319831848144, 0.008264320373535156, 0.008228863716125488, 0.008298784255981446, 0.00819593620300293, 0.008292991638183594, 0.008255295753479004, 0.008197440147399902, 0.00822764778137207, 0.008201824188232423, 0.008222880363464356, 0.008233280181884765, 0.00845315170288086, 0.008220959663391113, 0.00825376033782959, 0.008247679710388183, 0.008252863883972167, 0.00822550392150879, 0.008243040084838867, 0.008448032379150391, 0.008254816055297851, 0.008319616317749023, 0.008265727996826172, 0.008296511650085448, 0.00843769645690918, 0.008311840057373046, 0.008304736137390138, 0.008321439743041992, 0.008394816398620605, 0.008349247932434082, 0.008317791938781738, 0.0083405122756958, 0.008383456230163575, 0.008386143684387207, 0.00825385570526123, 0.008517631530761719, 0.008572896003723145, 0.009304096221923828, 0.008326848030090333, 0.008355520248413085, 0.008329055786132812, 0.008305472373962402, 0.008312800407409667, 0.008294400215148925, 0.008263680458068847, 0.008289536476135255, 0.008278016090393067, 0.008276608467102051, 0.008302720069885253, 0.008284383773803711, 0.008258879661560059, 0.008243680000305176, 0.008284064292907714, 0.008290399551391601, 0.008285504341125488, 0.008260448455810547, 0.00828163242340088, 0.008387999534606934, 0.008305631637573242, 0.00823084831237793, 0.008243488311767579, 0.00823846435546875, 0.008374624252319336, 0.008232064247131347, 0.008241888046264648, 0.00828611183166504, 0.008237312316894532, 0.008255743980407714, 0.008221887588500976, 0.0082128324508667, 0.008205696105957032, 0.008249407768249511, 0.00823151969909668, 0.008237248420715331, 0.008210432052612305, 0.008255488395690918, 0.008265727996826172, 0.008243200302124023, 0.00822480010986328, 
0.008251359939575195, 0.00824944019317627, 0.00826972770690918, 0.008240639686584473, 0.008301055908203125, 0.008233087539672852, 0.008218784332275391, 0.00825545597076416, 0.008290047645568848, 0.008216287612915039, 0.008216863632202148, 0.008245280265808105, 0.008175616264343261, 0.008305983543395996, 0.008229663848876953, 0.008201631546020507, 0.008206944465637207, 0.008203935623168946, 0.008186079978942872, 0.008231231689453125, 0.008203968048095703, 0.008199616432189942, 0.0082194242477417, 0.00821225643157959, 0.008204287528991699, 0.00821350383758545, 0.008246623992919921, 0.008267071723937989, 0.008337759971618652, 0.008363776206970215, 0.008303903579711914, 0.008252384185791016, 0.008214688301086426, 0.008222751617431641, 0.00823583984375, 0.008203264236450195, 0.008228863716125488, 0.008177663803100586, 0.008222623825073242, 0.008208479881286621, 0.008208576202392579, 0.008236000061035156, 0.00819696044921875, 0.008243200302124023, 0.008383711814880372, 0.008217375755310058, 0.008310144424438476, 0.00825596809387207, 0.00823516845703125, 0.008220479965209962, 0.008214367866516113, 0.008248640060424804, 0.008258079528808594, 0.008280192375183106, 0.00830348777770996, 0.008315487861633301, 0.008282527923583985, 0.008254624366760254, 0.008281984329223634, 0.008276960372924805, 0.008319135665893554, 0.008244447708129883, 0.008254079818725587, 0.00835807991027832, 0.00828223991394043, 0.008272640228271485, 0.008274880409240723, 0.008292351722717285, 0.008390656471252441, 0.008290431976318359, 0.008257535934448243, 0.008341407775878907, 0.008285247802734375, 0.008388863563537598, 0.00824777603149414, 0.008274463653564454, 0.008254560470581054, 0.008279840469360351, 0.00824396800994873, 0.008245183944702148, 0.008487008094787597, 0.008370176315307617, 0.008273088455200195, 0.00823804759979248, 0.008267104148864746, 0.008243712425231933, 0.008256959915161132, 0.00839897632598877, 0.008262080192565918, 0.008273920059204102, 0.008292351722717285, 0.008293408393859864, 0.008301535606384277, 0.008303647994995117, 0.008287199974060059, 0.008255488395690918, 0.008253439903259278, 0.008224063873291016, 0.00829100799560547, 0.008262944221496583, 0.008288607597351075, 0.00839299201965332, 0.008276063919067383, 0.008310463905334473, 0.008285856246948243, 0.008276639938354492, 0.008341504096984862, 0.008265727996826172, 0.008263680458068847, 0.00834496021270752, 0.00828889560699463, 0.008282112121582032, 0.008382464408874512, 0.00827139186859131, 0.00832310390472412, 0.008288543701171875, 0.00828384017944336, 0.008241151809692383, 0.008196096420288086, 0.008237055778503418, 0.00822873592376709, 0.008234335899353027, 0.00829315185546875, 0.00821008014678955, 0.008205727577209472, 0.008215488433837891, 0.008341504096984862, 0.008208383560180664, 0.008232095718383788, 0.008231679916381835, 0.008283455848693848, 0.008268128395080567, 0.0082161283493042, 0.00829529571533203, 0.008194047927856446, 0.008206624031066895, 0.008201951980590821, 0.0082227201461792, 0.008359935760498047, 0.008204287528991699, 0.008210432052612305, 0.008185215950012207, 0.008226688385009766, 0.008233728408813477, 0.008226911544799804, 0.008207551956176758, 0.008257311820983887, 0.008219807624816895, 0.008318304061889648, 0.008308608055114746, 0.00851318359375, 0.008242079734802246, 0.008204575538635253, 0.00825430393218994, 0.00821132755279541, 0.008212224006652832, 0.008196352005004882, 0.008195679664611816, 0.008215007781982421, 0.008246591567993164, 0.008268416404724121, 0.00818995189666748, 0.00845359992980957, 
0.008204832077026367, 0.00820633602142334, 0.008221759796142578, 0.008196191787719726, 0.008202336311340331, 0.008190719604492188, 0.00819814395904541, 0.00821452808380127, 0.008300543785095215, 0.008187904357910156, 0.008355072021484375, 0.008284671783447266, 0.008299903869628906, 0.008248224258422851, 0.008165375709533691, 0.008286208152770995, 0.00827187156677246, 0.008255583763122559, 0.008275168418884277, 0.00828831958770752, 0.00824505615234375, 0.008299327850341797, 0.008257535934448243, 0.00828825569152832, 0.00834768009185791, 0.008258527755737304, 0.0083056640625, 0.008209823608398437, 0.008303199768066406, 0.008523039817810059, 0.008229599952697755, 0.008232959747314453, 0.008239104270935058, 0.008228863716125488, 0.008308735847473145, 0.008322912216186524, 0.008259327888488769, 0.008243904113769532, 0.008277759552001954, 0.008230688095092774, 0.008226943969726562, 0.008215776443481445, 0.008215071678161621, 0.008255807876586915, 0.00823305606842041, 0.008265631675720215, 0.008226816177368163, 0.008318112373352051, 0.008210816383361817, 0.00822332763671875, 0.008328096389770508, 0.008274911880493164, 0.008261568069458007, 0.008263615608215332, 0.008247072219848633, 0.008264032363891602, 0.008303744316101074, 0.00821891212463379, 0.008206944465637207, 0.008476672172546386, 0.008301600456237793, 0.008305631637573242, 0.00827187156677246, 0.008268896102905274, 0.008331392288208008, 0.008385312080383301, 0.008309951782226562, 0.008308671951293945, 0.008289152145385743, 0.008279583930969238, 0.008294879913330078, 0.008267775535583496, 0.008287967681884766, 0.008297792434692383, 0.008233951568603516, 0.008265376091003418, 0.008280223846435546, 0.008268959999084472, 0.008321696281433106, 0.0082227201461792, 0.008199616432189942, 0.008370752334594727, 0.008370176315307617, 0.008353792190551757, 0.00823526382446289, 0.008238847732543946, 0.00824294376373291, 0.008331263542175293, 0.008261568069458007, 0.00829088020324707, 0.008281855583190919, 0.008223999977111816, 0.008247455596923829, 0.008213088035583497, 0.008247296333312988, 0.00825708770751953, 0.008290847778320313, 0.008224672317504882, 0.008229087829589844, 0.008228639602661132, 0.008294400215148925, 0.00822707176208496, 0.008198047637939453, 0.008243167877197265, 0.00841487979888916, 0.008249567985534669, 0.008239104270935058, 0.008199487686157227, 0.008188287734985351, 0.008196160316467286, 0.008203904151916503, 0.008337311744689942, 0.008276320457458497, 0.008231488227844239, 0.008224255561828613, 0.008236448287963867, 0.008241727828979492, 0.00844159984588623, 0.008395039558410644, 0.008308192253112794, 0.008250207901000976, 0.008247167587280273, 0.008265855789184571, 0.008241151809692383, 0.008214624404907226, 0.008211999893188477, 0.008244864463806153, 0.008215359687805176, 0.008224096298217774, 0.008237664222717284, 0.009416895866394043, 0.008282048225402832, 0.008290176391601562, 0.008231072425842286, 0.008242112159729004, 0.008216608047485351, 0.008242048263549805, 0.008273183822631836, 0.008427264213562012, 0.008283103942871094, 0.008297151565551757, 0.008512288093566895, 0.008293312072753906, 0.008380384445190429, 0.008343104362487794, 0.008251808166503906, 0.008334527969360352, 0.008312800407409667, 0.008231871604919434, 0.008239104270935058, 0.008228128433227538, 0.008223232269287109, 0.00822099208831787, 0.008795999526977539, 0.008242400169372558, 0.008258336067199707, 0.008275808334350586, 0.008321184158325196, 0.008196096420288086, 0.008226816177368163, 0.008239104270935058, 0.008253536224365234, 
0.008236736297607421, 0.008249567985534669, 0.008216704368591309, 0.00822054386138916, 0.008228863716125488, 0.008253439903259278, 0.008233023643493651, 0.008205984115600586, 0.008216863632202148, 0.008262656211853027, 0.008364543914794922, 0.008241151809692383, 0.00828876781463623, 0.008228063583374023, 0.008231616020202637, 0.008259679794311523, 0.008244416236877442, 0.008205120086669921, 0.00821241569519043, 0.008308608055114746, 0.008208576202392579, 0.008207551956176758, 0.00832595157623291, 0.008193568229675293, 0.008171999931335449, 0.008219776153564453, 0.008222847938537598, 0.008241087913513183, 0.008178496360778808, 0.008193440437316894, 0.00819222354888916, 0.008217023849487306, 0.008221823692321778, 0.008254015922546386, 0.008189248085021974, 0.008348608016967773, 0.00820019245147705, 0.008265727996826172, 0.00819200038909912, 0.008212479591369629, 0.008228863716125488, 0.008186816215515137, 0.008287551879882813, 0.008341312408447266, 0.008255392074584962, 0.00817859172821045, 0.008240960121154784, 0.008255200386047363, 0.008235551834106446, 0.008216575622558593, 0.008216575622558593, 0.008281696319580078, 0.008325535774230957, 0.008298784255981446, 0.008249183654785157, 0.008241024017333985, 0.00825715160369873, 0.008265600204467774, 0.008293888092041016, 0.008355135917663575, 0.008265024185180663, 0.008322815895080566, 0.008321663856506348, 0.00831283187866211, 0.009233407974243164, 0.009222911834716798, 0.009132287979125976, 0.008300543785095215, 0.008284159660339355, 0.008312704086303711, 0.008329248428344727, 0.008290399551391601, 0.008251296043395997, 0.008263327598571778, 0.008397055625915526, 0.008367424011230468, 0.008284895896911622, 0.008275424003601074, 0.008293184280395507, 0.008267647743225097, 0.008275967597961426, 0.008284159660339355, 0.008306303977966308, 0.008282496452331542, 0.00829030418395996, 0.008246272087097169, 0.008310815811157227, 0.008324288368225097, 0.008279840469360351, 0.00831283187866211, 0.008259584426879883, 0.008251392364501953, 0.008265727996826172, 0.008300543785095215, 0.008344991683959961, 0.008222944259643554, 0.008240832328796386, 0.008356224060058594, 0.008346176147460938, 0.008273759841918945, 0.008288479804992676, 0.00833619213104248, 0.008257599830627441, 0.00841750431060791, 0.00826806354522705, 0.008277791976928711, 0.0082740478515625, 0.0082390718460083, 0.00826527976989746, 0.008247584342956542, 0.008308735847473145, 0.008245247840881348, 0.008255104064941406, 0.008248000144958497, 0.008277983665466308, 0.00827337646484375, 0.00825984001159668, 0.008232959747314453, 0.008381728172302247, 0.008213215827941895, 0.008254847526550292, 0.008233119964599609, 0.008325599670410156, 0.008249216079711914, 0.00823299217224121, 0.008228960037231446, 0.008240511894226074, 0.008219167709350586, 0.008280159950256348, 0.00823465633392334, 0.008315103530883789, 0.008226943969726562, 0.00821350383758545, 0.008293439865112304, 0.008342687606811523, 0.008256447792053223, 0.008249183654785157, 0.008237215995788574, 0.008263520240783691, 0.008304672241210937, 0.0082390718460083, 0.00832102394104004, 0.008249343872070313, 0.008224608421325684, 0.008224831581115723, 0.00823744010925293, 0.008317983627319336, 0.008239808082580567, 0.008239007949829102, 0.008302687644958496, 0.008198271751403809, 0.00822435188293457, 0.008234880447387695, 0.008287679672241212, 0.008223711967468262, 0.008194239616394042, 0.008236703872680664, 0.00820035171508789, 0.008203328132629395, 0.008223615646362304, 0.008236672401428223, 0.008210880279541016, 0.008318976402282715, 
0.008215680122375488, 0.008182623863220214, 0.008183839797973633, 0.008185855865478516, 0.008195808410644531, 0.00826806354522705, 0.008300543785095215, 0.008227968215942382, 0.008244319915771485, 0.008247072219848633, 0.008230912208557128, 0.008237055778503418, 0.0082740478515625, 0.008318816184997559, 0.008240608215332032, 0.00840556812286377, 0.008267775535583496, 0.00845587158203125, 0.008331583976745606, 0.008262784004211425, 0.008284223556518555, 0.008263839721679687, 0.008228608131408691, 0.008246175765991211, 0.008255231857299804, 0.008263520240783691, 0.008354559898376464, 0.008253087997436523, 0.008237119674682616, 0.008294655799865722, 0.00839033603668213, 0.008247296333312988, 0.008244864463806153, 0.008245696067810058, 0.008265119552612304, 0.008296480178833008, 0.008238688468933105, 0.008229791641235351, 0.00825324821472168, 0.008400128364562989, 0.008244159698486329, 0.008264927864074708, 0.00827676773071289, 0.008244319915771485, 0.00824022388458252, 0.008226079940795899, 0.008225312232971191, 0.008213695526123046, 0.008209216117858886, 0.008365344047546387, 0.008248031616210938, 0.00820844841003418, 0.00825100803375244, 0.0082291841506958, 0.008242752075195313, 0.008308768272399902, 0.008278016090393067, 0.008321439743041992, 0.00838383960723877, 0.008288543701171875, 0.008477055549621582, 0.008275232315063476, 0.008239839553833008, 0.008303808212280274, 0.00830355167388916, 0.00843721580505371, 0.008487327575683594, 0.008270272254943848, 0.008280223846435546, 0.008272031784057617, 0.00866329574584961, 0.008269824028015137, 0.008218624114990235, 0.008255488395690918, 0.008234527587890625, 0.008239583969116211, 0.008229887962341309, 0.008233247756958008, 0.008333760261535645, 0.008285728454589844, 0.00823788833618164, 0.008230560302734375, 0.008231200218200684, 0.008261247634887696, 0.008225152015686035, 0.008325471878051757, 0.008519359588623047, 0.00866214370727539, 0.00826863956451416, 0.00826268768310547, 0.008285152435302734, 0.008269087791442871, 0.008284895896911622, 0.008300543785095215, 0.008294079780578613, 0.008244864463806153, 0.008246111869812011, 0.008324031829833984, 0.008241727828979492, 0.00821183967590332, 0.008215519905090332, 0.00832038402557373, 0.008305376052856445, 0.00827996826171875, 0.008251392364501953, 0.008250944137573242, 0.008290240287780762, 0.008322688102722168, 0.008629119873046875, 0.008299872398376465, 0.008270496368408204, 0.008258975982666016, 0.00824124813079834, 0.008214048385620117, 0.008285247802734375, 0.008261280059814453, 0.008247679710388183, 0.008406911849975586, 0.008290176391601562, 0.008270079612731934, 0.008275039672851562, 0.008250144004821778, 0.008263072013854981, 0.00823971176147461, 0.00831283187866211, 0.00824732780456543, 0.008255328178405762, 0.008226943969726562, 0.008259167671203613, 0.008263392448425293, 0.008235008239746093, 0.008295968055725097, 0.008282464027404785, 0.008337535858154298, 0.008238368034362793, 0.008239839553833008, 0.008357888221740722, 0.008396672248840333, 0.008238719940185548, 0.008260095596313476, 0.008257439613342285, 0.008280159950256348, 0.008226431846618653, 0.008229248046875, 0.008269824028015137, 0.008245247840881348, 0.00827187156677246, 0.00822214412689209, 0.008517631530761719, 0.008240896224975586, 0.008334207534790038, 0.008453951835632325, 0.008379839897155761, 0.008252096176147462, 0.008262944221496583, 0.008229280471801758, 0.008199487686157227, 0.008225791931152344, 0.008263680458068847, 0.00826527976989746, 0.008250080108642577, 0.008206048011779786, 0.008210432052612305, 
0.008209535598754883, 0.008209152221679688, 0.008212063789367676, 0.008267999649047852, 0.008333824157714843, 0.008211520195007324, 0.008304991722106934, 0.008271519660949706, 0.00825385570526123, 0.008235103607177734, 0.00822435188293457, 0.008203935623168946, 0.008739839553833008, 0.00892518424987793, 0.00898204803466797, 0.008358464241027833, 0.00834166431427002, 0.00872755241394043, 0.008317695617675782, 0.008282112121582032, 0.008260607719421387, 0.008901632308959961, 0.00860364818572998, 0.009639936447143555, 0.008957951545715333, 0.008247615814208985, 0.008269503593444824, 0.00833078384399414, 0.008274399757385253, 0.008271103858947753]",tokens/s,120.39745996320183,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", 
line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 58.12 MiB is free. Process 130993 has 14.68 GiB memory in use. Of the allocated memory 14.19 GiB is allocated by PyTorch, and 384.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,1556.680704,6043.860992,0.0,5641.33888,5589.443072,s,1,15.3459052734375,15.3459052734375,0.0,15.3459052734375,15.3459052734375,15.3459052734375,15.3459052734375,[15.3459052734375],,kWh,0.00022218646855417317,2.450167138528771e-05,7.36295033479939e-05,0.00032031764328745475,,MB,1422.323712,7990.018048,0.0,7574.913024,6755.79136,s,10,9.935990173339844,0.9935990173339844,0.007965376884838362,0.9932402648925781,1.0041536987304687,1.0051293395996095,1.005909852294922,"[0.9806353149414062, 0.9870344848632813, 0.99700732421875, 0.9875632934570312, 0.9940713500976562, 1.0010421752929688, 0.9924091796875, 0.9861851806640625, 1.00610498046875, 1.0039368896484375]",tokens/s,257.64920811505715,kWh,2.8913279401896224e-05,3.1886690359871966e-06,1.9253500251271603e-05,5.135544868915503e-05,tokens/kWh,4984865.414174849,MB,1478.88128,7992.1152,0.0,7574.913024,6755.79392,s,10,50.50752978515625,5.050752978515625,0.016425835324374135,5.054232666015626,5.066164404296875,5.072623706054688,5.077791147460937,"[5.023517578125, 5.02657177734375, 5.0383994140625, 5.04711279296875, 5.0790830078125, 5.05138720703125, 5.057078125, 5.05864794921875, 5.0610029296875, 5.06472900390625]",tokens/s,12.473387684565635,kWh,0.00014834524946393877,1.636272579115718e-05,9.838745497252787e-05,0.0002630954302276238,tokens/kWh,239456.83870485294,,s,630,50.505484954833975,0.08016743643624442,0.0013153008877054171,0.08000113677978515,0.08106471481323242,0.08131376800537109,0.08881760345458985,"[0.09101910400390625, 0.08023741149902344, 
0.07947593688964844, 0.07902457427978515, 0.07861885070800781, 0.0785567398071289, 0.07875199890136719, 0.07874380493164063, 0.07873036956787109, 0.07870553588867188, 0.07869222259521484, 0.07985273742675782, 0.08150931549072266, 0.08055971527099609, 0.08010793304443359, 0.08037366485595702, 0.07970121765136719, 0.07920524597167969, 0.07880908966064454, 0.07873900604248046, 0.07859394836425782, 0.07856492614746094, 0.0786884765625, 0.07973283386230469, 0.0810272674560547, 0.08046230316162109, 0.07994163513183594, 0.07926112365722657, 0.07932371520996094, 0.08060313415527344, 0.07986176300048828, 0.07942937469482422, 0.0789793243408203, 0.07874969482421874, 0.07869644927978516, 0.0787783660888672, 0.08027545928955078, 0.07985971069335937, 0.08106304168701171, 0.0807033920288086, 0.07982825469970703, 0.07935507202148437, 0.07927426910400391, 0.0797001953125, 0.07924972534179688, 0.07981439971923829, 0.07940502166748047, 0.07899772644042968, 0.07872201538085938, 0.07954867553710937, 0.07984703826904296, 0.07943580627441406, 0.07908601379394531, 0.07997657775878907, 0.07976080322265625, 0.07987088012695312, 0.07946607971191406, 0.07961641693115235, 0.08015068817138672, 0.07955244445800781, 0.08057231903076172, 0.08040866851806641, 0.07964399719238281, 0.08838150024414063, 0.080046142578125, 0.07936061096191406, 0.07977935791015625, 0.07931887817382813, 0.0790552978515625, 0.0790169906616211, 0.07868515014648438, 0.07842406463623047, 0.07847280120849609, 0.07849820709228515, 0.08004812622070312, 0.08060518646240235, 0.08051113891601562, 0.08035497283935547, 0.07981430053710938, 0.07917350769042969, 0.07890758514404297, 0.07995404815673827, 0.07944815826416016, 0.07895836639404297, 0.07871250915527343, 0.0786849594116211, 0.0799435806274414, 0.07944316864013672, 0.08019197082519532, 0.08005014038085938, 0.07976182556152343, 0.07922838592529297, 0.07927247619628906, 0.0799170913696289, 0.07940620422363281, 0.07906547546386719, 0.08013651275634766, 0.07941545867919922, 0.07887865447998046, 0.0790098876953125, 0.08005788421630859, 0.08003836822509766, 0.08012595367431641, 0.07944105529785156, 0.07943254089355468, 0.07969296264648437, 0.08006681823730469, 0.07938108825683594, 0.08071987152099609, 0.07997964477539063, 0.08062655639648438, 0.08019558715820313, 0.08058470153808593, 0.07998854064941406, 0.08036982727050782, 0.08000035095214844, 0.07943772888183594, 0.07928707122802735, 0.07933926391601563, 0.08014057922363281, 0.07986172485351563, 0.08045967864990235, 0.08000867462158204, 0.07951615905761719, 0.08017353820800781, 0.0795255355834961, 0.08819391632080079, 0.07993520355224609, 0.07941120147705077, 0.08018972778320313, 0.07993344116210938, 0.07935590362548828, 0.07932326507568359, 0.07904374694824219, 0.07907571411132812, 0.07904905700683594, 0.07983513641357422, 0.07975039672851562, 0.0803683853149414, 0.08028569793701172, 0.07991276550292968, 0.07981394958496094, 0.07949024200439453, 0.0791192626953125, 0.07994035339355468, 0.07917571258544921, 0.07914828491210937, 0.07960681915283203, 0.07929417419433593, 0.07983055877685546, 0.08056060791015625, 0.08081932830810547, 0.08035008239746094, 0.08073958587646485, 0.07998258972167968, 0.08046982574462891, 0.08048694610595702, 0.07954863739013672, 0.07930691528320312, 0.07928412628173828, 0.07928604888916016, 0.07964006042480469, 0.07964086151123047, 0.08004627227783204, 0.07990473937988281, 0.08027792358398438, 0.0799295654296875, 0.07970793914794921, 0.08023264312744141, 0.07956256103515626, 0.07954841613769531, 0.07951737976074219, 
0.07946886444091797, 0.07955577850341797, 0.08020771026611329, 0.08088412475585938, 0.08025350189208984, 0.0796918716430664, 0.08016886138916016, 0.07964214324951172, 0.07967359924316406, 0.08011321258544922, 0.07938114929199219, 0.07985151672363282, 0.07980646514892578, 0.07941558074951172, 0.08015843200683594, 0.0806541748046875, 0.08135298919677734, 0.08861689758300781, 0.07989043426513671, 0.0792125473022461, 0.07961929321289063, 0.07988473510742188, 0.07950780487060546, 0.07910399627685546, 0.07906018829345703, 0.07985049438476563, 0.07922665405273438, 0.07917369842529297, 0.08001747131347656, 0.08047808074951172, 0.0812415008544922, 0.08045401763916016, 0.07979043579101562, 0.07934349060058593, 0.07922688293457031, 0.08020172882080077, 0.07956479644775391, 0.07941529846191406, 0.07920873260498047, 0.07935743713378907, 0.07992546844482422, 0.08011286163330078, 0.08121833801269532, 0.0805212173461914, 0.08036479949951172, 0.07994432067871093, 0.079334716796875, 0.08001747131347656, 0.08003852844238281, 0.07990300750732422, 0.07944905853271485, 0.07927859497070312, 0.07984780883789062, 0.08120294189453126, 0.08119484710693359, 0.08060723114013672, 0.08055248260498046, 0.07970012664794922, 0.07994115447998047, 0.08039647674560547, 0.08076697540283204, 0.08024269104003906, 0.07971593475341797, 0.08113967895507812, 0.08017113494873047, 0.07970211029052734, 0.08032892608642578, 0.08010956573486328, 0.07989453125, 0.08005996704101563, 0.07945670318603516, 0.07954637145996094, 0.07986930847167968, 0.08056508636474609, 0.08018883514404297, 0.0798194580078125, 0.08016476440429687, 0.07961373138427734, 0.08006982421875, 0.08049295806884765, 0.08980643463134766, 0.08163334655761718, 0.08063011169433594, 0.07916671752929688, 0.07942623901367188, 0.07946054077148437, 0.07949046325683594, 0.08123359680175782, 0.08172828674316407, 0.0821180191040039, 0.08163401794433593, 0.08069929504394531, 0.08173782348632813, 0.081082275390625, 0.0805090560913086, 0.08000192260742188, 0.08018351745605469, 0.07955465698242188, 0.0794525146484375, 0.07952623748779297, 0.08007881927490235, 0.07952413177490235, 0.0793268814086914, 0.08139170837402344, 0.08099366760253907, 0.081478271484375, 0.08075878143310547, 0.0799719009399414, 0.07936428833007812, 0.08047853088378906, 0.07991216278076171, 0.07946720123291015, 0.0798578872680664, 0.07941458892822266, 0.0800343017578125, 0.08058060455322266, 0.08145836639404297, 0.08202137756347656, 0.08131529235839843, 0.08111702728271485, 0.08182220458984375, 0.08098636627197266, 0.08082556915283202, 0.08108902740478516, 0.08005840301513673, 0.07953135681152344, 0.0807225570678711, 0.0801157455444336, 0.08117887878417969, 0.08111078643798827, 0.08129254150390625, 0.08105651092529297, 0.08080563354492187, 0.08039814758300781, 0.07946284484863281, 0.07953794860839844, 0.0800770263671875, 0.07957263946533204, 0.080056640625, 0.08035228729248046, 0.08024371337890625, 0.08042217254638671, 0.08052764892578125, 0.08914761352539062, 0.07993309020996094, 0.0793081283569336, 0.07904166412353515, 0.08020313262939453, 0.0798048324584961, 0.08090624237060547, 0.07981219482421875, 0.080552001953125, 0.07995423889160157, 0.07924098968505859, 0.08012210845947265, 0.08149587249755859, 0.08012009429931641, 0.07955446624755859, 0.07960108947753906, 0.07981705474853516, 0.07919586944580079, 0.07904009246826171, 0.07977871704101562, 0.07930614471435547, 0.07989513397216796, 0.07988755035400391, 0.08002642822265625, 0.08063571166992188, 0.08003398132324219, 0.0800186538696289, 0.07968233489990234, 
0.07988428497314454, 0.07984742736816407, 0.0795832290649414, 0.07983939361572266, 0.07945814514160156, 0.07961804962158203, 0.08077043151855469, 0.07987468719482421, 0.07999282836914062, 0.07969324493408203, 0.0801819839477539, 0.08068899536132812, 0.07994342041015624, 0.08070937347412109, 0.080089599609375, 0.08057852935791016, 0.08002159881591797, 0.08061299133300781, 0.08009113311767578, 0.08076643371582032, 0.08004898834228516, 0.0799596176147461, 0.07983139038085937, 0.0801956787109375, 0.08031427001953124, 0.08045577239990234, 0.0802870101928711, 0.07985635375976563, 0.07979622650146484, 0.07998607635498046, 0.08008354949951171, 0.08058841705322266, 0.08119744110107421, 0.08018688201904296, 0.08005228424072265, 0.08889984130859375, 0.08007510375976562, 0.0791756820678711, 0.07892697906494141, 0.07963533020019531, 0.07987773132324219, 0.08021443176269531, 0.07990402984619141, 0.0792501449584961, 0.07921449279785156, 0.07922803497314453, 0.08078060913085937, 0.08116329956054688, 0.08062966156005859, 0.08071833801269532, 0.08000259399414063, 0.07985049438476563, 0.08006201934814453, 0.07959142303466797, 0.07991120147705078, 0.07927792358398437, 0.07936109161376953, 0.08002861022949219, 0.08016441345214843, 0.08124041748046874, 0.08090838623046875, 0.0811924819946289, 0.08025526428222657, 0.07978617858886719, 0.07928012847900391, 0.08019286346435547, 0.07981852722167969, 0.08007154846191407, 0.07979859161376954, 0.08005804443359375, 0.07980032348632812, 0.08055193328857421, 0.08065229034423828, 0.0804167709350586, 0.0800351333618164, 0.07941395568847656, 0.08022425842285157, 0.0798897933959961, 0.07940937805175781, 0.08122927856445313, 0.08075564575195313, 0.08000688171386719, 0.07943612670898438, 0.0811716766357422, 0.08049465942382812, 0.08080655670166016, 0.07999282836914062, 0.07964281463623046, 0.08008048248291015, 0.08057244873046875, 0.08007084655761719, 0.08023155212402344, 0.07986675262451172, 0.08112127685546874, 0.0808652801513672, 0.08010348510742188, 0.08084591674804688, 0.08062448120117187, 0.0890814437866211, 0.08012809753417968, 0.07933132934570312, 0.07917616271972656, 0.07975421142578125, 0.07926233673095703, 0.08042729949951172, 0.07996415710449219, 0.07964851379394532, 0.07934182739257813, 0.0790118408203125, 0.08203257751464844, 0.08133773040771484, 0.08031318664550781, 0.07990460968017578, 0.07970636749267578, 0.07986764526367188, 0.0792405776977539, 0.07963478088378906, 0.07934390258789062, 0.07921891021728515, 0.079351806640625, 0.07982498931884766, 0.08060486602783203, 0.08108646392822266, 0.08038323211669922, 0.08038188934326172, 0.07997702026367187, 0.08034732818603516, 0.07980857849121094, 0.07925350189208985, 0.07919641876220704, 0.08023190307617188, 0.07953846740722656, 0.07936204528808594, 0.08023375701904296, 0.08040521240234375, 0.08111309051513672, 0.08066252899169922, 0.07984537506103516, 0.08116838073730469, 0.0804947509765625, 0.08005001831054688, 0.0794051513671875, 0.07956185913085938, 0.07991990661621094, 0.08092476654052734, 0.08068643188476562, 0.08021590423583984, 0.08131190490722656, 0.08104812622070312, 0.0812298583984375, 0.080942626953125, 0.08001692962646484, 0.07971552276611328, 0.08004374694824219, 0.08082022094726563, 0.08019967651367188, 0.08016496276855468, 0.08042691040039063, 0.08086268615722657, 0.08152937316894532, 0.08034508514404297, 0.08889958190917968, 0.07996415710449219, 0.079325439453125, 0.07975910186767578, 0.07932444763183594, 0.08064688110351563, 0.07991295623779297, 0.08022160339355469, 0.07980912017822266, 
0.08009228515625, 0.07950527954101562, 0.0801087646484375, 0.08105667114257813, 0.08081254577636719, 0.07980278778076172, 0.0794562225341797, 0.07935590362548828, 0.07996211242675781, 0.07999078369140625, 0.0798392333984375, 0.07975730895996094, 0.07948441314697266, 0.07937677001953125, 0.08144908905029297, 0.08074588775634765, 0.08117699432373048, 0.08014768218994141, 0.07983622741699219, 0.08010892486572266, 0.07982339477539062, 0.08040013122558594, 0.08022386932373046, 0.08027519989013672, 0.07994457244873047, 0.08038358306884766, 0.08057488250732422, 0.08008704376220703, 0.08108035278320312, 0.08101267242431641, 0.0800747528076172, 0.07958326721191407, 0.07941069030761719, 0.07994624328613281, 0.0803552017211914, 0.08002162933349609, 0.07982489776611328, 0.0802529296875, 0.07990681457519531, 0.0808051528930664, 0.08046428680419922, 0.08069344329833984, 0.08026329803466797, 0.07962214660644532, 0.08069734191894531, 0.08028921508789062, 0.07963897705078125, 0.08048652648925782, 0.08113970947265625, 0.08057651519775391, 0.08102706909179687, 0.08125440216064453, 0.08059635162353515, 0.08013011169433594, 0.08898976135253907, 0.0801115493774414, 0.0792795867919922, 0.07918572998046874, 0.07985814666748046, 0.08050918579101562, 0.08068736267089843, 0.07973862457275391, 0.08063180541992188, 0.08046198272705078, 0.07971004486083984, 0.08039119720458984, 0.08081833648681641, 0.0811374740600586, 0.08065945434570312, 0.07997030639648438, 0.07985539245605469, 0.07987152099609375, 0.07941600036621094, 0.07993276977539063, 0.07979689788818359, 0.08006451416015625, 0.08033014678955078, 0.08133193969726563, 0.08159295654296875, 0.08021836853027343, 0.0801495361328125, 0.079891357421875, 0.07938780975341797, 0.07993030548095703, 0.08033891296386719, 0.08016281890869141, 0.07925116729736328, 0.07979574584960937, 0.08030694580078125, 0.08029593658447266, 0.08065401458740235, 0.081244384765625, 0.08107977294921875, 0.08050956726074218, 0.08000621032714844, 0.08013670349121094, 0.08006291198730468, 0.08071372985839843, 0.07994931030273437, 0.0802636489868164, 0.07986300659179688, 0.08022713470458985, 0.08020716857910157, 0.08040278625488281, 0.08167868804931641, 0.08091420745849609, 0.07976700592041015, 0.0796272964477539, 0.08010089874267579, 0.08035939025878906, 0.08038626861572265, 0.08003584289550782, 0.08021401977539062, 0.08001519775390625, 0.08003936004638672, 0.080401123046875, 0.08161917114257812]",tokens/s,12.473892698256357,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1149.775872,2341.404672,0.0,1946.157056,1819.994112,s,1,10.570611328125,10.570611328125,0.0,10.570611328125,10.570611328125,10.570611328125,10.570611328125,[10.570611328125],,kWh,0.00010066060923333566,1.109633666215753e-05,3.623419565399952e-05,0.0001479911415494927,,MB,1378.922496,2582.577152,0.0,2174.746624,2099.202048,s,10,3.934668365478516,0.3934668365478516,0.0016876900708838235,0.3932472686767578,0.39559535522460937,0.39606524047851566,0.39644114868164065,"[0.3930092468261719, 0.39351248168945313, 0.3919287414550781, 0.3916224365234375, 0.393379638671875, 0.3931148986816406, 0.390976806640625, 0.39509805297851563, 0.3965351257324219, 0.3954909362792969]",tokens/s,650.6266252222415,kWh,1.1802210804999958e-05,1.3015767721691397e-06,7.819250699840074e-06,2.0923038277009173e-05,tokens/kWh,12235316.716946414,MB,1401.004032,2582.577152,0.0,2174.746624,2099.204608,s,10,29.687070068359375,2.9687070068359374,0.010928839747835784,2.9662825927734375,2.9838070068359377,2.9861058959960936,2.9879450073242184,"[2.9577314453125, 2.96910888671875, 2.983296142578125, 2.965572265625, 2.96279736328125, 2.979611572265625, 2.96007080078125, 2.966992919921875, 2.98840478515625, 2.95348388671875]",tokens/s,21.22135995735925,kWh,8.53399655324997e-05,9.413203049478244e-06,5.445724356576024e-05,0.0001492104121477382,tokens/kWh,422222.54528471915,,s,630,29.68362404251098,0.04711686355954125,0.0009809421800245752,0.04696473693847656,0.047550915908813475,0.04844057292938232,0.05204850360870361,"[0.05248166275024414, 0.046940319061279295, 0.04614553451538086, 0.04617379379272461, 0.0466357421875, 0.04670431900024414, 0.04694630432128906, 0.047247390747070316, 0.04696636962890625, 0.04696649551391602, 0.047030910491943356, 0.04713593673706055, 0.04674851226806641, 0.04682060623168945, 0.04716207885742187, 0.04681532669067383, 0.046973953247070314, 0.04722988891601562, 0.04713011169433594, 0.047695487976074216, 0.04701887893676758, 0.04705260848999023, 0.04649593734741211, 0.04603859329223633, 0.0465013427734375, 0.04679164886474609, 0.046723072052001956, 0.046614048004150394, 0.04717820739746094, 0.047179840087890626, 0.04708550262451172, 0.04699241638183594, 0.04692076873779297, 0.04689907073974609, 0.04699955368041992, 0.04674687957763672, 0.04654159927368164, 0.04635958480834961, 0.04656643295288086, 0.04690892791748047, 0.04752646255493164, 0.04671696090698242, 0.04640956878662109, 0.04680006408691406, 0.04652918243408203, 0.04664336013793945, 0.046831329345703124, 0.046958881378173827, 0.04688896179199219, 0.04691763305664062, 0.047185726165771484, 0.047114177703857424, 0.04652671813964844, 0.04639145660400391, 0.0466143684387207, 0.04676108932495117, 0.04679520034790039, 0.046881214141845706, 0.046727199554443356, 0.0471421127319336, 0.0471354866027832, 0.04719615936279297, 0.047118335723876956, 0.05119385528564453, 0.047333377838134766, 0.04698025512695313, 0.047397727966308596, 0.047459583282470706, 0.047155967712402345, 0.04710521697998047, 0.04695257568359375, 0.04697103881835937, 0.04705539321899414, 0.04701593780517578, 0.04692083358764648, 0.04674137496948242, 0.046823486328125, 0.046468032836914065, 0.046252033233642575, 0.046634239196777345, 0.04696044921875, 0.04653801727294922, 0.04685583877563477, 0.046854145050048826, 0.04723295974731445, 0.047054912567138674, 0.04676812744140625, 0.047016063690185544, 0.04744998550415039, 0.04726169586181641, 0.04655107116699219, 0.04678857421875, 0.046839134216308594, 0.04659987258911133, 
0.04663395309448242, 0.046878719329833986, 0.04678656005859375, 0.04672716903686523, 0.046837760925292966, 0.046712833404541014, 0.046653438568115234, 0.04665280151367188, 0.04681299209594727, 0.04720844650268555, 0.04684064102172852, 0.04702588653564453, 0.04714329528808594, 0.0468070068359375, 0.04658988952636719, 0.047118080139160155, 0.04722918319702148, 0.04689891052246094, 0.04688924789428711, 0.04853926467895508, 0.04890566253662109, 0.04722787094116211, 0.0473026237487793, 0.04737843322753906, 0.04706918334960938, 0.047263744354248044, 0.047513599395751956, 0.04732067108154297, 0.04730511856079102, 0.04740304183959961, 0.04802864074707031, 0.04776038360595703, 0.05205545425415039, 0.04724396896362305, 0.0469851188659668, 0.047242305755615235, 0.05152617645263672, 0.04748534393310547, 0.04731606292724609, 0.047167617797851565, 0.046813983917236325, 0.04711423873901367, 0.04710575866699219, 0.046589344024658204, 0.04668499374389649, 0.04753180694580078, 0.04716783905029297, 0.046362560272216795, 0.04744339370727539, 0.048580192565917966, 0.04732723236083984, 0.04635116958618164, 0.0464180793762207, 0.046862335205078126, 0.04690739059448242, 0.04680607986450196, 0.047015007019042966, 0.047457183837890625, 0.047051712036132814, 0.04692172622680664, 0.04653590393066406, 0.046467872619628904, 0.04656947326660156, 0.04699347305297852, 0.046951904296875, 0.046848320007324216, 0.04699267196655273, 0.04722163009643555, 0.04694563293457031, 0.04710671997070313, 0.04755046463012695, 0.04776931381225586, 0.04729270553588867, 0.047254753112792966, 0.04724777603149414, 0.04730099105834961, 0.04736614227294922, 0.04757017517089844, 0.04754044723510742, 0.04737488174438476, 0.05087846374511719, 0.04738252639770508, 0.04746854400634765, 0.04723487854003906, 0.047526046752929686, 0.04743990325927734, 0.047156448364257815, 0.046988063812255856, 0.0474337272644043, 0.04717891311645508, 0.04705366516113281, 0.04675180816650391, 0.04652995300292969, 0.047434272766113283, 0.04810927963256836, 0.0519417610168457, 0.047077728271484376, 0.04640172958374023, 0.04697068786621094, 0.04640563201904297, 0.04666511917114258, 0.04692643356323242, 0.047323135375976565, 0.04672707366943359, 0.05037884902954102, 0.047062366485595704, 0.04695926284790039, 0.046682113647460936, 0.04672700881958008, 0.046612640380859376, 0.046860286712646484, 0.046976192474365235, 0.047475582122802736, 0.04689891052246094, 0.046827743530273434, 0.04683545684814453, 0.04740940856933594, 0.046435680389404294, 0.04655756759643555, 0.04797468948364258, 0.04718988800048828, 0.047042686462402346, 0.04729446411132812, 0.04701308822631836, 0.04726419067382812, 0.04722518539428711, 0.04721049499511719, 0.046994815826416014, 0.04709235382080078, 0.04700774383544922, 0.04717772674560547, 0.04729836654663086, 0.047163585662841796, 0.047393089294433595, 0.04703308868408203, 0.04678934478759766, 0.046782176971435545, 0.0461030387878418, 0.04570111846923828, 0.04666572952270508, 0.04707657623291016, 0.046848800659179686, 0.04717772674560547, 0.04702825546264648, 0.04698313522338867, 0.04721664047241211, 0.04693932723999023, 0.046906177520751956, 0.04700569534301758, 0.047011455535888674, 0.04687295913696289, 0.04681318283081055, 0.04668726348876953, 0.047362430572509766, 0.047032318115234374, 0.04681584167480469, 0.04639539337158203, 0.046521888732910154, 0.052090816497802735, 0.047023456573486326, 0.0470252799987793, 0.04678224182128906, 0.047027713775634764, 0.04681974411010742, 0.04650425720214844, 0.04647404861450195, 0.04705788803100586, 
0.047065086364746093, 0.0470838394165039, 0.04694393539428711, 0.046927040100097656, 0.04692620849609375, 0.046993854522705075, 0.0478636474609375, 0.04695811080932617, 0.04698380661010742, 0.046935680389404294, 0.04707148742675781, 0.048269535064697264, 0.04701788711547852, 0.04703871917724609, 0.04935027313232422, 0.047102046966552735, 0.04690969467163086, 0.049872703552246093, 0.04675785446166992, 0.04678451156616211, 0.046744800567626955, 0.04685084915161133, 0.04711004638671875, 0.046526561737060546, 0.04653068923950195, 0.04658396911621094, 0.04631836700439453, 0.04609763336181641, 0.04689619064331055, 0.04666198348999023, 0.04640377426147461, 0.0462825927734375, 0.046561054229736325, 0.04679942321777344, 0.046724960327148436, 0.046813278198242186, 0.0465530891418457, 0.04698726272583008, 0.04655251312255859, 0.04691939163208008, 0.04930441665649414, 0.046367870330810544, 0.0464925422668457, 0.04700889587402344, 0.04682022476196289, 0.046712478637695315, 0.04658432006835937, 0.04642595291137695, 0.04627046585083008, 0.046652511596679686, 0.0467130241394043, 0.04651459121704102, 0.04667228698730469, 0.04731027221679687, 0.05203148651123047, 0.04748259353637695, 0.0474664306640625, 0.04748118209838867, 0.0474370231628418, 0.04721744155883789, 0.04723878479003906, 0.04727641677856445, 0.047108097076416014, 0.047607425689697266, 0.047212928771972654, 0.04712857437133789, 0.04734931182861328, 0.04725600051879883, 0.04710604858398437, 0.04874764633178711, 0.047149791717529296, 0.04674166488647461, 0.047110145568847656, 0.04689465713500977, 0.04671088027954102, 0.0466701774597168, 0.046854145050048826, 0.04757708740234375, 0.047376094818115236, 0.046981151580810544, 0.047042816162109376, 0.047185855865478514, 0.04701113510131836, 0.046719745635986326, 0.047052639007568356, 0.046887073516845706, 0.04691766357421875, 0.04685942459106445, 0.046804031372070315, 0.0470648307800293, 0.04731600189208984, 0.04691360092163086, 0.04677545547485352, 0.046917152404785153, 0.046606559753417966, 0.04649903869628906, 0.04651273727416992, 0.046966815948486326, 0.046833824157714844, 0.04678403091430664, 0.050544097900390626, 0.04812076950073242, 0.046798912048339844, 0.046757118225097656, 0.04678937530517578, 0.04738848114013672, 0.046780513763427733, 0.046769248962402345, 0.046973953247070314, 0.047132225036621095, 0.04701638412475586, 0.04723632049560547, 0.04749596786499023, 0.04763033676147461, 0.04981145477294922, 0.047511550903320314, 0.04756480026245117, 0.05374806213378906, 0.04743132781982422, 0.046841537475585934, 0.046467742919921874, 0.046785984039306644, 0.04676665496826172, 0.04664947128295899, 0.04670451354980469, 0.046129505157470704, 0.045872798919677736, 0.0458666877746582, 0.045984416961669924, 0.04607926559448242, 0.046168449401855466, 0.04676809692382813, 0.04669164657592773, 0.04755497741699219, 0.0464851188659668, 0.0468583984375, 0.04662326431274414, 0.046589088439941403, 0.046736190795898434, 0.04656342315673828, 0.04636774444580078, 0.046054336547851564, 0.04612035369873047, 0.04657241439819336, 0.04688633728027344, 0.04741763305664062, 0.047080799102783205, 0.04632387161254883, 0.046236480712890625, 0.046231136322021485, 0.04623369598388672, 0.045965312957763675, 0.046077728271484375, 0.04657183837890625, 0.047379680633544925, 0.04693471908569336, 0.046792606353759765, 0.046717025756835937, 0.04666195297241211, 0.04678623962402344, 0.04709344100952149, 0.04747296142578125, 0.047105377197265624, 0.04714460754394531, 0.047134944915771484, 0.047311649322509766, 0.04735795211791992, 
0.04759535980224609, 0.0473540153503418, 0.04722892761230469, 0.04726169586181641, 0.04774092864990234, 0.04715315246582031, 0.047282176971435545, 0.04721811294555664, 0.04754691314697266, 0.047261726379394534, 0.04726784133911133, 0.047925247192382815, 0.05047615814208985, 0.05258422470092773, 0.0472119026184082, 0.046723072052001956, 0.04672556686401367, 0.04663132858276367, 0.0466431999206543, 0.047155231475830076, 0.04653871917724609, 0.047209854125976565, 0.04678057479858398, 0.046779071807861325, 0.04696041488647461, 0.04686438369750977, 0.0465715217590332, 0.046798686981201175, 0.04723318481445313, 0.04667350387573242, 0.047483295440673826, 0.04695004653930664, 0.046919456481933595, 0.047196704864501955, 0.047443073272705076, 0.047104705810546876, 0.04738848114013672, 0.04690972900390625, 0.04676976013183594, 0.04638364791870117, 0.0468372802734375, 0.04676860809326172, 0.047099807739257815, 0.046782302856445315, 0.046978912353515624, 0.04682393646240234, 0.04689014434814453, 0.046940929412841795, 0.04723507308959961, 0.0484117431640625, 0.04756889724731445, 0.048480800628662106, 0.04772288131713867, 0.0470077133178711, 0.046964767456054685, 0.047066783905029295, 0.0470654411315918, 0.04745625686645508, 0.04692921447753906, 0.04741596984863281, 0.047718433380126955, 0.04805017471313477, 0.04666777420043945, 0.04663827133178711, 0.04704134368896484, 0.04676982498168945, 0.04672867202758789, 0.04672175979614258, 0.04639670562744141, 0.046395713806152344, 0.04677795028686523, 0.04653395080566406, 0.046636703491210935, 0.04673891067504883, 0.04682601547241211, 0.046878719329833986, 0.052262527465820316, 0.04715161514282227, 0.046774208068847654, 0.046585792541503905, 0.046685440063476566, 0.04742118453979492, 0.04679372787475586, 0.0474337272644043, 0.04696473693847656, 0.04713059234619141, 0.04717366409301758, 0.047026176452636716, 0.04701747131347656, 0.04879206466674805, 0.04696672058105469, 0.046911006927490236, 0.046769695281982424, 0.04701219177246094, 0.04735836791992187, 0.047255809783935544, 0.04729651260375976, 0.04780441665649414, 0.04739788818359375, 0.04729753494262695, 0.047032257080078126, 0.04716508865356445, 0.04742595291137695, 0.0472470703125, 0.047683616638183594, 0.047370494842529295, 0.04734566497802734, 0.04728387069702149, 0.047216991424560546, 0.04717071914672852, 0.04703641510009766, 0.046832065582275394, 0.047122463226318356, 0.047392223358154295, 0.04678054428100586, 0.04671529769897461, 0.04674803161621094, 0.046729217529296874, 0.04656332778930664, 0.04707468795776367, 0.049135967254638674, 0.04753164672851563, 0.047030017852783206, 0.0466124496459961, 0.04683491134643555, 0.04706684875488281, 0.04672284698486328, 0.04640534210205078, 0.04668467330932617, 0.04686963272094727, 0.04700249481201172, 0.048842273712158206, 0.049482208251953125, 0.04899571228027344, 0.056950752258300784, 0.046863262176513674, 0.04687868881225586, 0.04673865509033203, 0.046268672943115235, 0.05170819091796875, 0.04694220733642578, 0.04696473693847656, 0.04748905563354492, 0.046804958343505856, 0.04755660629272461, 0.0470302734375, 0.047087551116943356, 0.046927295684814456, 0.04697971343994141, 0.04682451248168945, 0.04729087829589844, 0.047129024505615236, 0.047474750518798826, 0.04761171340942383, 0.04760614395141602, 0.04747647857666016, 0.04699926376342774, 0.04710758590698242, 0.04726486587524414, 0.04809286499023437, 0.04634339141845703, 0.04640784072875977, 0.0485873908996582, 0.04838329696655273, 0.04685068893432617, 0.04702828979492187, 0.04659404754638672, 
0.04664748764038086, 0.046190399169921875, 0.045610240936279293, 0.04597366333007812, 0.045754974365234374, 0.045856769561767576, 0.04640972900390625, 0.04729446411132812, 0.04628662490844727, 0.04619494247436524, 0.04605948638916016, 0.046002334594726565, 0.04631766510009765, 0.046163711547851566, 0.04627257537841797, 0.04619286346435547, 0.046154655456542966, 0.04670547103881836, 0.046499839782714845, 0.04665507125854492, 0.0462391357421875, 0.04629782485961914, 0.04709711837768555, 0.046672832489013674, 0.04636064147949219, 0.046640159606933594, 0.046588897705078125, 0.046448192596435546, 0.04644524765014649, 0.04771846389770508, 0.04655440139770508, 0.04644467163085937, 0.04846416091918945, 0.046925376892089844, 0.046509696960449216]",tokens/s,21.223823583594587,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,828.510208,4944.953344,0.0,4584.374272,4545.690624,s,1,13.94236328125,13.94236328125,0.0,13.94236328125,13.94236328125,13.94236328125,13.94236328125,[13.94236328125],,kWh,0.00019561187700833216,2.157021555441482e-05,6.484866298998859e-05,0.00028203075555273556,,MB,1298.939904,5572.001792,0.0,5156.896768,4927.105024,s,10,9.89754522705078,0.9897545227050781,0.003063670858612575,0.98935791015625,0.9921638854980469,0.9943199798583985,0.9960448553466796,"[0.9901321411132813, 0.987863525390625, 0.9885836791992187, 0.9841514282226562, 0.98832666015625, 0.9915654907226562, 0.9905794677734375, 0.9881820068359375, 0.9916847534179688, 0.99647607421875]",tokens/s,258.64999262678947,kWh,2.8836119542049318e-05,3.1788739150806525e-06,1.9246076002909053e-05,5.126106946003902e-05,tokens/kWh,4994043.290485129,MB,1331.744768,5580.3904,0.0,5163.188224,4927.107584,s,10,55.76240673828126,5.576240673828126,0.017106762595435723,5.570315429687501,5.5908185546875,5.6066880859375,5.6193837109375,"[5.5606044921875, 5.5657109375, 5.5650712890625, 5.56974072265625, 5.57766357421875, 5.5872919921875, 5.57089013671875, 5.5769091796875, 5.6225576171875, 5.565966796875]",tokens/s,11.29793416121512,kWh,0.00016279664395086428,1.7958261159170387e-05,0.00010278616051169022,0.00028354106562172496,tokens/kWh,222190.03748842838,,s,630,55.75906935119632,0.08850645928761315,0.001505852565889858,0.0881601905822754,0.0888387924194336,0.0894180290222168,0.09861282333374026,"[0.09744169616699219, 0.08868860626220704, 0.08891196441650391, 0.08782032012939453, 0.08795136260986328, 0.08798003387451171, 0.08809999847412109, 0.08788652801513672, 0.08804777526855469, 0.08818627166748047, 0.08787619018554688, 0.08807014465332032, 0.08793875122070313, 0.08806575775146484, 0.08892476654052735, 0.08841830444335938, 0.08796160125732422, 0.08798553466796875, 0.08809452819824219, 0.08799110412597656, 0.08799641418457031, 0.08798966217041015, 0.08810966491699218, 0.08790121459960938, 0.08793296051025391, 0.08794003295898438, 0.0882166748046875, 0.08781097412109375, 0.08792034912109375, 0.08801862335205078, 
0.08804959869384765, 0.08793507385253906, 0.08800109100341796, 0.08793202972412109, 0.0880874252319336, 0.08818688201904297, 0.08791584014892578, 0.0880483169555664, 0.08787699127197265, 0.08789161682128906, 0.0879769287109375, 0.08799027252197265, 0.08802912139892578, 0.08821746826171875, 0.08806953430175782, 0.08786409759521484, 0.08791561889648437, 0.08796649932861328, 0.08810099029541016, 0.08795721435546874, 0.08797596740722656, 0.08790451049804687, 0.08807154846191406, 0.08809740447998046, 0.08804557037353515, 0.08791964721679688, 0.09018093109130859, 0.08901392364501953, 0.08877830505371094, 0.08805216217041016, 0.08792447662353516, 0.08813593292236328, 0.08799398040771485, 0.09801696014404297, 0.08899788665771484, 0.08847388458251954, 0.08827817535400391, 0.08795426940917969, 0.08801074981689454, 0.08801068878173827, 0.08815827178955078, 0.08819302368164063, 0.08797388458251953, 0.08799212646484375, 0.08801615905761719, 0.08795750427246093, 0.088091552734375, 0.08798003387451171, 0.08833225250244141, 0.08842243194580078, 0.08842649841308593, 0.08810086059570313, 0.08801689910888671, 0.0880097885131836, 0.08823027038574219, 0.08825504302978515, 0.0889029769897461, 0.08800531005859374, 0.0882155532836914, 0.08834662628173828, 0.08830723571777344, 0.08816278076171875, 0.08840566253662109, 0.08799657440185547, 0.08818707275390625, 0.08823600006103516, 0.08837328338623048, 0.08833392333984375, 0.08794960021972656, 0.08814195251464843, 0.08808444976806641, 0.08802102661132813, 0.08822169494628906, 0.08821753692626953, 0.08848185729980469, 0.08837721252441406, 0.08779993438720703, 0.08794857788085937, 0.08808866882324219, 0.0879393310546875, 0.088050048828125, 0.08805785369873047, 0.08788377380371094, 0.0879636459350586, 0.0880148468017578, 0.08800665283203125, 0.08804073333740234, 0.08947376251220703, 0.08803123474121094, 0.08820941162109375, 0.08831385803222656, 0.08824217224121093, 0.0882339859008789, 0.08805375671386718, 0.08800870513916016, 0.0881435546875, 0.09969459533691406, 0.08915731048583984, 0.08892777252197266, 0.08792044830322265, 0.087890625, 0.08816246032714843, 0.0879467544555664, 0.08823161315917968, 0.08798896026611328, 0.08784508514404298, 0.08784896087646485, 0.08813155364990234, 0.08799747467041015, 0.08794214630126954, 0.08806159973144531, 0.08804905700683593, 0.08808544158935547, 0.08794316864013672, 0.08797782135009766, 0.08802710723876953, 0.08787139129638671, 0.08916815948486329, 0.08859168243408203, 0.08839852905273438, 0.08810470581054687, 0.08814412689208985, 0.08876441955566407, 0.08801894378662109, 0.08823603057861328, 0.08816230773925782, 0.08837452697753906, 0.08796851348876954, 0.08847071838378906, 0.08796857452392579, 0.08793635559082032, 0.08786192321777343, 0.0879428482055664, 0.0878779525756836, 0.08942591857910157, 0.08801280212402343, 0.08791654205322266, 0.08805990600585938, 0.08805705261230469, 0.0881088638305664, 0.08845001220703125, 0.088493408203125, 0.08817459106445312, 0.0881028823852539, 0.08791519927978515, 0.08804704284667969, 0.08808505249023438, 0.08793907165527344, 0.08792246246337891, 0.08828540802001954, 0.08815216064453125, 0.08793488311767578, 0.08784076690673828, 0.08782227325439453, 0.08814086151123048, 0.08790322875976563, 0.08791827392578125, 0.08807456207275391, 0.08825225830078125, 0.09886016082763673, 0.08898445129394532, 0.08882176208496094, 0.08796979522705078, 0.0880148468017578, 0.08792473602294922, 0.08806387329101563, 0.08789004516601563, 0.08796688079833985, 0.08821155548095704, 0.08804633331298828, 
0.08786329650878906, 0.08795120239257813, 0.0881890869140625, 0.09035980987548828, 0.08799231719970703, 0.08806511688232421, 0.08797891235351563, 0.08798118591308594, 0.08940838623046875, 0.08805548858642578, 0.08806636810302734, 0.08804966735839843, 0.0879393310546875, 0.08794290924072265, 0.0880393295288086, 0.08938909149169921, 0.08791455841064454, 0.08790835571289063, 0.08807222747802734, 0.08795238494873046, 0.08795990753173828, 0.08803539276123047, 0.08812310028076172, 0.08795017242431641, 0.08799788665771484, 0.08795193481445313, 0.08804758453369141, 0.08810499572753906, 0.08811315155029296, 0.08795750427246093, 0.08823836517333984, 0.0879858856201172, 0.08794281768798828, 0.08810908508300781, 0.08820358276367188, 0.08816611480712891, 0.08847299194335938, 0.08797273254394532, 0.08805117034912109, 0.08824816131591796, 0.0878985595703125, 0.08799871826171875, 0.08826982116699218, 0.08869987487792969, 0.08868013000488281, 0.0884534683227539, 0.08847599792480469, 0.08844048309326172, 0.0885780487060547, 0.08874918365478515, 0.0890561294555664, 0.08857190704345703, 0.09900569915771484, 0.08929337310791016, 0.0888568344116211, 0.0889806365966797, 0.08854425811767579, 0.08810275268554688, 0.08835686492919922, 0.08874591827392578, 0.08838969421386719, 0.08828886413574219, 0.08870748901367187, 0.08815821075439453, 0.08822374725341797, 0.08821113586425781, 0.08872991943359375, 0.08810678100585938, 0.08815638732910157, 0.08806604766845703, 0.08854259490966797, 0.08850086212158204, 0.088416259765625, 0.08831795501708985, 0.08847142028808594, 0.08836675262451171, 0.08815023803710938, 0.08820912170410156, 0.08862364959716797, 0.08823935699462891, 0.0881091537475586, 0.08829180908203126, 0.08874208068847657, 0.08838963317871094, 0.08847964477539062, 0.08855350494384766, 0.0881786880493164, 0.08818694305419922, 0.08836211395263673, 0.08878083038330079, 0.08807305908203125, 0.08847325134277344, 0.08871971130371094, 0.08804329681396485, 0.08807855987548828, 0.08813922882080077, 0.08843523406982422, 0.08852601623535156, 0.0881011505126953, 0.0880703353881836, 0.088012451171875, 0.08810566711425781, 0.08808412933349609, 0.0881313934326172, 0.08845571136474609, 0.08828316497802734, 0.08821142578125, 0.08813772583007813, 0.08806192016601562, 0.0881500473022461, 0.08879718780517579, 0.08821052551269531, 0.08823081970214844, 0.08854528045654297, 0.08841011047363281, 0.09883261108398438, 0.0894912338256836, 0.08861808013916016, 0.08855644989013672, 0.0881517105102539, 0.08812095642089844, 0.08836067199707032, 0.08851148986816407, 0.08823772430419922, 0.08807049560546874, 0.08815151977539062, 0.08835916900634766, 0.08861456298828126, 0.08837696075439454, 0.08821161651611328, 0.08828912353515625, 0.08855209350585938, 0.08814422607421875, 0.08813158416748047, 0.08824832153320313, 0.08881257629394532, 0.08820835113525391, 0.08813187408447265, 0.08830947113037109, 0.08837529754638672, 0.08881913757324218, 0.08996717071533203, 0.08910189056396485, 0.0884755859375, 0.08816896057128906, 0.08811929321289062, 0.08807014465332032, 0.0883609619140625, 0.08864153289794922, 0.0883773422241211, 0.08832592010498047, 0.08852467346191406, 0.08830397033691406, 0.08814530944824218, 0.0882135009765625, 0.08827065277099609, 0.08919734191894531, 0.09211414337158202, 0.08850825500488281, 0.0882143325805664, 0.08876019287109375, 0.09006460571289063, 0.08829596710205079, 0.08848178863525391, 0.08843468475341797, 0.08847360229492188, 0.08855715179443359, 0.08813814544677734, 0.08814777374267578, 0.08805366516113282, 
0.08837948608398437, 0.08869497680664062, 0.08824591827392578, 0.08808857727050781, 0.08837155151367188, 0.08858828735351562, 0.08861270141601563, 0.08877184295654297, 0.09946316528320312, 0.08926544189453126, 0.08855171203613281, 0.08809516906738281, 0.0881124496459961, 0.08849683380126953, 0.08829488372802734, 0.08812172698974609, 0.08809478759765625, 0.08804499053955078, 0.08827766418457031, 0.08805785369873047, 0.08791817474365235, 0.08796611022949219, 0.08814588928222657, 0.08816028594970703, 0.08866611480712891, 0.08811110687255859, 0.0882135009765625, 0.08830156707763671, 0.08838553619384766, 0.08833135986328125, 0.08807100677490234, 0.08919046020507812, 0.08811958312988281, 0.08824598693847656, 0.08851455688476563, 0.08860671997070313, 0.08813875579833984, 0.08804249572753907, 0.08820121765136718, 0.08836649322509765, 0.08849046325683593, 0.08813359832763672, 0.08807849884033203, 0.08812742614746094, 0.0881357421875, 0.0883609619140625, 0.08846540832519531, 0.08834799957275391, 0.08804214477539063, 0.0879573745727539, 0.08823411560058594, 0.0880103988647461, 0.08840620422363281, 0.08814717102050781, 0.08806809234619141, 0.0882380142211914, 0.08798925018310547, 0.08812531280517578, 0.08823616027832032, 0.088342529296875, 0.08846329498291015, 0.0880128631591797, 0.08819292449951172, 0.08793881225585938, 0.08793103790283203, 0.08818502044677734, 0.08843264007568359, 0.08832614135742188, 0.08822374725341797, 0.08821891021728516, 0.08816508483886719, 0.09807472229003907, 0.08929622650146485, 0.08829596710205079, 0.08807234954833984, 0.08784505462646484, 0.08830342102050781, 0.08876850891113282, 0.08826624298095703, 0.0882808609008789, 0.08853167724609375, 0.08862252807617188, 0.08883872222900391, 0.08832367706298828, 0.08842076873779296, 0.0883978271484375, 0.08853708648681641, 0.08811891174316407, 0.08809305572509765, 0.08804557037353515, 0.08832572937011719, 0.08840233612060547, 0.08839695739746094, 0.08811100769042969, 0.08805817413330078, 0.08817673492431641, 0.08840969848632813, 0.088392578125, 0.08822927856445313, 0.08814864349365234, 0.08821715545654298, 0.0887890853881836, 0.08814736175537109, 0.08807929229736328, 0.08816831970214843, 0.08848381042480469, 0.08844713592529296, 0.08819097900390625, 0.0882155532836914, 0.08840099334716797, 0.08848614501953125, 0.08821798706054687, 0.08819644927978515, 0.08832505798339843, 0.08849817657470703, 0.08826675415039062, 0.08831385803222656, 0.08859852600097656, 0.08849549102783204, 0.0883288345336914, 0.08861695861816406, 0.08824147033691407, 0.08805590057373047, 0.08793353271484375, 0.08791811370849609, 0.08808905792236328, 0.08818495941162109, 0.088040771484375, 0.09153798675537109, 0.08810905456542968, 0.08814796447753906, 0.08811519622802734, 0.08781375885009765, 0.08910066986083984, 0.10189625549316406, 0.0934747543334961, 0.08994393920898437, 0.08892633819580079, 0.08890367889404296, 0.09113600158691407, 0.08892620849609376, 0.08890505981445312, 0.08829404449462891, 0.08824422454833984, 0.08838451385498047, 0.08849510192871093, 0.08815821075439453, 0.08811456298828126, 0.08803778839111329, 0.08914323425292969, 0.0882281265258789, 0.09051532745361328, 0.08830131530761719, 0.08836934661865234, 0.08839926147460937, 0.08827327728271485, 0.08808489227294922, 0.08805171203613281, 0.08978604888916016, 0.09270416259765625, 0.09261148834228515, 0.09273324584960937, 0.09320278167724609, 0.09345613098144531, 0.093185791015625, 0.09054585266113281, 0.08830223846435546, 0.0880404510498047, 0.08791142272949219, 0.0882747802734375, 
0.08914502716064453, 0.08907209777832031, 0.08852889251708984, 0.08883942413330079, 0.08883277130126953, 0.08844902038574219, 0.0881475830078125, 0.08818441772460937, 0.08869670104980469, 0.08840898895263671, 0.08814796447753906, 0.08820941162109375, 0.08904294586181641, 0.08811449432373047, 0.08798806762695313, 0.08799318695068359, 0.08814387512207031, 0.08786739349365234, 0.08794697570800782, 0.08793440246582031, 0.08799523162841796, 0.08795340728759765, 0.08802480316162109, 0.08808271789550781, 0.08806333160400391, 0.08804828643798829, 0.08839987182617187, 0.09922182464599609, 0.08900572967529297, 0.088314208984375, 0.08842396545410157, 0.08829942321777344, 0.08825504302978515, 0.08802713775634766, 0.08805712127685547, 0.08820195007324219, 0.08843414306640625, 0.08847209930419922, 0.08834867095947266, 0.08835276794433594, 0.088468994140625, 0.08809728240966797, 0.08804557037353515, 0.08800892639160156, 0.08805705261230469, 0.0879744644165039, 0.08801280212402343, 0.08793612670898437, 0.08806079864501953, 0.08796979522705078, 0.08800870513916016, 0.0880373764038086, 0.08813689422607422, 0.08794156646728515, 0.0908903045654297, 0.08813734436035156, 0.08838009643554688, 0.08826614379882812, 0.08821577453613282, 0.08800294494628906, 0.08809062194824219, 0.087967041015625, 0.08796640014648438, 0.08798617553710937, 0.08814096069335937, 0.0880578842163086, 0.08798207855224609, 0.08792556762695312, 0.08794111633300782, 0.08812134552001953, 0.08792063903808593, 0.08801254272460937, 0.08803763580322266, 0.08794931030273437, 0.08803533172607422, 0.08804889678955079, 0.08796784210205078, 0.08791085052490234, 0.0878983383178711, 0.08816009521484375, 0.08796482849121094, 0.08798303985595703, 0.08799852752685547, 0.08802304077148437, 0.08834172821044922, 0.08818287658691407, 0.08840048217773437, 0.08839791870117188, 0.08807584381103516, 0.08810070037841797]",tokens/s,11.29861038447342,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,820.768768,4724.752384,0.0,4322.230272,4218.036736,s,1,14.017095703125,14.017095703125,0.0,14.017095703125,14.017095703125,14.017095703125,14.017095703125,[14.017095703125],,kWh,0.00020098744131669263,2.215890224457214e-05,6.397227340000644e-05,0.0002871186169612712,,MB,1341.804544,5379.063808,0.0,4963.958784,4656.87808,s,10,9.599362548828124,0.9599362548828123,0.004207139081396536,0.960638916015625,0.9632162780761719,0.9642642364501953,0.965102603149414,"[0.9493804931640625, 0.9567501831054688, 0.9587097778320313, 0.9598460083007813, 0.959911376953125, 0.962509521484375, 0.9625931396484375, 0.961366455078125, 0.9629833984375, 
0.9653121948242187]",tokens/s,266.6843748195052,kWh,2.8003677736362053e-05,3.088335264013631e-06,1.8523575424909828e-05,4.961558842528552e-05,tokens/kWh,5159668.727611726,MB,1379.155968,5381.16096,0.0,4963.958784,4656.88064,s,10,44.98382958984375,4.498382958984375,0.003254590900469086,4.49684375,4.504073779296875,4.504460717773437,4.5047702685546875,"[4.4967802734375, 4.4960576171875, 4.49970751953125, 4.4956572265625, 4.49520947265625, 4.49658837890625, 4.50398779296875, 4.50484765625, 4.4969072265625, 4.49808642578125]",tokens/s,14.005032602698607,kWh,0.00013180309437114043,1.4538250963904266e-05,8.730409257048944e-05,0.0002336454379055342,tokens/kWh,269639.3328487402,,s,630,44.981566864013686,0.07139931248256139,0.0013202360902917706,0.07120065689086913,0.07150878753662109,0.07178218078613281,0.08133764152526855,"[0.08207974243164062, 0.0723947525024414, 0.07184178924560547, 0.07141785430908203, 0.07087718200683593, 0.07083417510986328, 0.07090790557861328, 0.07100006103515626, 0.07090585327148438, 0.07086489868164063, 0.07095500946044922, 0.07090995025634765, 0.07091776275634766, 0.07092876434326172, 0.07113481903076171, 0.07095110321044922, 0.07097776031494141, 0.07101644897460938, 0.07102019500732422, 0.07100844573974609, 0.07103202819824218, 0.07103788757324218, 0.07109017944335938, 0.0710307846069336, 0.07107379150390625, 0.07103488159179687, 0.0710635528564453, 0.07164723205566406, 0.07111885070800782, 0.07122329711914062, 0.07113113403320312, 0.07105926513671874, 0.07127468872070312, 0.07123967742919922, 0.07117596435546875, 0.07118051147460938, 0.07120182037353516, 0.07128348541259766, 0.07129106903076173, 0.07126972961425782, 0.0711973419189453, 0.07125987243652344, 0.07119462585449218, 0.07131731414794922, 0.07127056121826172, 0.07120313262939452, 0.07123353576660156, 0.07116287994384765, 0.07125708770751953, 0.0712882843017578, 0.07123932647705078, 0.07123583984375, 0.0712853775024414, 0.0713154525756836, 0.0712943344116211, 0.07126694488525391, 0.07128268432617188, 0.07131954956054687, 0.07142195129394531, 0.07170662689208984, 0.07142310333251953, 0.07145523071289063, 0.07150220489501953, 0.08132959747314453, 0.07258179473876954, 0.07189504241943359, 0.07151618957519532, 0.0709672622680664, 0.07090995025634765, 0.0708667221069336, 0.07089737701416016, 0.07098931121826171, 0.07093740844726562, 0.0709850845336914, 0.07097756958007813, 0.07101324462890625, 0.07094649505615234, 0.07094703674316406, 0.0709111328125, 0.07096173095703125, 0.07096963500976562, 0.07096115112304688, 0.07097113800048828, 0.07099622344970703, 0.07107379150390625, 0.07101849365234375, 0.07100006103515626, 0.0710423355102539, 0.07107862091064453, 0.07104857635498046, 0.07106559753417968, 0.07117871856689453, 0.07110262298583984, 0.07107321929931641, 0.071002685546875, 0.0711720962524414, 0.07116799926757812, 0.0710983657836914, 0.07114112091064453, 0.07123776245117187, 0.07122303771972656, 0.07125385284423828, 0.07115433502197266, 0.07113100433349609, 0.07119225311279297, 0.07127276611328125, 0.071193603515625, 0.071193603515625, 0.07123136138916016, 0.07125004577636719, 0.07130429077148437, 0.07128975677490235, 0.07137484741210938, 0.07125389099121093, 0.07123980712890625, 0.07135846710205078, 0.07146060943603516, 0.07143456268310547, 0.07147513580322265, 0.07146611022949219, 0.07143309020996094, 0.07144652557373046, 0.07152025604248047, 0.07155830383300782, 0.0715535659790039, 0.07152057647705078, 0.08234060668945313, 0.07242076873779296, 0.07186287689208984, 0.07147721862792969, 0.07107308959960937, 
0.07102947235107422, 0.07125196838378907, 0.07102486419677734, 0.07113091278076172, 0.07118233489990235, 0.07114921569824219, 0.07108029174804688, 0.07128473663330079, 0.07168131256103516, 0.0715472640991211, 0.07118819427490235, 0.07116454315185547, 0.0710979232788086, 0.07112076568603516, 0.07135494232177735, 0.07113849639892578, 0.07111353302001953, 0.07111475372314453, 0.07105270385742188, 0.07116028594970703, 0.07113849639892578, 0.07109728240966796, 0.0711671371459961, 0.07119139099121094, 0.07115078735351563, 0.07112764739990235, 0.07106079864501953, 0.07112796783447266, 0.07117823791503906, 0.07113318634033203, 0.07110617828369141, 0.0711497573852539, 0.07115805053710937, 0.07121295928955078, 0.07113932800292969, 0.07112477111816407, 0.07119894409179688, 0.0712437744140625, 0.07126345825195313, 0.07122614288330079, 0.07119174194335938, 0.07119660949707031, 0.07131430053710938, 0.07134207916259766, 0.07136870574951172, 0.07132067108154297, 0.07128361511230469, 0.07122284698486328, 0.07130732727050781, 0.07129452514648438, 0.07124870300292968, 0.07120800018310547, 0.07128793334960938, 0.0712681884765625, 0.07137862396240234, 0.07133155059814453, 0.07134799957275391, 0.07133673858642578, 0.08090726470947265, 0.07245516967773437, 0.07183152008056641, 0.07141993713378907, 0.0708439712524414, 0.07097500610351562, 0.07100713348388672, 0.07089974212646484, 0.07096038055419922, 0.070978271484375, 0.07096934509277343, 0.0708853759765625, 0.0709339828491211, 0.07097126770019531, 0.07101302337646484, 0.0709054412841797, 0.07100787353515625, 0.07105187225341797, 0.07104275512695313, 0.07105750274658203, 0.07105372619628907, 0.07106559753417968, 0.07115122985839843, 0.07097996520996094, 0.07097151947021485, 0.07107577514648437, 0.07110140991210938, 0.07105548858642578, 0.07104393768310546, 0.07112825775146485, 0.07113533020019532, 0.07105155181884766, 0.07109062194824219, 0.07113254547119141, 0.07119667053222656, 0.07118502044677734, 0.07112006378173828, 0.0711258544921875, 0.0711495361328125, 0.07119404602050781, 0.07119725036621094, 0.07118643188476563, 0.071235107421875, 0.07125011444091797, 0.07127222442626953, 0.07136466979980469, 0.07132383728027344, 0.0712738265991211, 0.07126230621337891, 0.07132243347167969, 0.0713154525756836, 0.07131747436523438, 0.07128463745117188, 0.07142412567138672, 0.07140780639648438, 0.07131855773925781, 0.07144732666015625, 0.07158534240722657, 0.07145622253417969, 0.07146192169189453, 0.07156729888916015, 0.0715508804321289, 0.07147325134277344, 0.08159056091308593, 0.07220995330810547, 0.07162918090820312, 0.07129251098632812, 0.07083494567871093, 0.0709095687866211, 0.07110489654541016, 0.0709959716796875, 0.07094271850585937, 0.07100994873046874, 0.07101644897460938, 0.07091849517822266, 0.07092018890380859, 0.07119257354736327, 0.07104630279541016, 0.0709268798828125, 0.07099014282226562, 0.07106764984130859, 0.07106764984130859, 0.07096524810791016, 0.07095484924316406, 0.07104118347167969, 0.07105126190185547, 0.07100985717773438, 0.07100633239746093, 0.07107411193847656, 0.07111065673828125, 0.07111698913574219, 0.07113072204589843, 0.07112633514404297, 0.07116051483154297, 0.07112111663818359, 0.07105439758300781, 0.0710552978515625, 0.07120764923095703, 0.07121334075927735, 0.07133961486816406, 0.071248291015625, 0.07128441619873047, 0.07120223999023438, 0.07125491333007812, 0.07132096099853516, 0.07121984100341797, 0.0712127685546875, 0.07124559783935547, 0.07126850891113282, 0.07125135803222657, 0.07117046356201172, 0.07122300720214844, 
0.07120365142822266, 0.07126198577880859, 0.07132367706298828, 0.0712799072265625, 0.07124060821533203, 0.07127859497070313, 0.07134207916259766, 0.07146288299560546, 0.07144236755371093, 0.0714297637939453, 0.0713814697265625, 0.07132281494140626, 0.07135699462890625, 0.07136691284179687, 0.08126614379882813, 0.07237100982666016, 0.07177420806884766, 0.07147901153564454, 0.07093820953369141, 0.07087174224853515, 0.070998046875, 0.07095702362060546, 0.07098892974853516, 0.07099839782714844, 0.07094937896728516, 0.07099334716796875, 0.07097756958007813, 0.07094105529785157, 0.07096720123291016, 0.07099398040771485, 0.07103711700439454, 0.07095295715332031, 0.07098572540283203, 0.07108198547363281, 0.07107174682617187, 0.07170387268066407, 0.07097580718994141, 0.071162109375, 0.0711251220703125, 0.07110246276855468, 0.07105299377441407, 0.0710854721069336, 0.07113616180419922, 0.07129702758789062, 0.07121305847167969, 0.07104716491699219, 0.07114134216308594, 0.07119465637207031, 0.07114342498779297, 0.07117391967773437, 0.07105884552001954, 0.0711561279296875, 0.07126876831054688, 0.07150387573242188, 0.07131110382080078, 0.07119283294677735, 0.07120054626464843, 0.07121942138671875, 0.07127654266357422, 0.07130239868164062, 0.0711956787109375, 0.07115142059326172, 0.07122720336914062, 0.07131964874267578, 0.07134207916259766, 0.071228515625, 0.07127753448486328, 0.07129081726074218, 0.07135337829589844, 0.07134307098388672, 0.0713359375, 0.07139657592773438, 0.07136540985107422, 0.07139737701416016, 0.07143628692626953, 0.07150796508789062, 0.07156326293945313, 0.08143714904785156, 0.07251763153076173, 0.07190860748291016, 0.07154150390625, 0.07095884704589844, 0.07099622344970703, 0.07095846557617187, 0.07097357177734374, 0.07109069061279297, 0.07097958374023437, 0.07102259063720703, 0.07111270141601563, 0.07110038757324219, 0.07106531524658204, 0.07108230590820312, 0.07108185577392578, 0.07111427307128906, 0.07151676940917968, 0.07146495819091797, 0.07114752197265625, 0.07110352325439454, 0.07122198486328125, 0.07119414520263671, 0.07134671783447266, 0.07116790771484376, 0.07126044464111328, 0.07125811004638671, 0.07124787139892579, 0.07124166107177735, 0.07119888305664063, 0.07123139190673829, 0.07118233489990235, 0.0712449951171875, 0.07130604553222657, 0.07127142333984375, 0.07127756500244141, 0.07126016235351562, 0.07135151672363281, 0.0713506851196289, 0.07128694152832031, 0.07128905487060547, 0.07140493011474609, 0.0713601303100586, 0.07147993469238281, 0.07136089324951173, 0.07138098907470704, 0.0713338851928711, 0.07126585388183594, 0.071416259765625, 0.07142326354980469, 0.07133430480957031, 0.07191484832763671, 0.07200662231445312, 0.07158902740478515, 0.07141395568847657, 0.07146963500976562, 0.07141295623779297, 0.07162764739990235, 0.07146701049804688, 0.07150521850585938, 0.07140563201904297, 0.07140006256103516, 0.07142530822753906, 0.08148569488525391, 0.07252992248535156, 0.07182950592041015, 0.07146867370605468, 0.07093820953369141, 0.07091673278808594, 0.07089532470703125, 0.07092848205566406, 0.07153289794921874, 0.07097344207763671, 0.0709664306640625, 0.07092278289794922, 0.0709942398071289, 0.07099187469482422, 0.07099507141113282, 0.07106790161132813, 0.07109081268310546, 0.07114316558837891, 0.07107609558105468, 0.07095088195800782, 0.07106768035888672, 0.07110246276855468, 0.07114342498779297, 0.07101805114746093, 0.07104316711425782, 0.07112329864501953, 0.07115366363525391, 0.07122287750244141, 0.07121324920654297, 0.07122557067871094, 0.0712806396484375, 
0.07115366363525391, 0.07118029022216797, 0.07117814636230468, 0.07117814636230468, 0.07127420806884766, 0.0712422103881836, 0.07132982635498047, 0.07126579284667969, 0.07126659393310547, 0.07131884765625, 0.07138150024414062, 0.07132787322998047, 0.07129094696044921, 0.07134806060791016, 0.07137315368652344, 0.0714260482788086, 0.07140755462646485, 0.07143840026855469, 0.07138409423828125, 0.07139631652832032, 0.07144191741943359, 0.07138739013671876, 0.07187481689453125, 0.074884765625, 0.07146121978759766, 0.07143833923339844, 0.07166726684570313, 0.07152480316162109, 0.07150387573242188, 0.0715379867553711, 0.07173804473876953, 0.07169020843505859, 0.0815770263671875, 0.07246745300292969, 0.07186358642578125, 0.07142060852050781, 0.07087638092041015, 0.0709415054321289, 0.07088742065429687, 0.07088333129882812, 0.07111452484130859, 0.07099209594726562, 0.07110041809082031, 0.07114752197265625, 0.07120076751708984, 0.07114752197265625, 0.07099183654785156, 0.07094700622558593, 0.0710203857421875, 0.07103465270996094, 0.07095945739746094, 0.07092422485351563, 0.07111017608642578, 0.07109270477294923, 0.07104300689697265, 0.07097055816650391, 0.07101318359375, 0.07114093017578126, 0.07111430358886718, 0.07105235290527344, 0.07112655639648438, 0.07117158508300782, 0.07116060638427735, 0.07107379150390625, 0.07109974670410156, 0.07111468505859375, 0.071172607421875, 0.07116966247558594, 0.07114307403564453, 0.07115872192382812, 0.07120207977294922, 0.07123193359375, 0.07126659393310547, 0.07115891265869141, 0.07122418975830078, 0.07126834869384766, 0.07126834869384766, 0.07128268432617188, 0.07131340789794922, 0.07117823791503906, 0.07126998138427734, 0.07134841918945313, 0.0712962875366211, 0.07128774261474609, 0.07123734283447265, 0.07131574249267578, 0.07135001373291015, 0.07146316528320312, 0.07139532470703125, 0.07137484741210938, 0.07140726470947266, 0.07139593505859375, 0.07157907104492188, 0.0715840606689453, 0.07155481719970704, 0.08134092712402344, 0.07245193481445313, 0.07178870391845703, 0.07153590393066406, 0.07091506958007812, 0.07092809295654297, 0.07100339508056641, 0.07101478576660156, 0.07097513580322265, 0.0709908447265625, 0.07107328033447266, 0.0710389404296875, 0.07103683471679688, 0.07108029174804688, 0.0712437744140625, 0.07105481719970703, 0.07101084899902343, 0.0709939193725586, 0.07109222412109376, 0.07105865478515624, 0.07110095977783203, 0.07152451324462891, 0.07111481475830078, 0.07110476684570312, 0.07114729309082031, 0.07110655975341797, 0.07104463958740234, 0.07112445068359376, 0.07123046112060546, 0.07116550445556641, 0.07111833953857422, 0.07105375671386718, 0.07114393615722656, 0.0712273941040039, 0.07111004638671875, 0.07117855834960937, 0.07123190307617187, 0.07130406188964844, 0.0712671661376953, 0.07117430114746094, 0.07120896148681641, 0.07120800018310547, 0.07122966766357422, 0.07123426818847656, 0.07146495819091797, 0.07135382080078125, 0.07124559783935547, 0.07129488372802735, 0.0712896957397461, 0.07136434936523438, 0.0713229751586914, 0.07138601684570313, 0.0713700180053711, 0.07134076690673828, 0.07133206176757813, 0.07134941101074219, 0.07136700439453125, 0.07141165161132812, 0.07133609771728516, 0.07134595489501953, 0.07137865447998047, 0.0714793930053711, 0.0714593276977539]",tokens/s,14.005737103480381,, 
4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,821.395456,1350.434816,0.0,968.884224,920.384512,s,1,8.726904296875,8.726904296875,0.0,8.726904296875,8.726904296875,8.726904296875,8.726904296875,[8.726904296875],,kWh,5.176226294170192e-05,5.702535817235163e-06,1.72400137920109e-05,7.470481255094798e-05,,MB,1287.241728,1568.538624,0.0,1153.4336,1097.719808,s,10,1.8651453094482422,0.18651453094482423,0.0004514559277895472,0.186549072265625,0.18708944396972654,0.18711813201904295,0.1871410824584961,"[0.18646949768066406, 0.18641862487792968, 0.18621066284179688, 0.18662864685058594, 0.18669874572753906, 0.18708306884765624, 0.185677978515625, 0.18714682006835937, 0.18593603515625, 0.18687522888183594]",tokens/s,1372.5472149713169,kWh,5.630550226202156e-06,6.209459106712036e-07,3.751012616192567e-06,1.0002508753065927e-05,tokens/kWh,25593579.20297065,MB,1320.157184,1572.732928,0.0,1155.530752,1097.722368,s,10,19.33783679199219,1.933783679199219,0.013552359617725022,1.9375467529296875,1.9477151123046876,1.95120283203125,1.9539930078125,"[1.9376134033203125, 1.924672119140625, 1.919724853515625, 1.9188377685546876, 1.911187744140625, 1.9427630615234375, 1.9439271240234375, 1.9546905517578126, 1.9374801025390624, 1.9469400634765626]",tokens/s,32.578618114146224,kWh,5.641915490963031e-05,6.222873935058748e-06,2.8803513427406305e-05,9.144554227209538e-05,tokens/kWh,688934.6209194547,,s,630,19.335078693389914,0.030690601100618876,0.0004921357347092788,0.030652815818786622,0.031200690841674806,0.03141890420913696,0.032633227539062504,"[0.031379583358764646, 0.03094937515258789, 0.030713855743408205, 0.030681312561035155, 0.030423904418945314, 0.030802879333496094, 0.03079910469055176, 0.030728063583374023, 0.03062281608581543, 0.03048214340209961, 0.03039788818359375, 0.03031475257873535, 0.030439807891845704, 0.030424543380737305, 0.030378528594970703, 0.030304256439208983, 0.03021824073791504, 0.030345216751098632, 0.0307993278503418, 0.030843135833740234, 0.0309671688079834, 0.03099068832397461, 0.031214143753051756, 0.031096960067749025, 0.031110464096069337, 0.03133472061157227, 0.031027456283569337, 0.030696544647216797, 0.030871519088745115, 0.031075263977050783, 0.031004127502441407, 0.030722591400146486, 0.03084492874145508, 0.03056585693359375, 0.030919200897216798, 0.03122790336608887, 0.03134169578552246, 0.031031200408935547, 0.03107734489440918, 0.03120915222167969, 0.031223487854003907, 0.030999168395996094, 0.030975296020507814, 0.03448105621337891, 0.03090380859375, 0.030679168701171874, 0.030730112075805664, 0.030370080947875976, 0.03034726333618164, 0.03016851234436035, 0.030140735626220702, 0.030324031829833984, 0.03055695915222168, 0.03032636833190918, 0.030241567611694335, 0.030230016708374025, 0.030152416229248045, 0.030171167373657225, 0.030699520111083983, 0.030323232650756836, 0.030334880828857422, 0.03036169624328613, 0.030259456634521485, 
0.03063408088684082, 0.03034511947631836, 0.030441471099853516, 0.030285856246948243, 0.030037567138671874, 0.030176959991455077, 0.030206687927246095, 0.02998886489868164, 0.03003932762145996, 0.03005414390563965, 0.030448192596435546, 0.030342784881591797, 0.030345951080322266, 0.030431295394897463, 0.03031475257873535, 0.03023232078552246, 0.030744575500488282, 0.031074304580688477, 0.030424863815307616, 0.03085510444641113, 0.030734527587890626, 0.03077654457092285, 0.03141926383972168, 0.031180639266967774, 0.031156383514404296, 0.030756864547729492, 0.030615264892578126, 0.030168895721435548, 0.03049443244934082, 0.03029631996154785, 0.03024278450012207, 0.03024745559692383, 0.030314336776733397, 0.030214303970336913, 0.030203903198242187, 0.030340543746948244, 0.030282304763793944, 0.030338560104370117, 0.030272064208984376, 0.03034864044189453, 0.03026585578918457, 0.030260608673095702, 0.03026812744140625, 0.030383136749267577, 0.030212831497192384, 0.030248319625854492, 0.03018841552734375, 0.030283775329589844, 0.030279136657714843, 0.03067958450317383, 0.030525440216064452, 0.03055820846557617, 0.030871551513671876, 0.031197183609008788, 0.03126041603088379, 0.03140012741088867, 0.031102336883544923, 0.03148255920410156, 0.03146124839782715, 0.031055456161499025, 0.03119977569580078, 0.030935039520263673, 0.03042483139038086, 0.03125916862487793, 0.030587167739868165, 0.03039232063293457, 0.030242944717407228, 0.030216192245483397, 0.030126047134399415, 0.03025926399230957, 0.03004412841796875, 0.030093311309814453, 0.029995008468627928, 0.03006857681274414, 0.030136480331420898, 0.030523391723632814, 0.030105600357055663, 0.0301711368560791, 0.030109695434570313, 0.030304256439208983, 0.030128095626831056, 0.03057196807861328, 0.03044927978515625, 0.03039126396179199, 0.03017487907409668, 0.030374176025390626, 0.0319322566986084, 0.030636255264282226, 0.0304737606048584, 0.030716608047485352, 0.03034294319152832, 0.03048851203918457, 0.030442848205566406, 0.030212831497192384, 0.030357503890991212, 0.03045984077453613, 0.030316608428955078, 0.030822399139404297, 0.03035273551940918, 0.030761632919311523, 0.030402208328247072, 0.030349184036254882, 0.030488479614257814, 0.030249536514282225, 0.030320640563964843, 0.030312448501586913, 0.030428384780883787, 0.03070572853088379, 0.030903263092041014, 0.030698591232299805, 0.031136415481567384, 0.030715871810913085, 0.03052547264099121, 0.030353023529052735, 0.030794111251831055, 0.030531360626220704, 0.030331104278564454, 0.030276704788208007, 0.030153631210327148, 0.03060326385498047, 0.030785535812377928, 0.030431232452392577, 0.03021004867553711, 0.030231807708740236, 0.03028668785095215, 0.032123905181884765, 0.031130048751831056, 0.03050716781616211, 0.03043280029296875, 0.03029609680175781, 0.03017363166809082, 0.030007295608520508, 0.030184959411621092, 0.030085535049438478, 0.030164608001708983, 0.03113417625427246, 0.030058143615722656, 0.029983072280883788, 0.030046207427978516, 0.030027776718139648, 0.02998601531982422, 0.030540639877319337, 0.030111679077148436, 0.0300382080078125, 0.030085952758789062, 0.03018035125732422, 0.030085119247436523, 0.030138368606567382, 0.030211423873901366, 0.030046016693115234, 0.030016544342041016, 0.030033664703369142, 0.03007494354248047, 0.02995574378967285, 0.030015840530395507, 0.0299804801940918, 0.03247123336791992, 0.030199712753295898, 0.030174367904663085, 0.030276544570922853, 0.030224384307861327, 0.030265344619750976, 0.030305599212646483, 0.03018332862854004, 
0.030150848388671873, 0.030167648315429688, 0.03014041519165039, 0.031092575073242187, 0.032049312591552734, 0.03035772705078125, 0.030229312896728516, 0.030419488906860352, 0.03021366310119629, 0.03021843147277832, 0.03083718490600586, 0.030700096130371092, 0.030489408493041992, 0.030372543334960936, 0.030275680541992187, 0.031033439636230467, 0.033351711273193356, 0.03200543975830078, 0.03166278457641602, 0.031065088272094726, 0.03056537628173828, 0.030338272094726563, 0.030363712310791015, 0.030384864807128906, 0.0302325439453125, 0.030421695709228515, 0.03028000068664551, 0.030248096466064453, 0.03024924850463867, 0.029990272521972658, 0.030251840591430663, 0.03037308883666992, 0.03048534393310547, 0.030418912887573243, 0.030367776870727538, 0.030352991104125978, 0.03041321563720703, 0.0306177921295166, 0.03048019218444824, 0.030449663162231445, 0.030664703369140626, 0.03068726348876953, 0.03047817611694336, 0.03038751983642578, 0.030260032653808593, 0.030457855224609375, 0.03072371292114258, 0.030242975234985352, 0.030326175689697265, 0.030222143173217773, 0.030249984741210937, 0.030232576370239257, 0.03018342399597168, 0.030507007598876954, 0.03016841506958008, 0.03014313507080078, 0.03026972770690918, 0.030432992935180665, 0.03047817611694336, 0.030377311706542967, 0.030230688095092773, 0.030102176666259764, 0.030107040405273438, 0.030116447448730467, 0.030439424514770507, 0.03022233581542969, 0.03025833511352539, 0.03067990493774414, 0.030279680252075194, 0.03027574348449707, 0.030340959548950195, 0.030218496322631835, 0.030333791732788086, 0.030395200729370117, 0.03036582374572754, 0.030633535385131836, 0.03027769660949707, 0.030073183059692383, 0.030160959243774415, 0.030597055435180664, 0.030412288665771486, 0.030286336898803713, 0.030184703826904295, 0.030241439819335938, 0.030330976486206054, 0.030177120208740235, 0.030212255477905275, 0.030103551864624024, 0.031564895629882815, 0.031189535140991213, 0.03252467346191406, 0.031213567733764647, 0.03096291160583496, 0.03082524871826172, 0.03055411148071289, 0.030427135467529298, 0.0304552001953125, 0.030552671432495116, 0.030557472229003905, 0.03057711982727051, 0.03042313575744629, 0.030468511581420898, 0.0306376953125, 0.030644351959228516, 0.03073023986816406, 0.03086454391479492, 0.031212383270263672, 0.031495744705200196, 0.03124064064025879, 0.031498239517211916, 0.031178911209106444, 0.031120351791381836, 0.031016895294189453, 0.031002912521362305, 0.03300780868530274, 0.03330915069580078, 0.03100192070007324, 0.030909183502197266, 0.030942848205566406, 0.03077952003479004, 0.03083251190185547, 0.03107574462890625, 0.030855840682983398, 0.030950656890869142, 0.03076198387145996, 0.03072140884399414, 0.030622335433959962, 0.03075584030151367, 0.030810335159301757, 0.030628639221191405, 0.03082854461669922, 0.030619647979736327, 0.030426591873168946, 0.030591520309448242, 0.030555904388427733, 0.030303552627563478, 0.03032713508605957, 0.030567295074462892, 0.030530431747436523, 0.030572576522827147, 0.03050169563293457, 0.03076815986633301, 0.030427488327026367, 0.0302938232421875, 0.030239551544189454, 0.03036489677429199, 0.030336959838867188, 0.030337120056152345, 0.030325504302978517, 0.030332063674926756, 0.030345151901245117, 0.031513023376464847, 0.03129942321777344, 0.0330873908996582, 0.03141846466064453, 0.031599807739257815, 0.03147039985656738, 0.031139776229858397, 0.030799936294555665, 0.031006719589233397, 0.030688512802124025, 0.030874208450317384, 0.030552223205566408, 0.030648319244384766, 
0.03072393608093262, 0.030734399795532226, 0.03064431953430176, 0.030666303634643555, 0.03126931190490723, 0.030707103729248047, 0.030614112854003905, 0.030664512634277344, 0.030587072372436522, 0.03055001640319824, 0.030733823776245117, 0.030634496688842775, 0.03087593650817871, 0.031141056060791015, 0.030816511154174806, 0.030720256805419923, 0.030713888168334962, 0.03064451217651367, 0.031100639343261717, 0.030531583786010744, 0.030734336853027344, 0.031145248413085937, 0.03104614448547363, 0.03096169662475586, 0.030884031295776368, 0.031006719589233397, 0.03131747245788574, 0.031117855072021486, 0.031208927154541016, 0.030956064224243164, 0.03056844711303711, 0.030543872833251953, 0.030617855072021485, 0.030526880264282227, 0.030517599105834962, 0.03056435203552246, 0.030627840042114256, 0.0305699520111084, 0.030741024017333984, 0.030668800354003906, 0.030539775848388673, 0.030611328125, 0.03076736068725586, 0.030695295333862303, 0.03059712028503418, 0.030481760025024413, 0.03060153579711914, 0.030531904220581055, 0.030746368408203124, 0.030576480865478515, 0.03069580841064453, 0.031334175109863284, 0.03083888053894043, 0.030959808349609375, 0.031098400115966797, 0.030816320419311524, 0.030767520904541015, 0.030979360580444336, 0.030720640182495117, 0.030645471572875976, 0.030696319580078124, 0.030711807250976563, 0.03073174476623535, 0.03073823928833008, 0.030769472122192384, 0.03084534454345703, 0.030832223892211914, 0.030722463607788086, 0.030736383438110353, 0.030736383438110353, 0.030698623657226563, 0.03079257583618164, 0.030785535812377928, 0.030865407943725585, 0.030980096817016602, 0.031082271575927734, 0.03108412742614746, 0.03134105682373047, 0.031284927368164066, 0.03134207916259766, 0.030997631072998046, 0.030844320297241212, 0.03078175926208496, 0.031364320755004886, 0.031088544845581056, 0.03080019187927246, 0.03081488037109375, 0.031135744094848632, 0.031257823944091795, 0.030804767608642578, 0.030705663681030275, 0.030741888046264647, 0.031114080429077148, 0.03267756652832031, 0.031172704696655274, 0.030744575500488282, 0.0317255687713623, 0.03218025588989258, 0.03367919921875, 0.03115430450439453, 0.030896127700805662, 0.03096540832519531, 0.030865760803222658, 0.03070921516418457, 0.030843423843383788, 0.0307174072265625, 0.030674688339233397, 0.0309399356842041, 0.031148031234741212, 0.03117465591430664, 0.03115443229675293, 0.030916351318359375, 0.031001792907714844, 0.03077350425720215, 0.03114028739929199, 0.03079187202453613, 0.030584640502929687, 0.031340543746948245, 0.03080745506286621, 0.030743135452270507, 0.030569984436035157, 0.03064678382873535, 0.030519327163696288, 0.030588415145874022, 0.03051568031311035, 0.03073023986816406, 0.030698976516723632, 0.030730783462524416, 0.030703615188598633, 0.030620895385742187, 0.030524192810058595, 0.030539775848388673, 0.030507007598876954, 0.030552352905273437, 0.032089569091796874, 0.031608640670776365, 0.03081260871887207, 0.030699520111083983, 0.03075641632080078, 0.030583328247070312, 0.03069532775878906, 0.030888191223144533, 0.030736127853393556, 0.030544992446899413, 0.030710687637329103, 0.03095347213745117, 0.031483680725097656, 0.0313590087890625, 0.03131001663208008, 0.03073023986816406, 0.030819839477539062, 0.030715776443481446, 0.03069196891784668, 0.031055839538574218, 0.03064195251464844, 0.03070748710632324, 0.030624223709106446, 0.030745695114135742, 0.030628768920898438, 0.030660608291625976, 0.030809152603149415, 0.0305284481048584, 0.030694623947143555, 0.03065324783325195, 
0.03045987129211426, 0.030451200485229493, 0.030501375198364256, 0.030601247787475586, 0.03065238380432129, 0.03067068862915039, 0.03057174491882324, 0.030668991088867188, 0.03046668815612793, 0.03062915229797363, 0.030511232376098634, 0.03044550323486328, 0.030755231857299805, 0.030816160202026367, 0.03070751953125, 0.03105695915222168, 0.03070332717895508, 0.030674943923950194, 0.030719999313354493, 0.030859264373779297, 0.030728191375732423, 0.03074480056762695, 0.030865184783935545, 0.031170560836791993, 0.031214752197265626, 0.03099545669555664, 0.03081609535217285, 0.03078348731994629, 0.030648319244384766, 0.030782655715942384, 0.0307392635345459, 0.030672607421875, 0.03076134490966797, 0.03090377616882324, 0.030650815963745116, 0.03083375930786133, 0.03080284881591797, 0.03078057670593262, 0.03076940727233887, 0.030765439987182615, 0.030847200393676756, 0.03133750343322754, 0.030930944442749023, 0.030726816177368162, 0.03073366355895996, 0.030847967147827147, 0.03075071907043457, 0.030772703170776367, 0.030773599624633788, 0.030728384017944334, 0.03075600051879883, 0.030829408645629882, 0.0307589111328125, 0.030701087951660155, 0.030724576950073242, 0.03078550338745117, 0.03072982406616211, 0.03079212760925293, 0.030827936172485353, 0.03075337600708008, 0.031053823471069338, 0.031129600524902344, 0.031111167907714843, 0.03162675285339355, 0.031779327392578126, 0.031526912689208986, 0.03148819160461426, 0.031307775497436525, 0.031080223083496093, 0.031037471771240235, 0.030885440826416016, 0.030767551422119142, 0.031246335983276367, 0.030916608810424805, 0.030877695083618165]",tokens/s,32.583265369143746,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,2525.478912,4618.911744,0.0,4223.664128,4030.321664,s,1,12.98856640625,12.98856640625,0.0,12.98856640625,12.98856640625,12.98856640625,12.98856640625,[12.98856640625],,kWh,0.0001635869273792044,1.8036465604369006e-05,5.936421415800308e-05,0.00024098760714157648,,MB,2266.554368,5021.564928,0.0,4613.7344,4385.21344,s,10,4.838052032470704,0.4838052032470704,0.001823344870428049,0.4842698822021484,0.4858554473876953,0.486376789855957,0.4867938638305664,"[0.48689813232421875, 0.4818007507324219, 0.48216598510742187, 0.48156146240234377, 0.48153952026367186, 0.48395285034179686, 0.4857395935058594, 0.4845869140625, 0.4848028259277344, 0.4850039978027344]",tokens/s,529.1385836321101,kWh,1.4523641770832303e-05,1.6012413217024642e-06,9.68122467618851e-06,2.5806107768723277e-05,tokens/kWh,9920132.175463874,MB,2295.25504,5023.66208,0.0,4615.831552,4385.216,s,10,44.64654589843751,4.464654589843751,0.01035591943382472,4.462802734375,4.478136865234375,4.480356762695312,4.482132680664062,"[4.48257666015625, 4.465416015625, 4.467009765625, 4.4586259765625, 4.47295849609375, 4.44545654296875, 4.460189453125, 4.4776435546875, 4.45950244140625, 
4.4571669921875]",tokens/s,14.110834048240404,kWh,0.00013087549349458487,1.4436641335989185e-05,7.927101315221339e-05,0.00022458314798278747,tokens/kWh,280519.71203479817,,s,630,44.64342021179197,0.0708625717647492,0.0007126091269215054,0.07069612884521484,0.07140072250366211,0.07209344902038574,0.074042163772583,"[0.07142793273925781, 0.07068627166748047, 0.07074208068847657, 0.07168669128417969, 0.0706246109008789, 0.07109014129638672, 0.0752339859008789, 0.07027507019042968, 0.07079116821289062, 0.07076169586181641, 0.07063565063476562, 0.07032486724853515, 0.07041161346435547, 0.07123628997802735, 0.0705820770263672, 0.07047596740722656, 0.0706984634399414, 0.07108668518066406, 0.07110444641113281, 0.07131459045410156, 0.07075721740722657, 0.07086080169677735, 0.07076019287109375, 0.07072089385986328, 0.07092339324951172, 0.07071241760253906, 0.07120355224609375, 0.07083615875244141, 0.07259750366210938, 0.07069081878662109, 0.07055506896972656, 0.07256326293945313, 0.07136460876464844, 0.070866943359375, 0.07139881896972657, 0.07094313812255859, 0.07096339416503906, 0.07097708892822266, 0.07068252563476562, 0.07061968231201173, 0.07880403137207032, 0.07280287933349609, 0.07096768188476563, 0.07074742126464843, 0.07064217376708984, 0.07061894226074218, 0.07052944183349609, 0.07043891143798828, 0.07056179046630859, 0.07122537231445313, 0.07080143737792968, 0.07058767700195312, 0.0709126739501953, 0.07315894317626953, 0.071231201171875, 0.07085260772705078, 0.07083766174316407, 0.07084095764160156, 0.07102051544189453, 0.07075043487548828, 0.07055648040771484, 0.0705914535522461, 0.0705987548828125, 0.07146530914306641, 0.07075555419921875, 0.07068144226074219, 0.07077279663085938, 0.07155506896972656, 0.07107174682617187, 0.07114752197265625, 0.07154278564453125, 0.0708619842529297, 0.07033344268798829, 0.0704776611328125, 0.07071481323242187, 0.07077740478515625, 0.07086835479736328, 0.07092444610595704, 0.07070563507080078, 0.07065727996826172, 0.07051945495605469, 0.0708109130859375, 0.07114806365966797, 0.07220867156982422, 0.07109356689453125, 0.07085126495361328, 0.0708505630493164, 0.07080941009521484, 0.07172223663330078, 0.0710973129272461, 0.07069283294677735, 0.07064780426025391, 0.07106124877929687, 0.07033452606201172, 0.07056329345703125, 0.07033277130126953, 0.07063088226318359, 0.07098655700683594, 0.07056092834472656, 0.07041734313964844, 0.07046710205078124, 0.07069487762451172, 0.07391693115234375, 0.07084416198730469, 0.07051494598388672, 0.07085244750976563, 0.07066336059570312, 0.07011631774902344, 0.07033184051513672, 0.07031561279296875, 0.07028630065917969, 0.07051673889160157, 0.0705692138671875, 0.07028607940673828, 0.07030374145507813, 0.07029759979248047, 0.0704368667602539, 0.07041228485107422, 0.07086080169677735, 0.0708853759765625, 0.0708016357421875, 0.07030691528320313, 0.07021762847900391, 0.07097142028808594, 0.07409331512451171, 0.07249305725097656, 0.07143424224853516, 0.07074610900878907, 0.07087049865722657, 0.07055209350585938, 0.07052214050292968, 0.07068959808349609, 0.0706869125366211, 0.07087484741210938, 0.07137184143066407, 0.0708371810913086, 0.07038288116455078, 0.07062995147705078, 0.07150406646728516, 0.07058739471435548, 0.07209193420410157, 0.07091065979003906, 0.07134815979003906, 0.07040582275390625, 0.07039955139160156, 0.07054589080810547, 0.07055753326416016, 0.07064832305908203, 0.07089971160888672, 0.07028096008300781, 0.07053670501708985, 0.07074073791503906, 0.0708116455078125, 0.07074179077148438, 0.07035107421875, 
0.07220223999023437, 0.0708136978149414, 0.07097548675537109, 0.07091404724121093, 0.0726178207397461, 0.07068073272705078, 0.07080095672607421, 0.07090774536132813, 0.07076310729980469, 0.07130726623535157, 0.07053311920166015, 0.07114777374267578, 0.07155072021484375, 0.07188668823242188, 0.07122102355957032, 0.07095948791503906, 0.07075635528564453, 0.07047372436523437, 0.07044873809814453, 0.07059292602539062, 0.0711905288696289, 0.07155046081542969, 0.07074428558349609, 0.07069725036621094, 0.07039081573486328, 0.07107683563232423, 0.07158112335205079, 0.07081017303466797, 0.0704901123046875, 0.07084646606445312, 0.0709959716796875, 0.07096524810791016, 0.07038982391357422, 0.07045113372802735, 0.07112054443359375, 0.07032115173339844, 0.07032627105712891, 0.0706388168334961, 0.07073792266845703, 0.07060275268554687, 0.07030659484863282, 0.0710770263671875, 0.0704623031616211, 0.07123558044433594, 0.07064575958251954, 0.07049420928955077, 0.07053276824951171, 0.07053135681152344, 0.0706785888671875, 0.07062493133544921, 0.07032252502441406, 0.07049436950683594, 0.07055753326416016, 0.0704040298461914, 0.07036524963378907, 0.07252582550048828, 0.07120706939697266, 0.07076592254638672, 0.07050495910644532, 0.07052288055419922, 0.0708152618408203, 0.0709513931274414, 0.07067852783203125, 0.07056787109375, 0.07051884460449219, 0.07078912353515625, 0.07112908935546874, 0.07061404418945312, 0.070600830078125, 0.07135727691650391, 0.07078272247314453, 0.0709145278930664, 0.07082351684570312, 0.07047801971435547, 0.07072940826416016, 0.07049782562255859, 0.07066413116455078, 0.07090873718261718, 0.07086083221435546, 0.070552734375, 0.07077721405029297, 0.07060527801513672, 0.0705249252319336, 0.07071903991699219, 0.07097395324707031, 0.07241091156005859, 0.07059085083007813, 0.07046121978759766, 0.07081910705566406, 0.0711605453491211, 0.07110246276855468, 0.07098178863525391, 0.07060377502441406, 0.0712364501953125, 0.07089084625244141, 0.07063961791992188, 0.07057625579833984, 0.07134169769287109, 0.07097318267822265, 0.07084662628173828, 0.07036799621582031, 0.0706409912109375, 0.07095958709716797, 0.07059571075439453, 0.07065885162353516, 0.07106320190429688, 0.07083830261230468, 0.07051251220703125, 0.0706739501953125, 0.07026166534423828, 0.07069302368164063, 0.07066607666015624, 0.07089356994628906, 0.07053517150878906, 0.07059455871582031, 0.07077683258056641, 0.071005859375, 0.07069452667236328, 0.07059324645996094, 0.07118643188476563, 0.07061238098144532, 0.07102845001220703, 0.07067273712158204, 0.07048451232910156, 0.07065705871582031, 0.07072866821289063, 0.07075750732421875, 0.07068899536132812, 0.07239103698730469, 0.07071769714355469, 0.07071699523925781, 0.07351356506347656, 0.07090089416503906, 0.0708831024169922, 0.07106861114501953, 0.07084207916259766, 0.07068841552734376, 0.07121369934082031, 0.07088925170898437, 0.07464572906494141, 0.07521279907226562, 0.07100550079345704, 0.0706116485595703, 0.07071539306640626, 0.07072096252441407, 0.0705910415649414, 0.07098566436767578, 0.07142940521240235, 0.07209468841552734, 0.07101200103759765, 0.07069500732421875, 0.07025465393066406, 0.07114137268066406, 0.0709591064453125, 0.07066012573242188, 0.07045718383789062, 0.07040217590332032, 0.07075820922851563, 0.0708834228515625, 0.07056966400146485, 0.07142809295654297, 0.07046546936035156, 0.07078713226318359, 0.07031193542480468, 0.07018291473388671, 0.07042457580566407, 0.07035660552978516, 0.07053526306152344, 0.07038745880126954, 0.07057472229003907, 0.07076649475097656, 
0.07033360290527343, 0.07034966278076171, 0.07026592254638672, 0.07045590209960938, 0.07009510040283203, 0.07058441925048828, 0.07030979156494141, 0.07018422698974609, 0.07020626831054687, 0.07141903686523438, 0.07040496063232422, 0.07144019317626953, 0.07343325042724609, 0.07129817962646484, 0.07028419494628907, 0.07038710021972656, 0.0702529296875, 0.07038899230957031, 0.07034764862060547, 0.07023216247558593, 0.07021692657470703, 0.07054006195068359, 0.07048191833496094, 0.0703272933959961, 0.07059353637695312, 0.0702784652709961, 0.07064765167236328, 0.07167062377929688, 0.070723388671875, 0.07070333099365235, 0.07077232360839844, 0.07072377777099609, 0.07058614349365235, 0.07063414764404297, 0.07063836669921875, 0.07028835296630859, 0.07028498840332031, 0.07024793243408203, 0.07062201690673828, 0.07011532592773438, 0.07038140869140624, 0.07075555419921875, 0.07050937652587891, 0.07034687805175781, 0.07022755432128906, 0.07036067199707031, 0.07034349060058594, 0.07044096374511719, 0.0705650863647461, 0.07081858825683594, 0.07020706939697266, 0.07021158599853515, 0.07096236419677734, 0.0704145278930664, 0.0713158721923828, 0.07061129760742188, 0.07017664337158203, 0.07040831756591796, 0.07153024291992187, 0.07133004760742187, 0.07097510528564453, 0.07037289428710937, 0.07066687774658204, 0.07092610931396484, 0.07070870208740235, 0.07027196502685547, 0.07040169525146485, 0.07048636627197266, 0.07035903930664063, 0.07049791717529297, 0.07046086120605469, 0.0705230712890625, 0.07023423767089844, 0.07049696350097656, 0.07059859466552734, 0.07045085144042969, 0.07089353942871093, 0.07105369567871093, 0.07047577667236328, 0.0702586898803711, 0.07043251037597656, 0.0703911361694336, 0.07066242980957031, 0.07089421081542968, 0.07199932861328125, 0.07080131530761719, 0.07094092559814454, 0.07093043518066407, 0.0730808334350586, 0.07124787139892579, 0.07081983947753906, 0.07071129608154297, 0.07064335632324219, 0.07046556854248047, 0.07041804504394532, 0.07070381164550782, 0.07068582153320313, 0.07333567810058594, 0.07108812713623047, 0.07037920379638672, 0.07080172729492187, 0.07075218963623046, 0.07120902252197266, 0.07094477081298828, 0.07118768310546875, 0.07095999908447266, 0.07060057830810547, 0.07037750244140625, 0.07032572937011719, 0.070506591796875, 0.07099231719970703, 0.07102198028564453, 0.07057437133789063, 0.07057395172119141, 0.07054771423339844, 0.07175580596923828, 0.07081254577636718, 0.07087935638427735, 0.07074301147460937, 0.07067945861816406, 0.0707460479736328, 0.07059811401367187, 0.07039475250244141, 0.07159164428710937, 0.07074816131591796, 0.07076777648925782, 0.07099273681640625, 0.07157759857177734, 0.07054541015625, 0.07107164764404297, 0.07052297973632812, 0.07049420928955077, 0.07063756561279297, 0.07048191833496094, 0.07070515441894532, 0.07049954986572265, 0.0703497314453125, 0.07041792297363281, 0.07064614105224609, 0.0743605728149414, 0.07126246643066406, 0.07112713623046875, 0.07099785614013672, 0.07064985656738282, 0.07053932952880859, 0.07277072143554687, 0.07059542083740235, 0.07107750701904297, 0.07070105743408203, 0.07057849884033203, 0.07053523254394531, 0.07065599822998046, 0.07082598114013672, 0.07135542297363281, 0.07117922973632812, 0.07072358703613281, 0.0710257568359375, 0.07160924530029297, 0.07080726623535157, 0.07448194885253906, 0.07059619140625, 0.07071135711669922, 0.07073545837402344, 0.07062937927246093, 0.07088998413085937, 0.07075225830078125, 0.07106735992431641, 0.07153862762451171, 0.07262064361572265, 0.07231488037109375, 
0.07083952331542968, 0.07068956756591797, 0.0714734115600586, 0.0712475814819336, 0.07144064331054688, 0.07090889739990235, 0.07123753356933593, 0.07112745666503906, 0.0716693115234375, 0.07091785430908203, 0.0713551025390625, 0.07081484985351562, 0.07135935974121094, 0.07079936218261719, 0.07024639892578124, 0.07066445159912109, 0.0706618881225586, 0.07078688049316406, 0.07049791717529297, 0.07132399749755859, 0.07137423706054688, 0.07059334564208984, 0.07080499267578125, 0.07037776184082031, 0.07055506896972656, 0.07070374298095704, 0.07093673706054687, 0.07076863861083985, 0.07032217407226563, 0.07019945526123048, 0.0703936996459961, 0.07029145812988281, 0.07045132446289062, 0.07063948822021485, 0.07061199951171875, 0.07039228820800782, 0.07030630493164063, 0.0704307861328125, 0.07062448120117187, 0.07021180725097656, 0.07095321655273437, 0.0707499237060547, 0.07210646057128907, 0.07132371520996093, 0.07019036865234375, 0.07308096313476563, 0.07116041564941407, 0.0705249252319336, 0.07141785430908203, 0.07107788848876953, 0.07069200134277344, 0.0709142074584961, 0.0705072021484375, 0.07089065551757813, 0.07252003479003906, 0.07098201751708984, 0.07065312194824219, 0.07062016296386718, 0.0703917465209961, 0.0703543701171875, 0.07040211486816406, 0.07037798309326172, 0.07085670471191406, 0.07096934509277343, 0.07034880065917969, 0.07049945831298828, 0.07056988525390626, 0.07042499542236329, 0.07063021087646484, 0.07052877044677734, 0.07037522888183594, 0.07129193878173828, 0.07080854034423828, 0.07057631683349609, 0.07086214447021484, 0.07057593536376953, 0.0708795166015625, 0.07116841888427734, 0.07114137268066406, 0.07068450927734375, 0.0706786880493164, 0.07074400329589844, 0.07097058868408203, 0.07106598663330078, 0.07151663970947265, 0.07091404724121093, 0.0706447982788086, 0.0706118392944336, 0.0704983673095703, 0.07052288055419922, 0.07061504364013672, 0.0703446044921875, 0.07058236694335937, 0.07039180755615235, 0.07038771057128906, 0.0713084487915039, 0.07264137268066406, 0.0722059555053711, 0.07157183837890625, 0.07116387176513672, 0.07062531280517578, 0.07033241271972657, 0.07024540710449219, 0.07049292755126953, 0.07048623657226563, 0.07038307189941406, 0.07030223846435547, 0.07042412567138671, 0.07016480255126953, 0.07041648101806641, 0.07060688018798827, 0.07093673706054687, 0.0708544921875, 0.07056934356689454, 0.0703348159790039, 0.07058460998535156, 0.07039900970458984, 0.07093756866455078, 0.0707747802734375, 0.07093193817138672, 0.07078092956542968, 0.07055142211914063, 0.070771484375, 0.07063056182861328, 0.0706645736694336, 0.07052323150634765, 0.07096729278564454, 0.0705261459350586, 0.07032710266113282, 0.0704901123046875, 0.07070671844482422, 0.07069318389892579, 0.07048572540283203, 0.07055609893798828]",tokens/s,14.111822011199616,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1102.036992,4937.678848,0.0,4542.431232,4484.571136,s,1,14.752904296875,14.752904296875,0.0,14.752904296875,14.752904296875,14.752904296875,14.752904296875,[14.752904296875],,kWh,0.0002231884544166668,2.4612022554655027e-05,8.0394786538e-05,0.0003281952635093218,,MB,1298.18624,5480.841216,0.0,5073.010688,4884.617216,s,10,10.971497924804687,1.0971497924804687,0.007856330733512247,1.0982994384765625,1.1034736572265624,1.10629560546875,1.1085531640625,"[1.0782109375, 1.0902459716796875, 1.0958052978515624, 1.0969268798828125, 1.0970880126953124, 1.100737548828125, 1.0995108642578124, 1.10100830078125, 1.1028465576171875, 1.1091175537109375]",tokens/s,233.33185837936279,kWh,3.1934041897916597e-05,3.521803565735267e-06,2.1167794712000044e-05,5.66236401756519e-05,tokens/kWh,4521079.874163224,MB,1318.592512,5495.52128,0.0,5087.690752,4884.619776,s,10,49.122622070312495,4.91226220703125,0.020120577195886623,4.919377685546875,4.93226650390625,4.9333185546875,4.9341601953125,"[4.8706455078125, 4.886169921875, 4.8998837890625, 4.907328125, 4.91805517578125, 4.92144091796875, 4.9207001953125, 4.9319951171875, 4.93203271484375, 4.93437060546875]",tokens/s,12.825048286271013,kWh,0.00014475006764749977,1.5967055393106905e-05,9.617016026939989e-05,0.00025688728331000655,tokens/kWh,245243.7473285621,,s,630,49.1183184890747,0.0779655849032932,0.00191787837552134,0.07762366485595704,0.07906547698974609,0.07959547348022461,0.09099831825256348,"[0.08926860809326172, 0.07618364715576172, 0.07637757110595703, 0.0760898895263672, 0.07598899078369141, 0.07650016021728516, 0.0764277114868164, 0.07645017242431641, 0.07608319854736328, 0.07603814697265625, 0.07644694519042969, 0.0776957778930664, 0.0802877426147461, 0.07865948486328125, 0.0773755874633789, 0.07660543823242187, 0.07668531036376953, 0.07667097473144531, 0.07665827178955079, 0.07654236602783203, 0.07594393920898437, 0.07612556457519531, 0.07653011322021484, 0.07711676788330078, 0.07854892730712891, 0.07858882904052734, 0.07787519836425781, 0.07693312072753906, 0.07710047912597656, 0.07736377716064453, 0.07745126342773437, 0.07710227203369141, 0.07757497406005859, 0.07623474884033203, 0.07672831726074218, 0.07711539459228516, 0.0778317108154297, 0.07806761932373046, 0.07797747039794922, 0.07793888092041015, 0.07683942413330078, 0.0768358383178711, 0.07762544250488282, 0.07743926239013672, 0.0769276123046875, 0.07739110565185547, 0.07685401916503906, 0.0763719711303711, 0.07696521759033204, 0.07787382507324218, 0.0776703338623047, 0.07796537780761718, 0.07730518341064453, 0.07733100891113281, 0.07732755279541016, 0.07707536315917969, 0.07677500915527344, 0.0769184341430664, 0.0767004165649414, 0.07674009704589843, 0.07696588897705078, 0.07736511993408203, 0.07774578857421875, 0.09197212982177734, 
0.07646002960205078, 0.07630172729492188, 0.07632752227783203, 0.07714816284179687, 0.07644147491455078, 0.07614415740966797, 0.07655689239501953, 0.07641228485107422, 0.07637017822265625, 0.07609334564208985, 0.07731043243408203, 0.08061673736572265, 0.07902486419677734, 0.07734272003173828, 0.07655980682373047, 0.07709126281738281, 0.07649292755126953, 0.07644979095458984, 0.07663206481933593, 0.07655219268798828, 0.07667027282714843, 0.07679849243164062, 0.07713603210449219, 0.07841734313964843, 0.07906899261474609, 0.07840025329589843, 0.07749222564697265, 0.07771929931640625, 0.07771897888183593, 0.07740089416503906, 0.07656038665771485, 0.07656028747558594, 0.07682441711425782, 0.07719347381591797, 0.07731571197509765, 0.07730006408691406, 0.07851990509033203, 0.07791661071777344, 0.07788854217529297, 0.0776402587890625, 0.07729353332519531, 0.07730425262451172, 0.07720294189453125, 0.07689881896972656, 0.07710720062255859, 0.07744822692871094, 0.0770522232055664, 0.07733110046386718, 0.07738105773925781, 0.0776033935546875, 0.07803823852539063, 0.07773670196533203, 0.07800220489501954, 0.07737289428710938, 0.07760457611083985, 0.0771099853515625, 0.07712895965576172, 0.07769379425048828, 0.07776860809326172, 0.07750870513916015, 0.07782195281982422, 0.07843628692626953, 0.092042236328125, 0.07608697509765625, 0.0767286376953125, 0.07707215881347657, 0.07717046356201172, 0.07669190216064453, 0.07661158752441406, 0.07637606048583985, 0.07637811279296874, 0.07637171173095703, 0.07679615783691406, 0.07832780456542969, 0.08089600372314452, 0.0783994903564453, 0.07728073883056641, 0.07711519622802734, 0.07762403106689453, 0.0775125732421875, 0.07758246612548828, 0.07658649444580078, 0.07698246765136718, 0.07689193725585937, 0.07671247863769531, 0.07782195281982422, 0.07968768310546875, 0.07895200347900391, 0.07807743835449218, 0.07727619171142579, 0.07718083190917968, 0.07720719909667968, 0.07717839813232422, 0.07722038269042969, 0.07691497802734375, 0.07660543823242187, 0.07693238067626954, 0.07689830780029297, 0.07791827392578125, 0.07859677124023437, 0.07882956695556641, 0.07828880310058593, 0.07732233428955078, 0.07731779479980469, 0.07743289947509766, 0.07728975677490234, 0.077338623046875, 0.07740006256103515, 0.07728323364257812, 0.07668335723876953, 0.07728128051757813, 0.07766630554199219, 0.07834009552001953, 0.07837625885009766, 0.07833622741699219, 0.07825804901123047, 0.07922748565673828, 0.0785401611328125, 0.07723481750488281, 0.0774655990600586, 0.07801606750488281, 0.07741689300537109, 0.0774483871459961, 0.07741232299804687, 0.07856204986572266, 0.09206169891357421, 0.07652620697021484, 0.07662220764160156, 0.07711743927001953, 0.07656845092773437, 0.07678534698486328, 0.0768802261352539, 0.07632809448242188, 0.07729203033447266, 0.0775766372680664, 0.07745536041259765, 0.0794717788696289, 0.08120406341552734, 0.07950540924072266, 0.07737881469726562, 0.07766502380371093, 0.07713504028320313, 0.07701776123046875, 0.07668508911132813, 0.07671437072753906, 0.07650713348388671, 0.07691852569580078, 0.07714998626708984, 0.07788591766357422, 0.07924486541748046, 0.07934732818603515, 0.07828562927246094, 0.07804041290283203, 0.07734732818603515, 0.07729901123046876, 0.07688278198242188, 0.0772908477783203, 0.0772061767578125, 0.07741795349121093, 0.07681814575195313, 0.07719526672363282, 0.07790815734863281, 0.07861721801757812, 0.07816553497314453, 0.07808048248291016, 0.07791001892089844, 0.07780294036865235, 0.076761474609375, 0.07750879669189453, 0.07747984313964844, 
0.07775206756591797, 0.07756835174560547, 0.07798989105224609, 0.07786495971679687, 0.07807180786132813, 0.0781843490600586, 0.07848342132568359, 0.07806578826904297, 0.07846502685546874, 0.07796736145019531, 0.07741622161865234, 0.07772783660888671, 0.07749644470214843, 0.07735091400146485, 0.07727513885498047, 0.07805133056640624, 0.078166015625, 0.07796326446533203, 0.09175663757324219, 0.07707814025878906, 0.07720508575439453, 0.07673117065429688, 0.07681228637695313, 0.07694236755371094, 0.07776764678955078, 0.0769781723022461, 0.07710720062255859, 0.07697974395751953, 0.07688649749755859, 0.07944316864013672, 0.08178562927246094, 0.07993516540527344, 0.07816838073730469, 0.07787696075439453, 0.07652745819091797, 0.07671443176269531, 0.07724345397949219, 0.07688896179199219, 0.07722528076171875, 0.07712025451660157, 0.07678880310058593, 0.0779435806274414, 0.07948041534423828, 0.0797927017211914, 0.07873945617675782, 0.07797760009765625, 0.07792182159423829, 0.07735472106933594, 0.07799996948242187, 0.07688278198242188, 0.07699593353271485, 0.07731887817382813, 0.07758233642578125, 0.07751884460449218, 0.07847321319580078, 0.07891490936279297, 0.07905347442626953, 0.07871878051757812, 0.07794297790527344, 0.07775437164306641, 0.07776380920410156, 0.07752489471435547, 0.07765900421142578, 0.0776817626953125, 0.07773481750488281, 0.07696694183349609, 0.07780976104736329, 0.07812287902832031, 0.07902719879150391, 0.07883366394042969, 0.07807590484619141, 0.07790991973876953, 0.07793465423583984, 0.07771324920654298, 0.07755385589599609, 0.07749385833740234, 0.07769129943847657, 0.0780021743774414, 0.07740825653076172, 0.0783022689819336, 0.07807686614990235, 0.09157756805419921, 0.07674755096435547, 0.07706134033203126, 0.0774254379272461, 0.07752294158935547, 0.0775167999267578, 0.07747993469238282, 0.07675494384765626, 0.07712944030761719, 0.077034912109375, 0.07707491302490234, 0.07888713836669922, 0.08155561828613281, 0.07898934173583984, 0.07811481475830079, 0.07720655822753907, 0.07674562835693359, 0.07738579559326172, 0.077412353515625, 0.0774031982421875, 0.07759548950195312, 0.07727523040771485, 0.0768443832397461, 0.07761920166015625, 0.0790512924194336, 0.0791942367553711, 0.07896028900146485, 0.07807561492919922, 0.07797209930419922, 0.07724441528320312, 0.07771955108642578, 0.07745686340332031, 0.0777072982788086, 0.07750502777099609, 0.07761510467529296, 0.07803084564208984, 0.07868621063232421, 0.07910134124755859, 0.07940771484375, 0.07859200286865234, 0.07772978973388672, 0.07841382598876953, 0.07781990051269531, 0.07744921875, 0.07751475524902343, 0.07738162994384766, 0.07727718353271484, 0.07759200286865234, 0.07782457733154297, 0.07827193450927734, 0.0786396484375, 0.0785117416381836, 0.0781884765625, 0.07800985717773437, 0.07801718139648438, 0.07830764770507813, 0.07728460693359375, 0.07777561950683594, 0.07762329864501953, 0.077795166015625, 0.07847283172607422, 0.0780475845336914, 0.078388671875, 0.09062461090087891, 0.07655619049072265, 0.07697417449951172, 0.07689523315429687, 0.07720857238769531, 0.07717068481445312, 0.0769986572265625, 0.07686895751953125, 0.07673875427246094, 0.07722441864013672, 0.07722598266601563, 0.07866162872314453, 0.081421630859375, 0.0789590072631836, 0.07837926483154296, 0.07691600036621093, 0.07720809936523437, 0.07711561584472656, 0.07709001922607422, 0.07722882843017578, 0.07731401824951172, 0.07707855987548828, 0.07702323150634766, 0.07796883392333985, 0.0794691162109375, 0.07957030487060547, 0.07913475036621094, 
0.07806012725830078, 0.07855104064941407, 0.07740825653076172, 0.07765606689453125, 0.07764947509765625, 0.07771590423583985, 0.07726802825927734, 0.07759149169921875, 0.07855478668212891, 0.07839510345458985, 0.07922259521484375, 0.0793004150390625, 0.07834111785888671, 0.07801225280761719, 0.0772460174560547, 0.07767414093017579, 0.07779782104492187, 0.07761309051513672, 0.07767884826660157, 0.0779716796875, 0.07753024291992187, 0.0775604476928711, 0.07833190155029297, 0.07873356628417968, 0.07873741149902344, 0.07877632141113282, 0.07797760009765625, 0.07793869018554687, 0.07817420959472657, 0.07765321350097656, 0.07743363189697265, 0.07776598358154296, 0.07809295654296874, 0.07781990051269531, 0.0782349090576172, 0.0788116455078125, 0.09117900848388671, 0.07717887878417969, 0.07776870727539062, 0.07668688201904297, 0.0770687713623047, 0.07717810821533203, 0.07724518585205079, 0.0772332763671875, 0.07712448120117188, 0.07714611053466797, 0.07718911743164063, 0.07860137939453125, 0.08075312042236328, 0.07922112274169922, 0.07856502532958984, 0.0777977294921875, 0.07735295867919922, 0.07741439819335938, 0.07767596435546875, 0.07731053161621093, 0.07733180999755859, 0.07748265838623047, 0.07766630554199219, 0.07856495666503906, 0.07945369720458985, 0.08012054443359375, 0.08000531005859375, 0.07848908996582031, 0.07777919769287109, 0.0775068130493164, 0.07752499389648437, 0.07764157104492188, 0.07760460662841796, 0.07734518432617188, 0.07810457611083985, 0.07764527893066406, 0.07818886566162109, 0.0790195541381836, 0.07927670288085938, 0.0784444808959961, 0.07830742645263672, 0.07792623901367188, 0.07821920013427734, 0.07787337493896485, 0.07816365051269532, 0.07733280181884766, 0.07772774505615235, 0.0778193588256836, 0.07777677154541016, 0.07863158416748046, 0.07970816040039062, 0.07904051208496093, 0.07840502166748047, 0.07804576110839843, 0.07797721862792968, 0.0774862060546875, 0.07818825531005859, 0.07796988677978516, 0.07776060485839843, 0.07846915435791016, 0.0781061782836914, 0.07818672180175781, 0.07955059051513672, 0.09101660919189453, 0.07695804595947266, 0.07690470123291016, 0.07700431823730469, 0.07696022033691406, 0.07730912017822265, 0.07718370819091797, 0.07712969970703125, 0.07693529510498047, 0.07701203155517578, 0.07717369842529297, 0.07928012847900391, 0.08156972503662109, 0.0796160659790039, 0.07813085174560547, 0.07764342498779297, 0.07760867309570313, 0.07835132598876954, 0.07768019104003906, 0.07756435394287109, 0.07740812683105469, 0.07753126525878906, 0.07769075012207032, 0.07851004791259765, 0.07996022033691406, 0.07982806396484375, 0.07891651153564454, 0.07889715576171875, 0.07735664367675782, 0.07743917083740234, 0.07787542724609375, 0.07728128051757813, 0.07728742218017579, 0.07717462158203126, 0.07823580932617187, 0.07755331420898437, 0.07847357177734375, 0.07914495849609375, 0.07906508636474609, 0.07862857818603515, 0.07816127777099609, 0.07790054321289062, 0.07786653137207031, 0.07795571136474609, 0.07787519836425781, 0.07731814575195313, 0.07716031646728516, 0.0777196502685547, 0.07826214599609375, 0.07891340637207031, 0.07919792175292968, 0.07927865600585937, 0.07844022369384765, 0.07841404724121094, 0.0778239974975586, 0.0783994903564453, 0.07819468688964844, 0.07852403259277344, 0.07787741088867188, 0.07743682861328124, 0.07816633605957031, 0.07863645172119141, 0.07878307342529296, 0.09095353698730468, 0.0773571548461914, 0.07703804779052735, 0.07774944305419922, 0.07724729919433594, 0.07715020751953125, 0.0771973114013672, 0.07709490966796875, 
0.07734886169433594, 0.07758544158935547, 0.07741673278808593, 0.0791497573852539, 0.08200364685058593, 0.07928864288330079, 0.07853670501708984, 0.07763353729248047, 0.07752649688720703, 0.07790201568603515, 0.07745366668701172, 0.07719116973876954, 0.07711478424072266, 0.07716844940185547, 0.07740700531005859, 0.0783073272705078, 0.08032665252685547, 0.07952588653564453, 0.07868364715576172, 0.07845734405517578, 0.07784786987304687, 0.07737149047851563, 0.07749858856201172, 0.07753971099853516, 0.07734681701660157, 0.07713177490234376, 0.07712153625488281, 0.07828070068359375, 0.08018329620361328, 0.07970406341552734, 0.07879065704345703, 0.078323486328125, 0.07814575958251953, 0.07799193572998046, 0.07723612976074219, 0.07786016082763672, 0.07831222534179688, 0.0776785888671875, 0.0776325454711914, 0.07782498931884765, 0.07845069122314453, 0.07909580993652343, 0.07940882873535156, 0.07926758575439453, 0.07881171417236328, 0.07850371551513671, 0.07770941162109375, 0.07810265350341797, 0.07804313659667969, 0.07804108428955078, 0.07742668914794922, 0.07783424377441406, 0.07784006500244141, 0.07860665893554687, 0.07916134643554687]",tokens/s,12.826171973703245,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,823.86944,2378.039296,0.0,1975.517184,1956.768256,s,1,10.4465009765625,10.4465009765625,0.0,10.4465009765625,10.4465009765625,10.4465009765625,10.4465009765625,[10.4465009765625],,kWh,9.462073296670798e-05,1.0430128009308533e-05,2.9476690248025905e-05,0.00013452755122404244,,MB,1291.276288,2646.474752,0.0,2231.369728,2178.247168,s,10,3.5103599548339846,0.3510359954833984,0.0011199455340269388,0.35110118103027343,0.3522959869384765,0.3524813674926758,0.3526296719360351,"[0.35030947875976565, 0.35063699340820315, 0.351556640625, 0.35174005126953123, 0.3504044494628906, 0.35157861328125, 0.3522547912597656, 0.3506457214355469, 0.352666748046875, 0.3485664672851562]",tokens/s,729.269941811728,kWh,1.0466947451042568e-05,1.1543087386286975e-06,6.919519424499516e-06,1.854077561417078e-05,tokens/kWh,13807405.112240195,MB,1335.382016,2648.571904,0.0,2231.369728,2178.249728,s,10,34.75648608398438,3.475648608398438,0.033260838297804664,3.4657220458984375,3.5273400878906247,3.5314171142578124,3.5346787353515623,"[3.467604248046875, 3.45198046875, 3.433838134765625, 3.478316162109375, 3.441010498046875, 3.46383984375, 3.52643408203125, 3.535494140625, 3.454895263671875, 3.5030732421875]",tokens/s,18.126113165689123,kWh,0.00010177738346270568,1.1225887339490656e-05,5.272108384350239e-05,0.00016572435464569873,tokens/kWh,380149.3156192244,,s,630,34.75414226150512,0.05516530517699226,0.0009643730224488411,0.05491703987121582,0.056126870346069335,0.056396757507324216,0.05841041473388673,"[0.055810398101806644, 0.055661983489990234, 0.054803295135498045, 0.05468134307861328, 0.054648223876953124, 0.05442438507080078, 0.0547589111328125, 
0.05453792190551758, 0.05820604705810547, 0.054899711608886716, 0.05474483108520508, 0.05458124923706055, 0.05451161575317383, 0.05456828689575195, 0.057329345703125, 0.05460271835327148, 0.054570079803466794, 0.05453916931152344, 0.054523902893066405, 0.05517532730102539, 0.05445382308959961, 0.0542815055847168, 0.05527199935913086, 0.059930305480957034, 0.054563583374023436, 0.054550529479980465, 0.05592841720581055, 0.05493183898925781, 0.0547116813659668, 0.05447542572021485, 0.05458262252807617, 0.05483996963500976, 0.054919166564941405, 0.05502908706665039, 0.05486211013793945, 0.05476416015625, 0.05458099365234375, 0.05516479873657226, 0.055011455535888674, 0.05462428665161133, 0.054642654418945315, 0.05442876815795898, 0.054523902893066405, 0.054421600341796876, 0.054682689666748045, 0.054797470092773436, 0.054772319793701174, 0.05508639907836914, 0.05536223983764649, 0.05571404647827148, 0.055944961547851564, 0.056423679351806644, 0.055605377197265625, 0.05510335922241211, 0.054753440856933594, 0.05430668640136719, 0.05441404724121094, 0.05494198226928711, 0.054884063720703126, 0.05464012908935547, 0.054572830200195314, 0.055050975799560545, 0.05523660659790039, 0.05579715347290039, 0.05537007904052734, 0.05449465560913086, 0.05429695892333984, 0.05428403091430664, 0.056064640045166016, 0.055360511779785154, 0.05534835052490234, 0.055097183227539065, 0.055639774322509765, 0.055195934295654295, 0.05478131103515625, 0.05465971374511719, 0.05453619384765625, 0.05487721633911133, 0.05489148712158203, 0.05465459060668945, 0.054617729187011715, 0.054878849029541016, 0.0550417594909668, 0.054407585144042966, 0.054155265808105466, 0.054226142883300785, 0.05471516799926758, 0.05507612609863281, 0.055139328002929686, 0.055178974151611326, 0.057732608795166014, 0.05529359817504883, 0.054647647857666015, 0.05499897766113281, 0.05544271850585938, 0.05499075317382812, 0.05434860610961914, 0.05415737533569336, 0.05418803024291992, 0.054245376586914064, 0.05487424087524414, 0.05438451385498047, 0.054112255096435545, 0.053919200897216794, 0.05433808135986328, 0.054095870971679685, 0.0539422721862793, 0.05410790252685547, 0.054475006103515626, 0.05422284698486328, 0.05398527908325195, 0.05436620712280273, 0.054147071838378906, 0.05461196899414063, 0.05456617736816406, 0.0543804817199707, 0.054148128509521484, 0.05403196716308594, 0.061517982482910155, 0.05469926452636719, 0.054473472595214845, 0.05416755294799805, 0.05457660675048828, 0.054282142639160154, 0.05413724899291992, 0.05435776138305664, 0.056233951568603516, 0.0547558708190918, 0.054574462890625, 0.054368896484375, 0.0545976333618164, 0.054867424011230466, 0.05441177749633789, 0.05439395141601563, 0.05425222396850586, 0.0542476806640625, 0.054247425079345706, 0.054128639221191405, 0.05415935897827148, 0.054122238159179686, 0.05428390502929688, 0.05415388870239258, 0.05425971221923828, 0.05448601531982422, 0.05470665740966797, 0.05472515106201172, 0.05436006546020508, 0.05420028686523438, 0.05455865478515625, 0.05484515380859375, 0.05459804916381836, 0.05421459197998047, 0.05476704025268555, 0.05448278427124023, 0.05506675338745117, 0.05425827026367187, 0.05425116729736328, 0.054298336029052735, 0.05421657562255859, 0.05409868621826172, 0.0542105598449707, 0.054464542388916015, 0.05411980819702149, 0.054319713592529295, 0.05450956726074219, 0.054446239471435544, 0.05454217529296875, 0.0544172477722168, 0.054902942657470706, 0.054441982269287106, 0.05409791946411133, 0.05415318298339844, 0.054102046966552735, 0.05404611206054687, 
0.05482704162597656, 0.05402048110961914, 0.053911838531494144, 0.05466716766357422, 0.05734400177001953, 0.05531033706665039, 0.05473007965087891, 0.05426028823852539, 0.05413641738891602, 0.054378849029541015, 0.05550505447387695, 0.05444220733642578, 0.05457827377319336, 0.054358657836914064, 0.05418195343017578, 0.05585663986206055, 0.05519187164306641, 0.05507491302490235, 0.05458486557006836, 0.05438483047485351, 0.05442559814453125, 0.054802719116210936, 0.05491302490234375, 0.0545478401184082, 0.055188095092773434, 0.054766624450683594, 0.05458019256591797, 0.054573055267333984, 0.05465497589111328, 0.055171134948730466, 0.05440505599975586, 0.056779903411865236, 0.05468409729003906, 0.054188480377197264, 0.054171646118164066, 0.054261760711669924, 0.05478313446044922, 0.05433174514770508, 0.054358528137207034, 0.056346622467041016, 0.05920134353637695, 0.05505353546142578, 0.05491523361206055, 0.0549997444152832, 0.054421630859375, 0.05441535949707031, 0.054484992980957034, 0.05437545776367188, 0.054414558410644534, 0.054627361297607424, 0.05454694366455078, 0.05554713439941406, 0.05509817504882813, 0.055092449188232424, 0.05488326263427734, 0.0551649284362793, 0.055003135681152344, 0.05526528167724609, 0.05514057540893555, 0.05579958343505859, 0.05610623931884766, 0.05596031951904297, 0.055502559661865236, 0.05529788970947266, 0.055402942657470707, 0.05505007934570313, 0.054918846130371096, 0.05505174255371094, 0.055532543182373044, 0.055987712860107425, 0.05651302337646484, 0.05617635345458984, 0.05736486434936523, 0.05585398483276367, 0.05595248031616211, 0.05562278366088867, 0.05580265426635742, 0.05650431823730469, 0.05634332656860352, 0.05595331192016602, 0.0562718734741211, 0.05483520126342773, 0.05491097640991211, 0.05563187026977539, 0.055175167083740234, 0.055597057342529295, 0.055046112060546874, 0.054638622283935546, 0.05458099365234375, 0.05474355316162109, 0.05507625579833984, 0.05446672058105469, 0.054406368255615234, 0.05432726287841797, 0.05470499038696289, 0.05507660675048828, 0.05469353485107422, 0.0545533447265625, 0.05455251312255859, 0.054204254150390624, 0.054503135681152344, 0.05481343841552734, 0.05487318420410156, 0.05473961639404297, 0.05477785491943359, 0.05471027374267578, 0.054812671661376954, 0.055572223663330075, 0.05815875244140625, 0.055260990142822264, 0.05454415893554688, 0.05433670425415039, 0.05425075149536133, 0.0541376953125, 0.05420003128051758, 0.05412454223632813, 0.05404646301269531, 0.054015262603759766, 0.05404361724853515, 0.05389744186401367, 0.05425692749023438, 0.054014560699462894, 0.054063232421875, 0.05387446212768555, 0.054251518249511715, 0.0542061767578125, 0.05423542404174805, 0.05444169616699219, 0.05434806442260742, 0.05416755294799805, 0.0541429443359375, 0.0540918083190918, 0.05412192153930664, 0.054043201446533205, 0.05390950393676758, 0.054037696838378904, 0.054047550201416016, 0.05430374526977539, 0.05404687881469727, 0.05431177520751953, 0.05426995086669922, 0.05636870574951172, 0.05472083282470703, 0.05451161575317383, 0.05443926239013672, 0.05492115020751953, 0.05540937423706055, 0.055465984344482425, 0.05548835372924805, 0.0554251823425293, 0.055588863372802735, 0.055432865142822266, 0.055374431610107425, 0.05497625732421875, 0.054855297088623044, 0.05456076812744141, 0.054480575561523435, 0.054721118927001954, 0.054163551330566405, 0.05460540771484375, 0.05434000015258789, 0.05409958267211914, 0.05384230422973633, 0.053956607818603515, 0.053905406951904294, 0.05408143997192383, 0.054617984771728516, 
0.05712713623046875, 0.05545779037475586, 0.0557619514465332, 0.055083999633789064, 0.0554508171081543, 0.05550368118286133, 0.05670624160766601, 0.055587646484375, 0.05509529495239258, 0.05487615966796875, 0.055310081481933594, 0.05544905471801758, 0.05486412811279297, 0.055511585235595705, 0.055136257171630856, 0.05610604858398437, 0.05552566528320312, 0.055333377838134766, 0.05545180892944336, 0.05561324691772461, 0.05483055877685547, 0.05438131332397461, 0.05480444717407226, 0.054629566192626954, 0.0547081298828125, 0.054733726501464845, 0.0551280632019043, 0.05445177459716797, 0.054478782653808594, 0.054755104064941405, 0.05457174301147461, 0.05448044967651367, 0.05459603118896485, 0.05443993759155273, 0.05457920074462891, 0.054361598968505856, 0.05438873672485352, 0.05718492889404297, 0.056414081573486326, 0.055818241119384764, 0.056170177459716794, 0.05572572708129883, 0.05648041534423828, 0.055597057342529295, 0.05589177703857422, 0.05624031829833984, 0.05595865631103516, 0.055825279235839846, 0.05578496170043945, 0.055728641510009766, 0.056124576568603514, 0.05598499298095703, 0.055779457092285156, 0.055705249786376955, 0.055598720550537106, 0.055777889251708984, 0.05615411376953125, 0.05613772964477539, 0.056151233673095706, 0.05589385604858398, 0.055766143798828126, 0.05597372817993164, 0.05573222351074219, 0.05558272171020508, 0.0555748176574707, 0.05600390243530273, 0.05564457702636719, 0.05570355224609375, 0.05573017501831055, 0.055656448364257816, 0.05554323196411133, 0.05584339141845703, 0.05615011215209961, 0.055903488159179685, 0.055961345672607424, 0.05559104156494141, 0.05602316665649414, 0.05606256103515625, 0.05611503982543945, 0.05621353530883789, 0.056105152130126956, 0.05987919998168945, 0.055814369201660156, 0.05565766525268555, 0.05580044937133789, 0.055784862518310545, 0.05558713531494141, 0.05561971282958984, 0.05555235290527344, 0.05584016036987305, 0.055851615905761716, 0.056066047668457034, 0.05607628631591797, 0.05594451141357422, 0.05585372924804687, 0.056143009185791015, 0.05593119812011719, 0.05593475341796875, 0.05600950241088867, 0.055818241119384764, 0.05657212829589844, 0.055758689880371096, 0.056047775268554687, 0.05769622421264648, 0.055758880615234374, 0.055629695892333984, 0.05587964630126953, 0.05608464050292969, 0.056123390197753906, 0.05609676742553711, 0.05585919952392578, 0.05584799957275391, 0.05849388885498047, 0.05599641418457031, 0.056221694946289064, 0.05648096084594727, 0.05605868911743164, 0.05651660919189453, 0.05609062576293945, 0.05610905456542969, 0.05616342544555664, 0.0564552001953125, 0.056234878540039064, 0.056872417449951175, 0.05620703887939453, 0.05619184112548828, 0.05607219314575195, 0.05608857727050781, 0.05607014465332031, 0.05624777603149414, 0.05626319885253906, 0.056205310821533204, 0.05598336029052734, 0.05623065567016602, 0.056027137756347656, 0.05611929702758789, 0.05634457778930664, 0.05637558364868164, 0.056532703399658206, 0.05620851135253906, 0.05608310317993164, 0.056125663757324216, 0.05583257675170898, 0.0563056640625, 0.05566873550415039, 0.05610496139526367, 0.055954689025878905, 0.05571660614013672, 0.05571164703369141, 0.05572227096557617, 0.05599212646484375, 0.05580799865722656, 0.0557916145324707, 0.055664703369140624, 0.05585654449462891, 0.055869216918945315, 0.055773727416992186, 0.05597776031494141, 0.05569785690307617, 0.05565030288696289, 0.055812095642089846, 0.05593907165527344, 0.055932926177978515, 0.056477249145507814, 0.05486227035522461, 0.05479219055175781, 0.055857086181640626, 
0.054808639526367185, 0.05442278289794922, 0.05473766326904297, 0.05447420883178711, 0.0547836799621582, 0.05476873779296875, 0.05451747131347656, 0.05573020935058594, 0.06404710388183593, 0.055291072845458984, 0.05496915054321289, 0.055366943359375, 0.05478268814086914, 0.05483481597900391, 0.05462259292602539, 0.05446451187133789, 0.05442559814453125, 0.05451683044433594, 0.054397857666015625, 0.05456883239746094, 0.05459366226196289, 0.05432451248168945, 0.05458940887451172, 0.0543397102355957, 0.05418662261962891, 0.05446656036376953, 0.054093406677246096, 0.05423276901245117, 0.054103839874267576, 0.054295616149902345, 0.054866016387939455, 0.05429635238647461, 0.054714271545410156, 0.054265792846679685, 0.05450153732299805, 0.054059009552001956, 0.05468979263305664, 0.054351806640625, 0.05478815841674805, 0.054365184783935545, 0.05447577667236328, 0.054300670623779294, 0.05465481567382813, 0.054177505493164066, 0.054144702911376956, 0.05418051147460937, 0.054806655883789065, 0.05527548980712891, 0.05523660659790039, 0.054838462829589846, 0.055153182983398434, 0.05485420989990234, 0.05523632049560547, 0.05456857681274414, 0.0549312629699707, 0.05490940856933594, 0.054976543426513674, 0.054624607086181644, 0.05468521499633789, 0.056594337463378906, 0.05597372817993164, 0.055872032165527344, 0.055973888397216794, 0.05548204803466797, 0.055450080871582035, 0.05543916702270508, 0.0556544303894043, 0.055417022705078124, 0.055451454162597655, 0.05544073486328125, 0.0557042236328125, 0.05536979293823242, 0.05574444961547852, 0.05611833572387695, 0.055621726989746094, 0.05771913528442383, 0.05583103942871094, 0.05548137664794922, 0.056269790649414064, 0.05585715103149414, 0.05567692947387695, 0.05549641418457031, 0.05545529556274414, 0.05522211074829102, 0.05545868682861328, 0.05589593505859375, 0.055814369201660156, 0.05614992141723633, 0.05576467132568359, 0.05557689666748047, 0.055373825073242185, 0.05624185562133789, 0.05552979278564453, 0.055537662506103515, 0.05550899124145508, 0.05560720062255859, 0.055607391357421876, 0.05580595016479492, 0.056371200561523435, 0.055852832794189455, 0.05588579177856445, 0.05566387176513672, 0.05590534210205078, 0.05552121734619141, 0.05938380813598633, 0.05492508697509765, 0.055029376983642575, 0.054635009765625, 0.05470441436767578, 0.05484883117675781, 0.054852096557617185, 0.05444512176513672, 0.05453120040893555, 0.054484161376953125, 0.054534591674804685, 0.054587104797363284, 0.05470665740966797, 0.05555971145629883, 0.05558425521850586, 0.05556937789916992, 0.05563187026977539, 0.05544345474243164]",tokens/s,18.127335592390942,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,826.150912,8535.277568,0.0,8132.755456,7824.681472,s,1,19.826017578125,19.826017578125,0.0,19.826017578125,19.826017578125,19.826017578125,19.826017578125,[19.826017578125],,kWh,0.000361130653712515,3.982807516743873e-05,0.00011628898192000969,0.0005172477107999634,,MB,1348.780032,9539.813376,0.0,9124.708352,8500.632064,s,10,17.76268151855469,1.776268151855469,0.00756738363442467,1.777406005859375,1.7839992797851563,1.7845426086425782,1.7849772717285157,"[1.75771044921875, 1.7699132080078126, 1.7746553955078126, 1.775306884765625, 1.776722900390625, 1.778089111328125, 1.7788441162109374, 1.7824749755859375, 1.7850859375, 1.7838785400390624]",tokens/s,144.12238362354546,kWh,5.177616916791748e-05,5.710491540403452e-06,3.443152754520062e-05,9.191818825352155e-05,tokens/kWh,2785085.3554023593,MB,1385.603072,9544.00768,0.0,9126.805504,8500.634624,s,10,83.67294433593749,8.36729443359375,0.017416452451085286,8.368037109374999,8.38808916015625,8.388802392578125,8.389372978515624,"[8.3337451171875, 8.34658203125, 8.359544921875, 8.3584775390625, 8.3621474609375, 8.3739267578125, 8.3802880859375, 8.3807861328125, 8.3879306640625, 8.389515625]",tokens/s,7.52931553920967,kWh,0.00024518936014125226,2.704592816775653e-05,0.0001626669912445991,0.00043490227955360794,tokens/kWh,144860.12826758326,,s,630,83.66921893310538,0.13280828402080233,0.0017127467399337456,0.1327750244140625,0.13379580383300782,0.1340837417602539,0.14348165496826173,"[0.14600006103515625, 0.13238919067382812, 0.13113270568847657, 0.13091856384277345, 0.13098822021484374, 0.13086758422851563, 0.13101426696777344, 0.13212300109863281, 0.1325813751220703, 0.13261996459960937, 0.1311845703125, 0.1309740753173828, 0.13109791564941406, 0.1310542755126953, 0.13137727355957032, 0.13263388061523437, 0.132321533203125, 0.13211683654785156, 0.1310187530517578, 0.13125164794921876, 0.1310268096923828, 0.13122962951660155, 0.13342387390136717, 0.133359619140625, 0.13161677551269532, 0.13113536071777343, 0.13198348999023438, 0.1326551055908203, 0.1313953857421875, 0.13138143920898437, 0.13305772399902344, 0.13166822814941406, 0.13114938354492187, 0.13195132446289062, 0.1333660430908203, 0.1330109405517578, 0.13165618896484374, 0.13185807800292967, 0.13316680908203124, 0.13169728088378907, 0.13140538024902343, 0.13239033508300782, 0.13300633239746093, 0.13159951782226562, 0.13236473083496095, 0.13281907653808595, 0.1319929962158203, 0.1329542999267578, 0.13153945922851562, 0.132417724609375, 0.1331056671142578, 0.13205708312988282, 0.13204071044921875, 0.13333059692382812, 0.13224169921875, 0.13313031005859374, 0.13176832580566405, 0.1318825225830078, 0.1334932098388672, 0.13207142639160158, 0.13185842895507813, 0.13364175415039062, 0.13280455017089843, 0.14426316833496095, 0.1322939910888672, 0.13096121215820314, 0.13076966857910155, 0.130912353515625, 0.13088333129882812, 0.13103958129882812, 0.13401692199707033, 0.13274111938476563, 0.1313522491455078, 0.13277215576171875, 0.13139967346191406, 0.1308791961669922, 0.13090640258789063, 0.13149798583984376, 0.1326219787597656, 0.13139593505859376, 0.13319577026367188, 0.1315758056640625, 0.13102435302734375, 0.13271708679199218, 0.13134979248046874, 0.13184608459472658, 0.13273350524902344, 0.13224266052246095, 0.1327382354736328, 0.1313580780029297, 0.13200428771972655, 0.132031494140625, 0.13251890563964844, 0.13229466247558594, 0.13233663940429688, 0.13287718200683593, 0.1322411804199219, 0.13154304504394532, 
0.13296669006347656, 0.13308457946777344, 0.13172502136230468, 0.13317625427246094, 0.13239910888671874, 0.13300531005859376, 0.13158399963378906, 0.13254861450195313, 0.13390348815917968, 0.1325344696044922, 0.13208441162109374, 0.1334087677001953, 0.13342924499511719, 0.13187890625, 0.13228851318359375, 0.13359849548339844, 0.13209245300292968, 0.13237062072753905, 0.13320806884765626, 0.132642333984375, 0.13305267333984375, 0.1317069091796875, 0.13382266235351561, 0.13342889404296876, 0.13192636108398437, 0.13280857849121094, 0.13326963806152345, 0.13287753295898438, 0.14439231872558594, 0.13237184143066405, 0.13090179443359376, 0.13085980224609375, 0.13094216918945312, 0.13099504089355468, 0.13152255249023437, 0.13427302551269532, 0.13347021484375, 0.1321306915283203, 0.13214035034179689, 0.13237026977539063, 0.13105661010742187, 0.13107814025878906, 0.13261109924316405, 0.13307594299316405, 0.1323335723876953, 0.1326796875, 0.13334649658203124, 0.13167494201660157, 0.130988037109375, 0.13243801879882813, 0.13315606689453124, 0.1334075164794922, 0.13285536193847655, 0.13237088012695314, 0.13257522583007814, 0.13113075256347656, 0.1312569580078125, 0.13404570007324218, 0.13276364135742189, 0.13279350280761718, 0.13309628295898437, 0.1320202178955078, 0.1327349090576172, 0.13135877990722655, 0.1321197052001953, 0.1335509490966797, 0.13196287536621093, 0.13300735473632813, 0.13301715087890625, 0.1320076141357422, 0.1330445098876953, 0.13183229064941407, 0.13392076110839843, 0.13360870361328125, 0.13199436950683593, 0.1331793975830078, 0.13247007751464843, 0.1328351745605469, 0.13320892333984374, 0.13240066528320313, 0.133013916015625, 0.1316999053955078, 0.13277789306640625, 0.13303292846679687, 0.13225704956054687, 0.1337288360595703, 0.13280467224121092, 0.13333308410644532, 0.1333391418457031, 0.1323865203857422, 0.1334336395263672, 0.14323945617675782, 0.1321676788330078, 0.13075833129882813, 0.13072621154785155, 0.13073356628417968, 0.13073992919921876, 0.132112548828125, 0.13553916931152343, 0.13276512145996094, 0.1316707458496094, 0.1310248260498047, 0.13097911071777343, 0.13108714294433593, 0.1318985290527344, 0.13399305725097657, 0.13384320068359376, 0.1322843475341797, 0.13141398620605468, 0.13090007019042968, 0.13112013244628906, 0.13276010131835939, 0.13283786010742188, 0.13468876647949218, 0.13290086364746093, 0.13127398681640626, 0.1311742401123047, 0.1328997497558594, 0.13219798278808595, 0.1324522247314453, 0.13419929504394532, 0.13337625122070312, 0.13223554992675782, 0.13180928039550782, 0.13265843200683594, 0.13214544677734374, 0.1325040283203125, 0.13237657165527345, 0.1326878662109375, 0.13331578063964844, 0.13229545593261718, 0.1327196807861328, 0.13221498107910157, 0.13290985107421874, 0.133074951171875, 0.1324830780029297, 0.1323704071044922, 0.13334451293945312, 0.13379014587402344, 0.13203225708007812, 0.13288099670410156, 0.1331444091796875, 0.13186457824707032, 0.13303414916992187, 0.1330380859375, 0.13393101501464844, 0.133148193359375, 0.1331161651611328, 0.133091552734375, 0.1320261688232422, 0.13298092651367188, 0.13194854736328124, 0.13345587158203126, 0.1337057342529297, 0.1437163848876953, 0.13205282592773437, 0.13089564514160157, 0.13257568359375, 0.13117213439941405, 0.13065231323242188, 0.13116621398925782, 0.13416365051269533, 0.13278598022460938, 0.13247772216796874, 0.1325028533935547, 0.13111798095703125, 0.13079551696777345, 0.13135462951660157, 0.13299325561523437, 0.132900634765625, 0.13256704711914064, 0.1337220458984375, 0.13255232238769532, 
0.13102534484863282, 0.13075045776367186, 0.13262220764160157, 0.13364031982421876, 0.13264691162109374, 0.13333273315429686, 0.1329412841796875, 0.13147215270996093, 0.13092658996582032, 0.13204879760742189, 0.1331037139892578, 0.13312818908691407, 0.13265072631835936, 0.13301744079589845, 0.13139129638671876, 0.13338278198242187, 0.13171452331542968, 0.13230502319335938, 0.13259202575683593, 0.1334614715576172, 0.13394998168945313, 0.13334527587890624, 0.13169049072265626, 0.13139967346191406, 0.13246873474121093, 0.13352511596679686, 0.13248124694824218, 0.13367269897460937, 0.13340444946289062, 0.13284979248046874, 0.13163084411621093, 0.13181776428222655, 0.13295651245117188, 0.13238079833984376, 0.13394029235839844, 0.13361247253417968, 0.13398204040527345, 0.1332737579345703, 0.13258457946777344, 0.13358518981933593, 0.13210684204101564, 0.13345382690429688, 0.13378323364257813, 0.1335647430419922, 0.14439056396484376, 0.1322373046875, 0.13077503967285156, 0.1308037109375, 0.1307371826171875, 0.13074879455566407, 0.13205580139160156, 0.13565936279296875, 0.1343173828125, 0.1326189422607422, 0.13125552368164062, 0.13082089233398436, 0.13100221252441407, 0.13189955139160156, 0.13355999755859374, 0.13433477783203124, 0.13368934631347656, 0.13240960693359374, 0.13100006103515624, 0.13084431457519533, 0.1315597686767578, 0.1330253143310547, 0.13410147094726563, 0.1344898223876953, 0.13261648559570313, 0.13122969055175782, 0.13129522705078125, 0.13171916198730468, 0.13275955200195313, 0.13350863647460937, 0.1342694091796875, 0.13379379272460937, 0.1329888916015625, 0.13150416564941406, 0.13108428955078125, 0.13212399291992188, 0.13251446533203126, 0.13425447082519532, 0.13367459106445312, 0.13344607543945314, 0.132704345703125, 0.13131980895996093, 0.13188505554199217, 0.13353575134277343, 0.13242469787597655, 0.13383168029785156, 0.13331251525878907, 0.13395762634277344, 0.13306675720214844, 0.13196902465820312, 0.13303097534179686, 0.1333927001953125, 0.13325357055664064, 0.13371002197265625, 0.13429461669921874, 0.1332822723388672, 0.13207391357421874, 0.1330905303955078, 0.13337680053710937, 0.13358079528808595, 0.13319132995605468, 0.13462109375, 0.13354234313964844, 0.14350950622558595, 0.13216954040527343, 0.13084486389160158, 0.13064396667480468, 0.13074021911621095, 0.13286195373535156, 0.13288447570800782, 0.13634970092773438, 0.13381427001953125, 0.13244825744628907, 0.13114982604980469, 0.13079551696777345, 0.130651611328125, 0.13228221130371093, 0.13390713500976562, 0.1346927947998047, 0.1328640594482422, 0.13189324951171874, 0.1323399658203125, 0.13192576599121095, 0.13215289306640626, 0.131822021484375, 0.1335562286376953, 0.13384498596191408, 0.13284352111816405, 0.13351321411132813, 0.1318314208984375, 0.13227865600585936, 0.1330319366455078, 0.1322782745361328, 0.13327565002441405, 0.1329224395751953, 0.133376953125, 0.13276374816894532, 0.13156460571289064, 0.13379466247558594, 0.1330524139404297, 0.1327493133544922, 0.13346604919433594, 0.13284690856933593, 0.13344435119628906, 0.13212620544433593, 0.1328805389404297, 0.13262882995605468, 0.13293075561523438, 0.13361439514160156, 0.1337374725341797, 0.1325086669921875, 0.13351321411132813, 0.13343948364257813, 0.13293734741210939, 0.1324732208251953, 0.13310531616210938, 0.1341988525390625, 0.1327460174560547, 0.13345791625976564, 0.1342054443359375, 0.13336370849609375, 0.13370777893066407, 0.13320191955566407, 0.1330524139404297, 0.13350245666503907, 0.1334268798828125, 0.14341346740722657, 0.1324239959716797, 
0.1309222412109375, 0.13088383483886717, 0.13087948608398436, 0.1326216278076172, 0.13226054382324218, 0.13508607482910157, 0.13383868408203126, 0.13255007934570312, 0.1312424011230469, 0.13094114685058594, 0.13242367553710938, 0.13316841125488282, 0.13296246337890624, 0.13377577209472657, 0.13322470092773436, 0.1321881561279297, 0.13147340393066406, 0.13259910583496093, 0.1322032012939453, 0.13269606018066407, 0.13310304260253905, 0.13335609436035156, 0.1332135009765625, 0.1331343994140625, 0.13151673889160156, 0.13273085021972655, 0.13285411071777345, 0.13303176879882814, 0.13295603942871093, 0.13346435546875, 0.13280870056152344, 0.13297254943847656, 0.13239295959472655, 0.13343081665039064, 0.1332064971923828, 0.13293157958984375, 0.13277798461914062, 0.13319168090820313, 0.133029052734375, 0.13149212646484376, 0.13272434997558594, 0.1328358154296875, 0.13285597229003906, 0.13362818908691407, 0.13380607604980468, 0.13284967041015625, 0.13364224243164063, 0.1324167022705078, 0.1336282501220703, 0.13283946228027343, 0.13319212341308595, 0.13288243103027345, 0.13366886901855468, 0.13293154907226562, 0.1325791015625, 0.13435877990722656, 0.13383477783203124, 0.13376106262207033, 0.13330476379394532, 0.1338839111328125, 0.13341807556152344, 0.14286848449707032, 0.13249740600585938, 0.13156512451171876, 0.1323686065673828, 0.13100668334960938, 0.13271017456054687, 0.1317357177734375, 0.13471673583984375, 0.133849853515625, 0.13259365844726562, 0.13278390502929688, 0.13150405883789062, 0.13148597717285157, 0.13246214294433595, 0.1332126007080078, 0.133876953125, 0.13348739624023437, 0.1327361297607422, 0.13155116271972656, 0.13266630554199219, 0.1330462646484375, 0.1326195831298828, 0.1334586181640625, 0.13274520874023438, 0.13343128967285156, 0.13299696350097656, 0.13223648071289062, 0.133376953125, 0.13217324829101562, 0.1328269500732422, 0.1326719970703125, 0.1329822998046875, 0.13324365234375, 0.13251174926757814, 0.13327360534667967, 0.13322802734375, 0.1324072265625, 0.13339500427246093, 0.1334824981689453, 0.13348246765136718, 0.13301763916015624, 0.13334732055664061, 0.13360537719726562, 0.13283932495117187, 0.133099609375, 0.13341212463378907, 0.13298556518554688, 0.13278822326660156, 0.13312205505371094, 0.13360079956054688, 0.13300489807128907, 0.13371218872070312, 0.13317567443847655, 0.1330300750732422, 0.13305142211914062, 0.13355516052246094, 0.13333299255371095, 0.13270547485351564, 0.1340046691894531, 0.1339994812011719, 0.13366047668457032, 0.13356871032714843, 0.1336682891845703, 0.1436278076171875, 0.13220317077636717, 0.13088368225097657, 0.13076275634765624, 0.13112722778320313, 0.13272889709472657, 0.13268736267089845, 0.136395263671875, 0.1331199951171875, 0.1331546630859375, 0.1315944061279297, 0.131778564453125, 0.13228749084472657, 0.13228953552246095, 0.13406207275390625, 0.1340491485595703, 0.13315692138671875, 0.13158457946777344, 0.13146435546875, 0.13244441223144532, 0.13209408569335937, 0.13313481140136718, 0.13340194702148436, 0.13348512268066406, 0.13315020751953124, 0.1318876190185547, 0.1325609893798828, 0.13177650451660156, 0.13260319519042968, 0.13314633178710938, 0.13339132690429686, 0.13355731201171875, 0.13324794006347657, 0.13330975341796875, 0.13186720275878908, 0.1330029754638672, 0.13346038818359374, 0.13309132385253905, 0.13300326538085938, 0.13301895141601563, 0.1329115447998047, 0.1334254150390625, 0.13235813903808594, 0.13341900634765624, 0.13362995910644532, 0.13292320251464843, 0.13315501403808594, 0.1336538848876953, 0.13278886413574217, 
0.1337855987548828, 0.13395558166503907, 0.13363609313964844, 0.13351731872558595, 0.13365248107910158, 0.13387945556640626, 0.13360569763183594, 0.1330401611328125, 0.1336873016357422, 0.13383474731445313, 0.13365863037109374, 0.1338133087158203, 0.133949951171875, 0.13327740478515626]",tokens/s,7.52965078476103,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,822.14912,2243.821568,0.0,1855.97952,1739.124736,s,1,10.61160546875,10.61160546875,0.0,10.61160546875,10.61160546875,10.61160546875,10.61160546875,[10.61160546875],,kWh,9.353591123332686e-05,1.0310324019249726e-05,3.051113551999496e-05,0.00013435737077257154,,MB,1283.366912,2472.411136,0.0,2057.306112,1963.556864,s,10,3.604744232177735,0.36047442321777345,0.0010657318468631856,0.36009109497070313,0.36215008239746094,0.36222930755615235,0.36229268768310546,"[0.36230853271484376, 0.3594710693359375, 0.3599339904785156, 0.35903729248046873, 0.3600617980957031, 0.35963943481445315, 0.3621324768066406, 0.361402587890625, 0.3601203918457031, 0.36063665771484377]",tokens/s,710.1752121962414,kWh,1.0762885572917943e-05,1.1869538587195913e-06,7.1201445849995715e-06,1.9069984016637108e-05,tokens/kWh,13424237.78523671,MB,1316.491264,2472.411136,0.0,2057.306112,1963.559424,s,10,29.220305908203123,2.9220305908203126,0.004196106228679357,2.923776123046875,2.9260361572265623,2.9270492065429687,2.9278596459960937,"[2.91975927734375, 2.9165634765625, 2.924203857421875, 2.91803173828125, 2.928062255859375, 2.914853271484375, 2.92546875, 2.923452392578125, 2.92581103515625, 2.924099853515625]",tokens/s,21.56034923040069,kWh,8.488613385166899e-05,9.363124784588771e-06,4.8706538965200694e-05,0.00014295579760145843,tokens/kWh,440695.662974338,,s,630,29.216711975097652,0.046375733293805795,0.000758803484692743,0.04618641662597656,0.04680830001831055,0.047430085563659664,0.04974509346008301,"[0.04861209487915039, 0.046202880859375, 0.04621347045898438, 0.04626800155639649, 0.046202880859375, 0.04613740921020508, 0.04659423828125, 0.04632761764526367, 0.04637491226196289, 0.047075328826904295, 0.046228641510009764, 0.046635009765625, 0.046166881561279294, 0.046622718811035156, 0.046565185546875, 0.0467677116394043, 0.04639599990844727, 0.047890430450439454, 0.046532608032226565, 0.04666489410400391, 0.04641628646850586, 0.046233760833740235, 0.04648780822753906, 0.04609024047851563, 0.046174144744873045, 0.046237758636474606, 0.0461475830078125, 0.046115966796875, 0.04611884689331055, 0.04616640090942383, 0.046090816497802736, 0.04621311950683594, 0.04605462265014648, 0.045892383575439455, 0.04597350311279297, 0.04617830276489258, 0.04600831985473633, 0.046187904357910155, 0.04626051330566406, 0.046303199768066405, 0.04673984146118164, 0.04697087860107422, 0.04638515090942383, 0.04615711975097656, 0.046015167236328126, 0.046102527618408204, 0.046217086791992185, 0.04622880172729492, 0.046103359222412106, 0.046088191986083986, 
0.046069759368896485, 0.046276351928710935, 0.046113025665283205, 0.04648518371582031, 0.04671859359741211, 0.046209728240966794, 0.045871105194091794, 0.04633209609985352, 0.04609212875366211, 0.046229473114013674, 0.04594278335571289, 0.04604073715209961, 0.046192798614501956, 0.04821404647827148, 0.04612137603759765, 0.046198463439941405, 0.0461616325378418, 0.046201438903808595, 0.04618633651733398, 0.04626243209838867, 0.046085311889648435, 0.04602329635620117, 0.04607104110717773, 0.04934751892089844, 0.046397281646728517, 0.0460494384765625, 0.04628591918945312, 0.04616515350341797, 0.046177345275878905, 0.046295745849609375, 0.045990943908691406, 0.046031295776367186, 0.046044830322265626, 0.0462344970703125, 0.0460777587890625, 0.04601494216918945, 0.04623535919189453, 0.0462213134765625, 0.046259391784667966, 0.0468304328918457, 0.046163070678710935, 0.046033439636230467, 0.04618239974975586, 0.0459062385559082, 0.04638070297241211, 0.046145889282226564, 0.046080352783203125, 0.046198814392089844, 0.04628031921386719, 0.046317569732666014, 0.046268417358398435, 0.04636671829223633, 0.04635238265991211, 0.046182239532470706, 0.04646928024291992, 0.046366016387939454, 0.04624435043334961, 0.04612847900390625, 0.04622844696044922, 0.04634815979003906, 0.04648294448852539, 0.04632787322998047, 0.04604064178466797, 0.0461607666015625, 0.045946624755859374, 0.04624614334106445, 0.046018848419189455, 0.04611779022216797, 0.04598819351196289, 0.04671126556396484, 0.04641763305664062, 0.0462723503112793, 0.046131649017333985, 0.04614144134521484, 0.04624697494506836, 0.0461376953125, 0.04828160095214844, 0.0462476806640625, 0.04708992004394531, 0.04599321746826172, 0.04611129760742187, 0.04602080154418945, 0.0460184326171875, 0.04629926300048828, 0.04605923080444336, 0.046528800964355466, 0.0459563217163086, 0.046144287109375, 0.046087646484375, 0.046166561126708985, 0.045930496215820314, 0.049433887481689455, 0.04621382522583008, 0.045959102630615235, 0.04636707305908203, 0.04627439880371094, 0.04616796875, 0.045948928833007815, 0.04599398422241211, 0.04595916748046875, 0.04594387054443359, 0.04591712188720703, 0.04605257415771485, 0.04623820877075195, 0.04701827239990235, 0.04599193572998047, 0.046075393676757816, 0.046100990295410156, 0.04595859146118164, 0.04726755142211914, 0.05602799987792969, 0.04639273452758789, 0.046070369720458984, 0.04599971389770508, 0.04580803298950195, 0.04757289505004883, 0.04597769546508789, 0.046266368865966793, 0.04638435363769531, 0.04665628814697265, 0.04596857452392578, 0.04583712005615234, 0.045851776123046875, 0.04595596694946289, 0.04593366241455078, 0.04610755157470703, 0.04589567947387695, 0.045897022247314456, 0.04584313583374024, 0.04587276840209961, 0.04601295852661133, 0.04604064178466797, 0.04680108642578125, 0.04621257781982422, 0.04604172897338867, 0.046007808685302735, 0.04630579376220703, 0.04605952072143555, 0.04621721649169922, 0.0496453742980957, 0.04758108901977539, 0.0460403823852539, 0.04597190475463867, 0.04592812728881836, 0.04623139190673828, 0.04599276733398437, 0.0461146240234375, 0.045997825622558594, 0.04598419189453125, 0.046258174896240234, 0.046063617706298826, 0.04600406265258789, 0.04618051147460937, 0.04616806411743164, 0.046222591400146486, 0.04601523208618164, 0.04596323013305664, 0.046209056854248046, 0.04650393676757812, 0.04759142303466797, 0.04608748626708985, 0.046591838836669924, 0.0464986572265625, 0.046252033233642575, 0.04649894332885742, 0.04618668746948242, 0.04618086242675781, 0.046147777557373044, 
0.04611686325073242, 0.046128990173339844, 0.04643036651611328, 0.04626156616210937, 0.04626860809326172, 0.04624153518676758, 0.04651871871948242, 0.046671775817871096, 0.04626243209838867, 0.04612444686889648, 0.04642636871337891, 0.046465633392333984, 0.04605542373657227, 0.047017982482910156, 0.04646425628662109, 0.0467217903137207, 0.04636467361450195, 0.04609638214111328, 0.04612505722045898, 0.04600380706787109, 0.046034400939941406, 0.04605228805541992, 0.04605305480957031, 0.04612441635131836, 0.046088382720947264, 0.04597836685180664, 0.04608367919921875, 0.045973823547363284, 0.04643027114868164, 0.04609552001953125, 0.04676492691040039, 0.046018558502197264, 0.046186496734619144, 0.045932159423828126, 0.04856441497802735, 0.046157279968261716, 0.046172256469726565, 0.046180286407470704, 0.04623820877075195, 0.04632166290283203, 0.0462889289855957, 0.046346176147460935, 0.046612510681152346, 0.04625408172607422, 0.04656947326660156, 0.04613324737548828, 0.046510272979736325, 0.0465467529296875, 0.047358142852783204, 0.04634396743774414, 0.04688284683227539, 0.046061569213867185, 0.04639273452758789, 0.04609699249267578, 0.04614960098266602, 0.04607123184204102, 0.046209407806396485, 0.04670281600952148, 0.046342144012451174, 0.046061473846435545, 0.04623164749145508, 0.04634828948974609, 0.047510528564453126, 0.04647958374023437, 0.04630579376220703, 0.04600214385986328, 0.04653907012939453, 0.046130302429199216, 0.04620991897583008, 0.046129150390625, 0.04605132675170898, 0.045894847869873044, 0.045978431701660154, 0.046231521606445315, 0.04604444885253906, 0.050669345855712894, 0.04778815841674805, 0.04761276626586914, 0.04621433639526367, 0.04748371124267578, 0.046241249084472656, 0.046096927642822264, 0.04658537673950195, 0.04603718566894531, 0.04631584167480469, 0.04645238494873047, 0.046055744171142575, 0.04601206588745117, 0.045996383666992186, 0.046276607513427735, 0.04605542373657227, 0.04601436614990234, 0.04600841522216797, 0.04677427291870117, 0.04688396835327149, 0.04614559936523437, 0.046329792022705076, 0.04923980712890625, 0.046583072662353515, 0.046121055603027344, 0.04614038467407226, 0.046038303375244144, 0.0457806396484375, 0.04584284973144531, 0.04591059112548828, 0.046094337463378904, 0.046325408935546875, 0.04599132919311524, 0.04603363037109375, 0.04601878356933594, 0.04600012969970703, 0.0459398078918457, 0.045982177734375, 0.046020641326904296, 0.04578345489501953, 0.045856639862060546, 0.04961907196044922, 0.04603228759765625, 0.04596591949462891, 0.04593459320068359, 0.046526432037353516, 0.04627609634399414, 0.04629967880249024, 0.04588544082641602, 0.04601388931274414, 0.04611705780029297, 0.045805599212646486, 0.04622371292114258, 0.0462044792175293, 0.04673948669433594, 0.04601692962646484, 0.04607731246948242, 0.0460489616394043, 0.04591302490234375, 0.04747993469238281, 0.04656422424316406, 0.0461552963256836, 0.04589945602416992, 0.04598863983154297, 0.04630342483520508, 0.04594374465942383, 0.045830177307128905, 0.04788719940185547, 0.046045055389404295, 0.046061695098876955, 0.04603289413452148, 0.04622079849243164, 0.046687744140625, 0.046222335815429685, 0.04626432037353516, 0.045894752502441405, 0.046090782165527346, 0.046163841247558596, 0.046508544921875, 0.04598486328125, 0.0459598388671875, 0.04618880081176758, 0.046706687927246096, 0.04600831985473633, 0.04601036834716797, 0.04917465591430664, 0.04602006530761719, 0.04617257690429687, 0.046112255096435545, 0.047117984771728516, 0.046056129455566405, 0.04644265747070313, 
0.046449951171875, 0.046144287109375, 0.04623126220703125, 0.046080257415771486, 0.04659747314453125, 0.046407936096191406, 0.04607564926147461, 0.046223167419433595, 0.04607673645019531, 0.04609024047851563, 0.04639129638671875, 0.047013248443603516, 0.046633022308349606, 0.04652124786376953, 0.046157470703125, 0.04604927825927734, 0.04612492752075195, 0.04714918518066406, 0.04630527877807617, 0.04622463989257813, 0.046619392395019534, 0.04604313659667969, 0.04601036834716797, 0.04675315093994141, 0.04599043273925781, 0.04642416000366211, 0.04603638458251953, 0.04615875244140625, 0.04608172988891602, 0.04594278335571289, 0.045917312622070314, 0.04620758438110351, 0.046414112091064455, 0.04677948760986328, 0.04624873733520508, 0.04624720001220703, 0.04635529708862305, 0.04622502517700195, 0.04613542556762695, 0.04601430511474609, 0.04680716705322266, 0.04685171127319336, 0.0458963508605957, 0.046176254272460936, 0.04599961471557617, 0.046166526794433595, 0.04605132675170898, 0.04626768112182617, 0.04628473663330078, 0.04610915374755859, 0.04610201644897461, 0.0459169921875, 0.04682137680053711, 0.046878719329833986, 0.050726913452148435, 0.04738057708740234, 0.049785823822021485, 0.04685171127319336, 0.0463436164855957, 0.04631548690795898, 0.04636156845092773, 0.04606806564331055, 0.045946624755859374, 0.04617001724243164, 0.046183425903320315, 0.046031425476074216, 0.04658150482177734, 0.04677702331542969, 0.04637696075439453, 0.046456512451171876, 0.04613561630249023, 0.046886913299560545, 0.04662787246704102, 0.04813923263549805, 0.04653993606567383, 0.047225311279296876, 0.046038433074951174, 0.046322654724121094, 0.04618393707275391, 0.04633651351928711, 0.04604313659667969, 0.046088191986083986, 0.04616540908813477, 0.046203617095947266, 0.046642913818359374, 0.04665155029296875, 0.045919456481933595, 0.04621561431884766, 0.04702854537963867, 0.04652649688720703, 0.04628470230102539, 0.046211166381835936, 0.046129150390625, 0.04635033416748047, 0.04622713470458984, 0.04622147369384766, 0.045921600341796875, 0.04593056106567383, 0.045918399810791016, 0.04598435211181641, 0.046063617706298826, 0.046034271240234376, 0.04592006301879883, 0.046262496948242186, 0.0461605110168457, 0.04602880096435547, 0.04612102508544922, 0.04607577514648437, 0.045946529388427734, 0.04599030303955078, 0.04651993560791016, 0.04636710357666016, 0.046394367218017575, 0.04607897567749023, 0.04665756988525391, 0.04687664031982422, 0.04671395111083984, 0.046261150360107424, 0.04722614288330078, 0.04913116836547852, 0.04617862319946289, 0.046241790771484374, 0.046225215911865236, 0.046241214752197266, 0.04629171371459961, 0.046120960235595705, 0.046548992156982424, 0.04625408172607422, 0.04613679885864258, 0.04606208038330078, 0.046438335418701175, 0.046362720489501956, 0.04639894485473633, 0.04613507080078125, 0.04604313659667969, 0.04612172698974609, 0.046069759368896485, 0.046115009307861325, 0.04624150466918946, 0.046139488220214846, 0.04606755065917969, 0.046015743255615235, 0.051176193237304685, 0.046659038543701174, 0.04612496185302734, 0.04641667175292969, 0.047026176452636716, 0.046522144317626954, 0.046865886688232425, 0.046766143798828125, 0.04636643218994141, 0.04646928024291992, 0.046228286743164065, 0.047470592498779295, 0.04719820785522461, 0.04621721649169922, 0.04602265548706055, 0.046077953338623044, 0.04614889526367188, 0.04790959930419922, 0.04675702285766602, 0.04598614501953125, 0.04604774475097656, 0.04611187362670898, 0.046058143615722656, 0.04596985626220703, 0.04606880187988281, 
0.04646281433105469, 0.04613619232177734, 0.04611993789672852, 0.04696086502075195, 0.04626512145996094, 0.04587724685668945, 0.04583343887329101, 0.04654735946655274, 0.046397823333740235, 0.04601593780517578, 0.04607398223876953, 0.04611116790771484, 0.046229473114013674, 0.046238975524902345, 0.045934528350830076, 0.04981795120239258, 0.046581760406494144, 0.04708966445922851, 0.04650188827514649, 0.04617609786987305, 0.04596342468261719, 0.046069759368896485, 0.047168510437011715, 0.04621171188354492, 0.04605948638916016, 0.04627027130126953, 0.046111328125, 0.04605859375, 0.04589865493774414, 0.05118947219848633, 0.04597107315063476, 0.046139713287353515, 0.0459279670715332, 0.04601436614990234, 0.046276702880859374, 0.04628534317016601, 0.046276798248291014, 0.04632393646240234, 0.0464835205078125, 0.04659299087524414, 0.04641180801391601, 0.04614188766479492, 0.04763276672363281, 0.04647731018066406, 0.04636048126220703, 0.046481216430664066, 0.0468045768737793, 0.04614121627807617, 0.04616003036499024, 0.046039806365966794, 0.046486881256103514, 0.04640332794189453, 0.046222240447998046, 0.045897472381591795, 0.04601663970947266, 0.046115966796875, 0.04641689682006836, 0.04632511901855469, 0.04600076675415039, 0.04593174362182617, 0.04597635269165039, 0.04596121597290039, 0.04627795028686523, 0.046231361389160154, 0.04656422424316406, 0.04681849670410156, 0.046973918914794924, 0.04624367904663086, 0.04618854522705078, 0.04610047912597656, 0.04620864105224609, 0.046109054565429686, 0.046045185089111325, 0.04593385696411133, 0.046166751861572264, 0.04614144134521484, 0.04599372863769531, 0.04588806533813477]",tokens/s,21.563001358159994,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,820.232192,1507.721216,0.0,1105.199104,1100.186112,s,1,9.1686171875,9.1686171875,0.0,9.1686171875,9.1686171875,9.1686171875,9.1686171875,[9.1686171875],,kWh,6.105423697919529e-05,6.725679495781446e-06,1.9034459671996018e-05,8.681437614697275e-05,,MB,1326.850048,1719.533568,0.0,1304.428544,1276.66176,s,10,1.9329819183349608,0.1932981918334961,0.0010310175278981436,0.19312327575683594,0.19438813781738282,0.19479902648925781,0.19512773742675782,"[0.19239555358886717, 0.1938460235595703, 0.19306764221191405, 0.1942968292236328, 0.19176109313964843, 0.19317890930175782, 0.19214761352539061, 0.19283450317382814, 0.19424383544921875, 0.1952099151611328]",tokens/s,1324.3786585469677,kWh,5.803806914134696e-06,6.400570650979257e-07,3.836321151843117e-06,1.0280185131075739e-05,tokens/kWh,24902275.273831733,MB,1369.960448,1721.63072,0.0,1304.428544,1276.66432,s,10,27.032483154296873,2.703248315429687,0.025230770887203848,2.7096844482421876,2.7304510009765623,2.733205603027344,2.735409284667969,"[2.7244365234375, 2.69785791015625, 2.66334619140625, 2.68140576171875, 2.685256591796875, 2.67102392578125, 2.7298388671875, 2.72184619140625, 2.735960205078125, 
2.721510986328125]",tokens/s,23.30529520370237,kWh,7.960028177711259e-05,8.779931558177552e-06,3.529312681835556e-05,0.00012367334015364568,tokens/kWh,509406.47290460416,,s,630,27.030354019165053,0.04290532383994451,0.000814258211831674,0.04291955184936523,0.043491212463378906,0.043896846199035645,0.04615418544769287,"[0.043106048583984376, 0.0462050895690918, 0.04376790237426758, 0.0429890251159668, 0.042891807556152343, 0.042872833251953124, 0.0433172492980957, 0.04317593765258789, 0.04287430572509766, 0.04271571350097656, 0.043358207702636715, 0.04307344055175781, 0.043191967010498045, 0.04288166427612305, 0.04282553482055664, 0.04291584014892578, 0.043033790588378903, 0.04298451232910156, 0.04365081787109375, 0.04315465545654297, 0.0478809928894043, 0.04343344116210938, 0.04312947082519531, 0.042739616394042966, 0.042829822540283204, 0.04347475051879883, 0.04294083023071289, 0.04283324813842773, 0.0428650894165039, 0.04306124877929687, 0.04314518356323242, 0.04283766555786133, 0.0429541130065918, 0.04307046508789063, 0.042929729461669924, 0.04316534423828125, 0.04298953628540039, 0.04295372772216797, 0.04344188690185547, 0.04311593627929688, 0.04274451065063477, 0.04296499252319336, 0.04287065505981445, 0.04309388732910156, 0.04288700866699219, 0.043028350830078124, 0.04286723327636719, 0.042882591247558596, 0.042914272308349606, 0.04307558441162109, 0.04315241622924805, 0.04363145446777344, 0.043011390686035156, 0.04286246490478516, 0.04472313690185547, 0.045856769561767576, 0.04293791961669922, 0.0432603530883789, 0.04333158493041992, 0.04278265762329102, 0.04269062423706055, 0.04272857666015625, 0.04317011260986328, 0.043128833770751954, 0.04342771148681641, 0.04348735809326172, 0.04306121444702148, 0.042814785003662106, 0.042742816925048825, 0.04272636795043945, 0.04268422317504883, 0.0425972785949707, 0.042498046875, 0.0429936637878418, 0.04265369415283203, 0.04297267150878906, 0.04243632125854492, 0.04260534286499024, 0.042678592681884765, 0.04257555389404297, 0.0427044792175293, 0.042753631591796876, 0.042537185668945314, 0.042555679321289064, 0.0425143051147461, 0.04251078414916992, 0.042349952697753906, 0.04304703903198242, 0.04253142547607422, 0.042614688873291014, 0.04254515075683594, 0.042613792419433597, 0.04280358505249023, 0.04298713684082031, 0.0428812141418457, 0.04275270462036133, 0.04279228973388672, 0.04266793441772461, 0.04267708969116211, 0.042988609313964844, 0.042394687652587894, 0.04245484924316406, 0.0457158088684082, 0.042893024444580076, 0.043014240264892575, 0.0426607666015625, 0.04272796630859375, 0.04266665649414063, 0.04270675277709961, 0.042592254638671875, 0.045767711639404296, 0.04312742233276367, 0.04254105758666992, 0.04221401596069336, 0.04314287948608399, 0.042813438415527344, 0.04243167877197265, 0.042441505432128906, 0.0426025276184082, 0.0421761589050293, 0.042133857727050784, 0.04617216110229492, 0.04217139053344727, 0.04223436737060547, 0.0419901123046875, 0.041966304779052735, 0.04233011245727539, 0.04220108795166016, 0.04208025741577148, 0.041809791564941405, 0.04209676742553711, 0.041749599456787106, 0.04210505676269531, 0.04252659225463867, 0.04228131103515625, 0.04246540832519531, 0.04215843200683594, 0.04224137496948242, 0.04216899108886719, 0.04207001495361328, 0.04199782562255859, 0.046768638610839845, 0.04224409484863281, 0.041952606201171874, 0.0418408317565918, 0.04188550567626953, 0.042179424285888674, 0.041799488067626955, 0.04175872039794922, 0.04165603256225586, 0.04174636840820312, 0.04194543838500977, 0.0418158073425293, 
0.042059745788574215, 0.04177948760986328, 0.04209356689453125, 0.041917407989501956, 0.042378337860107425, 0.04238431930541992, 0.042244350433349606, 0.04200409698486328, 0.04192233657836914, 0.04196553421020508, 0.042434688568115234, 0.041853374481201175, 0.0421610221862793, 0.04631852722167969, 0.04248118209838867, 0.04238943862915039, 0.042158622741699216, 0.04281734466552734, 0.04198009490966797, 0.04209171295166016, 0.04196799850463867, 0.04214137649536133, 0.04223683166503906, 0.04216815948486328, 0.04234249496459961, 0.04228086471557617, 0.042100929260253904, 0.041942783355712894, 0.042446910858154295, 0.042987518310546875, 0.04226665496826172, 0.04211503982543945, 0.042039295196533204, 0.04220528030395508, 0.04215763092041016, 0.0424510383605957, 0.0435722541809082, 0.042619583129882815, 0.04224982452392578, 0.04243641662597656, 0.042199935913085934, 0.042106624603271484, 0.04214387130737305, 0.042608768463134765, 0.042194366455078125, 0.04265776062011719, 0.04212140655517578, 0.042076702117919924, 0.0422334098815918, 0.042137409210205076, 0.041990432739257816, 0.04199049758911133, 0.04208035278320312, 0.04257785415649414, 0.042218368530273435, 0.04205871963500977, 0.042326271057128904, 0.04235443115234375, 0.04241408157348633, 0.04191231918334961, 0.0424463996887207, 0.041982398986816404, 0.042039295196533204, 0.042092384338378905, 0.041950977325439454, 0.04212105560302734, 0.0423040657043457, 0.042877216339111325, 0.04233174514770508, 0.04611017608642578, 0.042478305816650394, 0.042280128479003906, 0.04348390579223633, 0.042211582183837894, 0.043365345001220704, 0.04212966537475586, 0.04211497497558594, 0.04219295883178711, 0.042947166442871096, 0.04240790557861328, 0.04215193557739258, 0.04242432022094727, 0.04249599838256836, 0.0426036148071289, 0.04209961700439453, 0.042231903076171876, 0.04220083236694336, 0.04221763229370117, 0.05168742370605469, 0.04234444808959961, 0.04243072128295899, 0.04205347061157227, 0.04233964920043945, 0.042527328491210936, 0.0419284782409668, 0.043203872680664064, 0.04217951965332031, 0.04231926345825195, 0.042579902648925784, 0.043491329193115234, 0.04317753601074219, 0.042925792694091795, 0.04352073669433594, 0.042625022888183595, 0.04226448059082031, 0.042299488067626956, 0.042329727172851564, 0.042139999389648436, 0.042180641174316406, 0.04310985565185547, 0.042236766815185546, 0.042390625, 0.04258671951293945, 0.04249744033813477, 0.042955135345458986, 0.04265158462524414, 0.04232790374755859, 0.04222598266601563, 0.04242208099365234, 0.04215737533569336, 0.04259766387939453, 0.0427250862121582, 0.04219903945922852, 0.042657825469970705, 0.042619937896728514, 0.04265801620483398, 0.04280579376220703, 0.04233776092529297, 0.0425458869934082, 0.042270721435546874, 0.04230287933349609, 0.04241427230834961, 0.0423076171875, 0.04314764785766602, 0.04299935913085937, 0.042582046508789065, 0.04255100631713867, 0.042388160705566405, 0.04266419219970703, 0.04286054229736328, 0.042561279296875, 0.0425711669921875, 0.04248665618896484, 0.04244246292114258, 0.04230758285522461, 0.04266950225830078, 0.04228768157958984, 0.042490974426269534, 0.04250239944458008, 0.04261545562744141, 0.04276019287109375, 0.04266150283813477, 0.04243452835083008, 0.04265820693969727, 0.042657440185546874, 0.04234684753417969, 0.0425082893371582, 0.04220927810668945, 0.04228041458129883, 0.043646591186523434, 0.04375849533081055, 0.04387635040283203, 0.04434316635131836, 0.043061279296875, 0.042313087463378904, 0.042016735076904295, 0.04218751907348633, 
0.044695552825927735, 0.04228838348388672, 0.042407745361328124, 0.04226572799682617, 0.04217852783203125, 0.042051422119140626, 0.042208992004394534, 0.0458917121887207, 0.042638816833496095, 0.0421951675415039, 0.042299198150634765, 0.042893600463867185, 0.04281942367553711, 0.04245142364501953, 0.042068031311035155, 0.04225024032592774, 0.041963775634765624, 0.04207180786132812, 0.041995296478271486, 0.04236796951293945, 0.0424532470703125, 0.04197145462036133, 0.04197600173950195, 0.042718944549560545, 0.04230358505249023, 0.04201881790161133, 0.04191641616821289, 0.042329246520996094, 0.04213840103149414, 0.04198406219482422, 0.04236265563964844, 0.042124961853027346, 0.04234684753417969, 0.042097888946533206, 0.04248438262939453, 0.04214723205566406, 0.042379871368408206, 0.042586463928222656, 0.042673694610595704, 0.04254751968383789, 0.04212547302246094, 0.04211238479614258, 0.04222332763671875, 0.042363807678222655, 0.04236508941650391, 0.04212105560302734, 0.04231932830810547, 0.04231545639038086, 0.04205039978027344, 0.042102943420410155, 0.042092384338378905, 0.04244604873657227, 0.04221804809570313, 0.04201203155517578, 0.04208240127563476, 0.04204185485839844, 0.04228121566772461, 0.04208025741577148, 0.04406003189086914, 0.04366604614257812, 0.04374528121948242, 0.043498783111572265, 0.043268833160400394, 0.043447582244873044, 0.04333548736572266, 0.043410465240478514, 0.04356832122802735, 0.04333391952514649, 0.04313734436035156, 0.04311439895629883, 0.04325939178466797, 0.04321769714355469, 0.04324752044677734, 0.043069438934326174, 0.04330495834350586, 0.043046913146972655, 0.04306086349487305, 0.0430780143737793, 0.04331110382080078, 0.04326614379882812, 0.04331510543823242, 0.04313724899291992, 0.043329376220703125, 0.043263935089111326, 0.043098270416259764, 0.04331849670410156, 0.042961536407470705, 0.0434315185546875, 0.04320694351196289, 0.04323276901245117, 0.04347951889038086, 0.04317187118530273, 0.04313919830322266, 0.043063297271728515, 0.04296480178833008, 0.0434095687866211, 0.04340943908691406, 0.04360396957397461, 0.04320665740966797, 0.04311040115356445, 0.04338822555541992, 0.04312054443359375, 0.04382128143310547, 0.04321654510498047, 0.043240352630615236, 0.04312473678588867, 0.043257568359375, 0.04319875335693359, 0.04334150314331055, 0.04320467376708984, 0.043194625854492186, 0.04307763290405273, 0.04314249420166016, 0.04342441558837891, 0.04349747085571289, 0.04348223876953125, 0.043168350219726564, 0.04413673782348633, 0.04322246551513672, 0.0434161262512207, 0.04465049743652344, 0.04422655868530274, 0.043470783233642576, 0.043689151763916016, 0.043074432373046874, 0.04335411071777344, 0.0430750732421875, 0.04348483276367188, 0.04303036880493164, 0.04285337448120117, 0.042925182342529296, 0.04322598266601563, 0.04305324935913086, 0.04309939193725586, 0.043069217681884764, 0.043033374786376956, 0.04441680145263672, 0.04461369705200195, 0.04336041641235352, 0.04360192108154297, 0.043153408050537106, 0.043200225830078126, 0.043014625549316406, 0.04316508865356445, 0.04296918487548828, 0.04321104049682617, 0.043655391693115234, 0.043198272705078124, 0.04327363204956055, 0.04312944030761719, 0.0431545295715332, 0.043119518280029294, 0.042977054595947264, 0.04310563278198242, 0.042980224609375, 0.04302643203735351, 0.04296441650390625, 0.04292256164550781, 0.04300559997558594, 0.042981025695800784, 0.04300255966186523, 0.04302204895019531, 0.043397407531738284, 0.04300799942016602, 0.04295475387573242, 0.04295695877075195, 0.04289318466186524, 
0.04368159866333008, 0.04318649673461914, 0.04339843368530273, 0.04329939270019531, 0.04311603164672852, 0.04299983978271484, 0.04333820724487305, 0.04341113662719726, 0.04300755310058594, 0.04299190521240234, 0.04300953674316406, 0.04308272171020508, 0.04293804931640625, 0.043190593719482424, 0.042890625, 0.04300454330444336, 0.04301174545288086, 0.04465663909912109, 0.043559967041015626, 0.04317283248901367, 0.0429854736328125, 0.04341535949707031, 0.043409889221191406, 0.04333129501342774, 0.04323715209960938, 0.043038814544677735, 0.042969215393066404, 0.04314316940307617, 0.043202560424804685, 0.043358207702636715, 0.04333283233642578, 0.04322364807128906, 0.04311497497558594, 0.04315939331054688, 0.04309372711181641, 0.043122753143310544, 0.04307548904418945, 0.04322323226928711, 0.04331753540039063, 0.04342918395996094, 0.04297296142578125, 0.042904193878173826, 0.043050369262695315, 0.043000446319580075, 0.043060638427734374, 0.04342639923095703, 0.04336396789550781, 0.04336064147949219, 0.04366262435913086, 0.04312547302246094, 0.0429486083984375, 0.04302643203735351, 0.045350910186767575, 0.044879104614257814, 0.04326902389526367, 0.04333961486816406, 0.04395241546630859, 0.04316745758056641, 0.04303462219238281, 0.045574142456054685, 0.04389616012573242, 0.04340803146362305, 0.04372652816772461, 0.044523841857910154, 0.04404608154296875, 0.04384592056274414, 0.04390662384033203, 0.0436863021850586, 0.044023712158203124, 0.04346275329589844, 0.043449726104736325, 0.043444671630859376, 0.04309625625610351, 0.043103904724121095, 0.0429431037902832, 0.04297043228149414, 0.04298099136352539, 0.043026241302490234, 0.04303267288208008, 0.04315430450439453, 0.04344451141357422, 0.043611873626708986, 0.04312268829345703, 0.043153408050537106, 0.04290969467163086, 0.042810657501220706, 0.042914527893066406, 0.04361011123657227, 0.04309395217895508, 0.043522209167480466, 0.042848159790039066, 0.04294236755371094, 0.043165985107421874, 0.04303852844238281, 0.04307686233520508, 0.04299993515014648, 0.04300051116943359, 0.04309804916381836, 0.04303606414794922, 0.042920543670654294, 0.042831710815429684, 0.04294892883300781, 0.04324131011962891, 0.04292563247680664, 0.043079296112060544, 0.04310508728027344, 0.043230430603027344, 0.04315635299682617, 0.04288911819458008, 0.043003902435302735, 0.04334169769287109, 0.04320483016967774, 0.04313183975219727, 0.042971393585205075, 0.04310432052612305, 0.042918560028076175, 0.04283801651000976, 0.042974559783935544, 0.04289193725585937, 0.04290780639648437, 0.04323721694946289, 0.042997760772705076, 0.04313907241821289, 0.042872032165527346, 0.04392732620239258, 0.043143455505371096, 0.04310844802856445, 0.04306166458129883, 0.04296316909790039, 0.04725964736938477, 0.04362188720703125, 0.04373555374145508, 0.04309222412109375, 0.04285209655761719, 0.04336547088623047, 0.042832927703857424, 0.043491199493408204, 0.04282361602783203, 0.04307356643676758, 0.043138526916503904, 0.04389740753173828, 0.043200511932373044, 0.04347260665893555]",tokens/s,23.307130922270492,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,912.134144,2944.270336,0.0,2562.719744,2545.32608,s,1,9.7637705078125,9.7637705078125,0.0,9.7637705078125,9.7637705078125,9.7637705078125,9.7637705078125,[9.7637705078125],,kWh,8.598260795834753e-05,9.47010883951838e-06,2.7723633289983285e-05,0.0001231763500878492,,MB,1507.782656,3812.491264,0.0,3393.191936,2965.804544,s,10,4.401982421875,0.44019824218750003,0.002650194212010639,0.4389751129150391,0.44345640563964844,0.44385445404052737,0.44417289276123045,"[0.43804110717773437, 0.4373080444335937, 0.4377965698242188, 0.43922024536132814, 0.43872998046875, 0.4375080871582031, 0.44252194213867185, 0.44336795043945315, 0.44425250244140624, 0.44323599243164064]",tokens/s,581.5561614418219,kWh,1.2803935697100948e-05,1.4120548033655937e-06,8.22592445513019e-06,2.244191495559673e-05,tokens/kWh,11407226.188429914,MB,1552.736256,3814.588416,0.0,3395.289088,2965.807104,s,10,37.300084960937504,3.7300084960937503,0.02297604179318907,3.7338985595703127,3.7505876953125,3.76014931640625,3.76779861328125,"[3.691657958984375, 3.696339111328125, 3.7104609375, 3.731341064453125, 3.737550537109375, 3.732173095703125, 3.746764404296875, 3.748462890625, 3.7697109375, 3.7356240234375]",tokens/s,16.8900419572708,kWh,0.0001071668974149805,1.1820907953029576e-05,5.7357989123068634e-05,0.00017634579449107873,tokens/kWh,357252.63640005403,,s,630,37.298276714324956,0.059203613832261824,0.0008879307239241785,0.05909276962280273,0.059976301574707035,0.060282085037231446,0.06291521751403809,"[0.05855583953857422, 0.05811014556884766, 0.05792499160766602, 0.058391006469726565, 0.05871023941040039, 0.05824873733520508, 0.05816400146484375, 0.058060798645019535, 0.05825126266479492, 0.05855846405029297, 0.05823395156860352, 0.05830339050292969, 0.06107955169677735, 0.059020801544189455, 0.05859958267211914, 0.058662654876708985, 0.058563167572021485, 0.058011646270751956, 0.05801369476318359, 0.05766326522827148, 0.05833750534057617, 0.058660545349121095, 0.05838460922241211, 0.059063838958740233, 0.05849350357055664, 0.05864243316650391, 0.05919049453735352, 0.058763839721679687, 0.05875027084350586, 0.058397441864013674, 0.05867536163330078, 0.05922982406616211, 0.058792320251464844, 0.05847449493408203, 0.05858099365234375, 0.05895167922973633, 0.05859852981567383, 0.05877996826171875, 0.058541919708251955, 0.05879638290405274, 0.05832537460327149, 0.058046432495117185, 0.058060832977294925, 0.05843881607055664, 0.05856547164916992, 0.05844377517700195, 0.058277889251708986, 0.058703006744384764, 0.06159036636352539, 0.05854617691040039, 0.058198047637939454, 0.05839459228515625, 0.06061385726928711, 0.058622753143310544, 0.058687488555908204, 0.05850243377685547, 0.05995798492431641, 0.05812838363647461, 0.05801567840576172, 0.05805062484741211, 0.057914657592773436, 0.057936161041259766, 0.05823942565917969, 0.05865667343139649, 0.058053440093994144, 0.05808947372436524, 0.05823635101318359, 0.058462814331054686, 0.05851337432861328, 0.059420673370361325, 0.05819574356079102, 0.05812656021118164, 0.05827993774414063, 0.05824857711791992, 0.05853452682495117, 0.05821571350097656, 0.05862198257446289, 0.05878182220458984, 0.05820060729980469, 0.058288158416748045, 0.058173439025878904, 0.05928972625732422, 0.0581077766418457, 0.05854528045654297, 0.058219070434570315, 0.05815737533569336, 
0.058208255767822265, 0.05880976104736328, 0.05825356674194336, 0.058218849182128905, 0.05827724838256836, 0.05828467178344727, 0.058621952056884766, 0.05923638534545898, 0.05907680130004883, 0.0587691535949707, 0.05873871994018555, 0.058574207305908205, 0.05860748672485352, 0.05908732986450195, 0.05897404861450195, 0.058521984100341796, 0.0587182731628418, 0.05813808059692383, 0.05841769790649414, 0.0582737922668457, 0.05878988647460937, 0.059525150299072266, 0.05953123092651367, 0.058726398468017575, 0.05862393569946289, 0.059416641235351564, 0.05897574234008789, 0.059295520782470704, 0.058439647674560544, 0.059171585083007815, 0.05878579330444336, 0.05851929473876953, 0.05890460968017578, 0.061044830322265625, 0.059271297454833984, 0.05869718551635742, 0.0584925422668457, 0.058507232666015624, 0.058555198669433595, 0.059666561126708983, 0.05919744110107422, 0.05894867324829101, 0.05874697494506836, 0.05925884628295899, 0.05869619369506836, 0.058300289154052734, 0.058171009063720705, 0.05863091278076172, 0.05842752075195313, 0.05859056091308594, 0.0598922233581543, 0.05880390548706055, 0.0583656005859375, 0.05834188842773438, 0.058060447692871095, 0.05951071929931641, 0.05905641555786133, 0.05853023910522461, 0.05848476791381836, 0.05837206268310547, 0.058619678497314455, 0.05823715209960938, 0.058461280822753904, 0.059448223114013675, 0.05864243316650391, 0.058877952575683595, 0.05896131134033203, 0.05884156799316406, 0.058458240509033206, 0.058834945678710934, 0.05915974426269531, 0.05939007949829102, 0.058916545867919924, 0.05875609588623047, 0.05883001708984375, 0.05878457641601562, 0.0588730239868164, 0.05872086334228516, 0.05918323135375977, 0.058519935607910155, 0.05845692825317383, 0.05867318344116211, 0.05939286422729492, 0.05945718383789062, 0.0585852165222168, 0.058955455780029295, 0.05911811065673828, 0.058716129302978516, 0.058726432800292966, 0.059156478881835936, 0.058910720825195315, 0.05865577697753906, 0.059071456909179684, 0.059485214233398434, 0.059122657775878905, 0.059271167755126954, 0.05887385559082031, 0.05931827163696289, 0.05937152099609375, 0.05935212707519531, 0.06026540756225586, 0.059121662139892575, 0.0593175048828125, 0.05936332702636719, 0.0588042221069336, 0.05858259201049805, 0.05853424072265625, 0.05867852783203125, 0.058692447662353514, 0.05864243316650391, 0.058942527770996095, 0.06168876647949219, 0.06065151977539063, 0.05975571060180664, 0.06299075317382813, 0.060338592529296874, 0.05965404891967773, 0.05971567916870117, 0.059224063873291016, 0.059677024841308594, 0.05963126373291015, 0.059315937042236325, 0.05911523056030273, 0.0588232307434082, 0.05860691070556641, 0.05899539184570313, 0.059273216247558595, 0.05879808044433594, 0.05870495986938477, 0.059243392944335935, 0.05871382522583008, 0.05914851379394531, 0.058822273254394535, 0.05916739273071289, 0.061932735443115235, 0.05854457473754883, 0.059472095489501955, 0.05893939208984375, 0.05891481781005859, 0.05873459243774414, 0.058619903564453124, 0.05909708786010742, 0.058864673614501956, 0.0595579833984375, 0.05913183975219727, 0.05889932632446289, 0.05913315200805664, 0.058656959533691405, 0.058546783447265625, 0.05923641586303711, 0.05896761703491211, 0.05889033508300781, 0.05865299224853516, 0.058996192932128905, 0.05884316635131836, 0.05870988845825195, 0.0590076789855957, 0.058991680145263674, 0.05891107177734375, 0.05884134292602539, 0.059096641540527343, 0.059335166931152344, 0.05925120162963867, 0.059215614318847656, 0.05994291305541992, 0.058935039520263674, 0.05918515014648437, 
0.05927347183227539, 0.05894521713256836, 0.05903801727294922, 0.05950054550170898, 0.05896192169189453, 0.059817249298095704, 0.05972387313842773, 0.05968681716918945, 0.05936406326293946, 0.05984460830688477, 0.06003235244750976, 0.05949462509155273, 0.0595849609375, 0.06515699005126953, 0.05940553665161133, 0.059085086822509764, 0.059076480865478516, 0.05931039810180664, 0.0590847053527832, 0.05913859176635742, 0.05906022262573242, 0.059025409698486325, 0.05927731323242187, 0.05905817413330078, 0.05918463897705078, 0.059062431335449216, 0.05914972686767578, 0.05991929626464844, 0.06004844665527344, 0.05956294250488281, 0.059230209350585934, 0.05945267105102539, 0.05938819122314453, 0.059126209259033204, 0.059007007598876955, 0.05875286483764648, 0.060024448394775394, 0.062251552581787106, 0.059049983978271485, 0.059584510803222655, 0.05868368148803711, 0.058617568969726565, 0.05997478485107422, 0.0590101432800293, 0.05913955307006836, 0.059068737030029295, 0.059382816314697266, 0.05901311874389648, 0.05903254318237305, 0.05885257720947266, 0.05893404769897461, 0.05902963256835937, 0.0590437126159668, 0.058947582244873044, 0.05871782302856445, 0.05871859359741211, 0.05866700744628906, 0.058466304779052736, 0.058531841278076174, 0.05887180709838867, 0.05850931167602539, 0.05824262237548828, 0.059009025573730466, 0.05852364730834961, 0.05837529754638672, 0.05904883193969727, 0.058799678802490235, 0.059213695526123045, 0.05868396759033203, 0.05969619369506836, 0.05905868911743164, 0.058431934356689454, 0.05866684722900391, 0.058525856018066404, 0.05885337448120117, 0.0589158706665039, 0.0583111686706543, 0.05850979232788086, 0.05883903884887695, 0.058449920654296876, 0.05885542297363281, 0.058978401184082034, 0.0586748161315918, 0.05876764678955078, 0.058877086639404295, 0.05844022369384766, 0.058946113586425784, 0.05908044815063476, 0.05833075332641602, 0.058620288848876954, 0.05825651168823242, 0.0582151985168457, 0.058508544921875, 0.05841609573364258, 0.05874169540405273, 0.0593950080871582, 0.059614463806152346, 0.05916748809814453, 0.06462643432617188, 0.061093151092529295, 0.059773887634277344, 0.06296579360961914, 0.060045310974121094, 0.05991628646850586, 0.05969929504394531, 0.05965024185180664, 0.05933027267456055, 0.05915849685668945, 0.05953254318237305, 0.059489055633544924, 0.05922611236572266, 0.05922601699829102, 0.05954684829711914, 0.05973286437988281, 0.05953257751464844, 0.059230945587158204, 0.05908889770507812, 0.059154430389404294, 0.05942809677124023, 0.060436767578125, 0.06020758438110352, 0.059252414703369144, 0.058837310791015625, 0.0587977294921875, 0.05920393753051758, 0.06061836624145508, 0.060447582244873045, 0.05959270477294922, 0.05918310546875, 0.0603135986328125, 0.05939148712158203, 0.05941708755493164, 0.05965414428710938, 0.05956198501586914, 0.06009590530395508, 0.05965475082397461, 0.06008425521850586, 0.06481097412109375, 0.06017052841186524, 0.06022883224487305, 0.06047385787963867, 0.060516353607177734, 0.0608765754699707, 0.059767265319824216, 0.0592360954284668, 0.0590274543762207, 0.059119361877441406, 0.0593326416015625, 0.06018889617919922, 0.060079967498779294, 0.05958611297607422, 0.05942742538452148, 0.05912575912475586, 0.059547103881835935, 0.059161121368408204, 0.059254432678222654, 0.05893875122070313, 0.05879404830932617, 0.059059230804443356, 0.05927436828613281, 0.05962947082519531, 0.05927372741699219, 0.05985302352905274, 0.05996752166748047, 0.05873263931274414, 0.05967462539672851, 0.06261145782470703, 0.06279139328002929, 
0.0584249267578125, 0.058290302276611326, 0.05829001617431641, 0.05861759948730469, 0.058076126098632816, 0.05805055999755859, 0.05844339370727539, 0.05763315200805664, 0.05851475143432617, 0.058356704711914065, 0.05833875274658203, 0.058778079986572265, 0.05933027267456055, 0.05872035217285156, 0.05851955032348633, 0.05865676879882813, 0.05891628646850586, 0.058950206756591794, 0.05848620986938476, 0.05864505767822266, 0.06203801727294922, 0.059461631774902345, 0.06783795166015626, 0.05866435241699219, 0.05916543960571289, 0.059132865905761715, 0.05921065521240235, 0.05934067153930664, 0.05948838424682617, 0.05943500900268555, 0.059751712799072265, 0.060082176208496096, 0.05939471817016601, 0.058666206359863284, 0.05931913757324219, 0.05926707077026367, 0.0587973747253418, 0.059539745330810544, 0.059156608581542966, 0.05928374481201172, 0.05939199829101562, 0.05903974533081055, 0.059189247131347655, 0.05953286361694336, 0.059379840850830076, 0.059334976196289066, 0.05910732650756836, 0.06097305679321289, 0.059346111297607425, 0.05906095886230469, 0.05911977767944336, 0.0592213134765625, 0.05922668838500977, 0.05989791870117187, 0.05927936172485351, 0.05971353530883789, 0.059025409698486325, 0.0587446403503418, 0.05897644805908203, 0.05947945785522461, 0.058628704071044924, 0.05898412704467773, 0.059879104614257814, 0.05886383819580078, 0.05850268936157227, 0.06011331176757812, 0.05938774490356445, 0.059506622314453125, 0.05936966323852539, 0.05932831954956055, 0.059132606506347656, 0.05866108703613281, 0.059451168060302734, 0.05902950286865234, 0.059332286834716794, 0.059251007080078126, 0.059594753265380856, 0.059127742767333985, 0.059160640716552734, 0.05953945541381836, 0.05968896102905273, 0.059494014739990234, 0.060160385131835935, 0.06025270462036133, 0.05963776016235352, 0.05942476654052734, 0.05948566436767578, 0.06018921661376953, 0.060252159118652344, 0.059719039916992185, 0.05966707229614258, 0.06011417770385742, 0.06055955123901367, 0.05995721435546875, 0.05988412857055664, 0.059686912536621096, 0.05981184005737305, 0.059607070922851564, 0.059625438690185543, 0.05938150405883789, 0.05950060653686524, 0.05957036972045898, 0.05974016189575195, 0.059848705291748044, 0.060014816284179685, 0.059655776977539064, 0.05953555297851563, 0.059686912536621096, 0.05983375930786133, 0.06058051300048828, 0.05998995208740234, 0.06017801666259766, 0.060291488647460936, 0.060200958251953124, 0.060270591735839846, 0.059566078186035154, 0.059305984497070315, 0.05984864044189453, 0.05968848037719727, 0.05996937561035156, 0.05961593627929687, 0.059566078186035154, 0.05953862380981445, 0.05957427215576172, 0.05947609710693359, 0.06023238372802735, 0.06006579208374024, 0.05980185699462891, 0.06004915237426758, 0.059729503631591796, 0.05989007949829102, 0.059401695251464846, 0.05959894561767578, 0.05955753707885742, 0.05980585479736328, 0.05945391845703125, 0.05973622512817383, 0.05972937774658203, 0.05984310531616211, 0.059774974822998046, 0.059934398651123044, 0.05995552062988281, 0.0599101448059082, 0.060153057098388675, 0.06043286514282226, 0.06015804672241211, 0.05968230438232422, 0.059574783325195314, 0.05958374404907227, 0.0592883186340332, 0.05935740661621094, 0.05917452621459961, 0.05903171157836914, 0.05922611236572266, 0.059824127197265625, 0.05898428726196289, 0.05968201446533203, 0.059757503509521484, 0.05920767974853516, 0.058943489074707034, 0.05925904083251953, 0.059314014434814454, 0.05916672134399414, 0.05900009536743164, 0.058888351440429684, 0.05884320068359375, 
0.059171199798583984, 0.058992767333984376, 0.06309417724609374, 0.059429473876953125, 0.05935103988647461, 0.059308032989501956, 0.05904707336425781, 0.06000112152099609, 0.059012481689453125, 0.059368064880371094, 0.05899190521240234, 0.05902108764648437, 0.059190208435058594, 0.058851329803466794, 0.05898780822753906, 0.059140094757080076, 0.05886137771606445, 0.0589403190612793, 0.05895577621459961, 0.059908096313476565, 0.059088191986083984, 0.059407039642333986, 0.059289600372314455, 0.059682144165039065, 0.059647937774658204, 0.0590382080078125, 0.059284832000732424, 0.05919424057006836, 0.05873798370361328, 0.05901587295532226, 0.058959873199462894, 0.0591396484375, 0.05950112152099609, 0.05883059310913086, 0.0595596809387207, 0.05922035217285156, 0.058966014862060545, 0.05873632049560547, 0.05872467041015625, 0.05905203247070313, 0.059709217071533205, 0.060088542938232424, 0.05915238571166992]",tokens/s,16.890860798349948,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1091.4816,4520.3456,0.0,4125.097984,4116.435456,s,1,13.485150390625,13.485150390625,0.0,13.485150390625,13.485150390625,13.485150390625,13.485150390625,[13.485150390625],,kWh,0.000186849221679167,2.060324561585591e-05,6.737783168000305e-05,0.00027483029897502594,,MB,1256.615936,5392.760832,0.0,4984.930304,4693.31456,s,10,9.26958074951172,0.926958074951172,0.008434019192174591,0.9293541870117188,0.9364100158691406,0.9370015472412109,0.9374747723388671,"[0.9067501220703125, 0.9198154907226562, 0.925846923828125, 0.9234560546875, 0.9296069946289063, 0.9291013793945313, 0.9299220581054688, 0.9312100830078125, 0.9375930786132812, 0.936278564453125]",tokens/s,276.17214512477807,kWh,2.684464992575723e-05,2.9604662414399143e-06,1.7821150620545642e-05,4.762626678774279e-05,tokens/kWh,5375185.108270648,MB,1278.758912,5392.760832,0.0,4984.930304,4693.31712,s,10,46.71592431640626,4.671592431640624,0.012588643260299189,4.676452880859374,4.681121142578125,4.686071801757812,4.690032329101562,"[4.64633837890625, 4.65569677734375, 4.66561376953125, 4.6656298828125, 4.6793896484375, 4.67459765625, 4.67830810546875, 4.679306640625, 4.68002099609375, 4.6910224609375]",tokens/s,13.485765490435758,kWh,0.0001374168171438261,1.5158070464900814e-05,9.117996435805415e-05,0.00024375485196678095,tokens/kWh,258456.39375656686,,s,630,46.71254718780524,0.07414690029810345,0.0016710346581399445,0.07393638610839844,0.0749411003112793,0.0753974910736084,0.08525176269531251,"[0.08837117004394532, 0.07261116790771484, 0.07196534729003906, 0.073152099609375, 0.07257129669189454, 0.07215952301025391, 0.07194137573242188, 0.07395990753173828, 0.0728656005859375, 0.07329606628417969, 0.07274086761474609, 0.07288524627685547, 0.07379679870605468, 0.07413536071777344, 0.07380515289306641, 0.07404611206054687, 0.07358214569091796, 0.07343762969970703, 0.07395680236816406, 0.07297901153564453, 0.0736789779663086, 
0.07239984130859375, 0.07370195007324219, 0.07276099395751953, 0.07346835327148438, 0.07243801879882812, 0.07345532989501953, 0.07415837097167968, 0.07313839721679688, 0.07420649719238281, 0.07389250946044922, 0.07356121826171876, 0.07269245147705078, 0.07333888244628907, 0.07302476501464844, 0.07450399780273438, 0.07421024322509766, 0.07345356750488281, 0.0732040023803711, 0.07345881652832031, 0.0739559326171875, 0.07403084564208984, 0.07372415924072266, 0.07443865966796875, 0.0738971176147461, 0.0732491226196289, 0.07307689666748046, 0.07399795532226562, 0.07387820434570312, 0.07325027465820312, 0.07363820648193359, 0.07403107452392578, 0.07421788787841797, 0.07366233825683594, 0.07408204650878907, 0.07435263824462891, 0.07362175750732422, 0.07410892486572265, 0.07376924896240235, 0.07408406066894531, 0.07394303894042968, 0.07430553436279297, 0.07371075439453124, 0.08566802978515625, 0.07299282836914063, 0.07308089447021485, 0.07306240081787109, 0.07300653076171874, 0.07226790618896485, 0.07324700927734375, 0.07263043212890626, 0.07360921478271484, 0.07316051483154297, 0.07298067474365234, 0.07307263946533203, 0.07331849670410157, 0.07536153411865235, 0.07459923553466796, 0.07400835418701172, 0.0742150421142578, 0.07391686248779297, 0.072957763671875, 0.07392265319824219, 0.07295600128173828, 0.07337165069580077, 0.07377536010742188, 0.0732128677368164, 0.0736363525390625, 0.073168701171875, 0.0750918731689453, 0.07428361511230469, 0.07489561462402344, 0.07401036834716797, 0.07370035552978516, 0.07401113891601563, 0.07343465423583985, 0.07294649505615235, 0.0733656005859375, 0.07346959686279297, 0.07401897430419922, 0.07386956787109375, 0.07396761322021485, 0.07433363342285157, 0.07388819122314454, 0.07435810852050781, 0.07404009246826172, 0.0738776626586914, 0.07355155181884766, 0.07418681335449219, 0.0738716812133789, 0.07363561248779296, 0.07317715454101563, 0.07326937866210938, 0.07401583862304688, 0.07411510467529298, 0.07398675537109375, 0.07359487915039062, 0.07397782135009766, 0.07354950714111329, 0.07395378875732422, 0.07424803161621094, 0.07358464050292969, 0.07408233642578126, 0.07417443084716797, 0.07375785827636719, 0.07384559631347656, 0.086623779296875, 0.07315071868896485, 0.07343692779541015, 0.07323107147216797, 0.0726398696899414, 0.07361734771728516, 0.07289619445800781, 0.07287696075439454, 0.07300080108642579, 0.07309622192382813, 0.07296918487548829, 0.07292723083496094, 0.07632217407226563, 0.07695833587646485, 0.07327696228027344, 0.0742825927734375, 0.07336844635009766, 0.07326105499267578, 0.07333663940429687, 0.07298681640625, 0.07370137786865234, 0.07277772521972656, 0.07372799682617187, 0.07321571350097657, 0.07395970916748047, 0.07369522857666015, 0.0743773422241211, 0.07491571044921876, 0.07425638580322266, 0.07387923431396484, 0.07390032196044923, 0.07409394836425781, 0.07371190643310546, 0.07383692932128906, 0.07303139495849609, 0.07337299346923828, 0.07436787414550781, 0.07373532867431641, 0.07420409393310547, 0.07410380554199218, 0.07472844696044922, 0.07445913696289062, 0.07431283569335938, 0.07396620941162109, 0.07407772827148437, 0.07359766387939454, 0.07396147155761719, 0.0739653778076172, 0.07374253082275391, 0.07321913909912109, 0.07359410858154297, 0.07377069091796876, 0.07460431671142578, 0.07348678588867187, 0.074115966796875, 0.07534889221191406, 0.07423993682861328, 0.07386294555664062, 0.07405187225341797, 0.07400886535644531, 0.07374320220947266, 0.07446002960205078, 0.0748620834350586, 0.08477935791015626, 0.07343724822998046, 
0.07312140655517578, 0.07282902526855468, 0.07332688140869141, 0.07328768157958984, 0.0727224349975586, 0.07335913848876953, 0.07351728057861329, 0.07271001434326171, 0.07309324645996093, 0.07335084533691406, 0.07516159820556641, 0.07574710083007813, 0.07442515563964844, 0.07432787322998047, 0.07323385620117187, 0.07295638275146485, 0.07372799682617187, 0.0739163818359375, 0.07379766082763672, 0.07242546844482421, 0.07409664154052735, 0.07427385711669922, 0.07322937774658203, 0.074010498046875, 0.07490259552001953, 0.0745831069946289, 0.07448153686523437, 0.0743013458251953, 0.07363184356689453, 0.07432192230224609, 0.07392205047607422, 0.07383910369873047, 0.07346377563476562, 0.0728719711303711, 0.07293952178955078, 0.07469261169433594, 0.07393062591552735, 0.0742085723876953, 0.07454297637939453, 0.07443961334228516, 0.074231201171875, 0.07380159759521485, 0.07406992340087891, 0.07393567657470704, 0.07407823944091797, 0.07367472076416015, 0.07451599884033203, 0.07351958465576172, 0.0741297607421875, 0.07424988555908203, 0.0737259521484375, 0.07397686767578125, 0.07512499237060546, 0.07403507232666015, 0.07408924865722656, 0.0745536651611328, 0.07383628845214844, 0.07397990417480468, 0.07394713592529296, 0.07395516967773437, 0.07393651580810547, 0.08691849517822266, 0.07333350372314452, 0.07352925109863281, 0.07409664154052735, 0.07479705810546874, 0.07314022064208985, 0.07348326110839844, 0.07254927825927734, 0.07322428894042969, 0.07275724792480469, 0.072947998046875, 0.07294070434570313, 0.07577353668212891, 0.07594729614257813, 0.0757019500732422, 0.07364425659179688, 0.07404022216796875, 0.07368793487548828, 0.07334912109375, 0.07420722961425781, 0.07346819305419922, 0.07360892486572265, 0.07362969970703125, 0.07305420684814454, 0.07335935974121094, 0.07522633361816407, 0.07583753967285156, 0.07494521331787109, 0.07404924774169921, 0.07388188934326172, 0.07334928131103516, 0.07387324523925781, 0.07386953735351562, 0.07298371124267578, 0.07348697662353515, 0.07396729278564453, 0.07323795318603515, 0.07435558319091796, 0.07405516815185546, 0.07593958282470703, 0.07468902587890625, 0.07379974365234375, 0.07481362915039062, 0.07384255981445312, 0.07393292999267578, 0.07432355499267577, 0.07354000091552734, 0.07423798370361329, 0.07393305969238281, 0.07371923065185547, 0.0747451171875, 0.07530598449707031, 0.07470489501953125, 0.07478281402587891, 0.07474575805664062, 0.07528243255615234, 0.07422313690185547, 0.07481391906738281, 0.07395532989501953, 0.07311145782470703, 0.07454102325439453, 0.07355619049072265, 0.07421737670898437, 0.08532991790771484, 0.0731504669189453, 0.07333273315429688, 0.07407820892333984, 0.07358393859863281, 0.07340306854248047, 0.07366822052001953, 0.07324082946777344, 0.07292889404296875, 0.0732873306274414, 0.07300614166259765, 0.07278521728515625, 0.07521539306640625, 0.07610070037841797, 0.07424617767333984, 0.07386809539794922, 0.0738685760498047, 0.07415289306640625, 0.07371715545654296, 0.07319795227050781, 0.07416636657714844, 0.07391426849365235, 0.07347020721435547, 0.07316675567626953, 0.07343708801269531, 0.0746495361328125, 0.07468236541748047, 0.07447142028808594, 0.07416361236572265, 0.07388457489013672, 0.0743842544555664, 0.0740544662475586, 0.07370499420166016, 0.07446498870849609, 0.07371033477783204, 0.0742760009765625, 0.07437398529052734, 0.07378943634033203, 0.0742481918334961, 0.07377030181884765, 0.07418876647949219, 0.073781982421875, 0.07392400360107422, 0.074553955078125, 0.07419673919677734, 0.07405964660644532, 
0.07375091552734375, 0.074351806640625, 0.07377519989013671, 0.07386595153808594, 0.07425433349609376, 0.07379334259033203, 0.07466531372070312, 0.07426338958740235, 0.07411113739013672, 0.07520649719238282, 0.07433216094970703, 0.07394694519042969, 0.07455558776855468, 0.07448576354980468, 0.07477862548828125, 0.07386831665039062, 0.07460963439941407, 0.08506041717529297, 0.07347404479980468, 0.07317696380615234, 0.07317670440673828, 0.07319193267822266, 0.07329753875732421, 0.07355142211914062, 0.07333766174316406, 0.07298880004882813, 0.0728139877319336, 0.07293385314941406, 0.07427670288085937, 0.07591542053222657, 0.07650508880615234, 0.07424736022949219, 0.07413228607177734, 0.07406095886230468, 0.07371421051025391, 0.07310678100585938, 0.0733255386352539, 0.07410704040527344, 0.074131103515625, 0.07349472045898438, 0.0739871063232422, 0.07387619018554688, 0.07534617614746093, 0.07510809326171874, 0.07448601531982423, 0.07436083221435547, 0.07371311950683594, 0.07380636596679688, 0.0736786880493164, 0.07434249877929687, 0.0739362564086914, 0.07415468597412109, 0.07371158599853515, 0.07483014678955079, 0.07431753540039063, 0.07470489501953125, 0.0740492172241211, 0.07628627014160157, 0.07411507415771484, 0.07411244964599609, 0.07376703643798828, 0.07410527801513672, 0.07370304107666016, 0.07437741088867188, 0.07428524780273438, 0.0737791976928711, 0.07378534698486328, 0.07402700805664063, 0.07433417510986329, 0.07490383911132813, 0.07460371398925782, 0.07478534698486328, 0.07416422271728515, 0.07362355041503907, 0.07403314971923829, 0.07377510070800782, 0.07478495788574219, 0.0743128662109375, 0.07394371032714844, 0.07391232299804687, 0.08630681610107421, 0.07307859039306641, 0.07370771026611328, 0.07356963348388672, 0.07290486145019531, 0.07306905364990235, 0.07310921478271484, 0.07397628784179687, 0.07295980834960937, 0.07400038146972657, 0.07364985656738281, 0.07299922943115235, 0.07534329223632813, 0.07727747344970703, 0.07428921508789063, 0.07393302154541016, 0.07400249481201172, 0.074052734375, 0.07345439910888672, 0.07304192352294922, 0.07321190643310548, 0.07318732452392578, 0.07346371459960938, 0.07342704010009765, 0.07396556854248047, 0.07525091552734375, 0.07545843505859375, 0.07490188598632813, 0.0747176284790039, 0.0734208984375, 0.07429312133789062, 0.07369757080078125, 0.07361110687255859, 0.07337369537353515, 0.07361084747314453, 0.0736072006225586, 0.07411135864257813, 0.07415577697753906, 0.07459251403808594, 0.0752432632446289, 0.07501849365234375, 0.07462911987304688, 0.07352665710449219, 0.07467385864257813, 0.0742696304321289, 0.07399014282226563, 0.07389417266845703, 0.074401123046875, 0.07364236450195312, 0.07394735717773437, 0.07416207885742188, 0.0745818862915039, 0.07420492553710938, 0.07518016052246093, 0.07469840240478516, 0.07478953552246094, 0.07512989044189453, 0.07439440155029296, 0.07395942687988281, 0.07379484558105469, 0.07422434997558594, 0.07375049591064453, 0.07404342651367188, 0.08432003021240235, 0.07329420471191406, 0.0731927032470703, 0.07346387481689454, 0.0728255386352539, 0.07348633575439453, 0.07334912109375, 0.07426662445068359, 0.07361945343017579, 0.07369670104980469, 0.07363542175292968, 0.0733071060180664, 0.07571417236328125, 0.07592588806152344, 0.074010498046875, 0.07509004974365234, 0.07334111785888672, 0.0734735336303711, 0.07440415954589844, 0.07310765075683594, 0.07386707305908204, 0.07323648071289063, 0.0734203872680664, 0.07433379364013672, 0.07414979553222656, 0.07523625946044922, 0.07516687774658203, 0.07403353881835938, 
0.07487935638427734, 0.07406774139404297, 0.07375116729736328, 0.07385171508789062, 0.07382514953613281, 0.07414374542236328, 0.07411833953857422, 0.07375545501708984, 0.07396691131591797, 0.07450870513916015, 0.07475382232666015, 0.07490201568603516, 0.07471920013427734, 0.07489036560058594, 0.07379596710205077, 0.07391900634765625, 0.07393049621582032, 0.07421977233886719, 0.07351500701904297, 0.07449600219726563, 0.07464694213867187, 0.07400713348388673, 0.0750223388671875, 0.07443456268310547, 0.07438438415527343, 0.07387872314453126, 0.0741246109008789, 0.07484835052490234, 0.07391999816894532, 0.07449897766113281, 0.07443660736083985, 0.07363603210449218, 0.07486217498779296, 0.07386748504638672, 0.07411920166015624, 0.08634966278076171, 0.07336566162109374, 0.07328892517089844, 0.07327414703369141, 0.07329615783691407, 0.073244384765625, 0.07332454681396484, 0.07329548645019532, 0.07369971466064452, 0.07357440185546875, 0.0737318115234375, 0.0733864974975586, 0.07632396697998046, 0.07608179473876953, 0.07451165008544922, 0.07446969604492187, 0.07365676879882813, 0.07414383697509766, 0.0738502426147461, 0.0737490234375, 0.07352092742919922, 0.07383798217773438, 0.0732639389038086, 0.07373619079589844, 0.07463343811035156, 0.0751749725341797, 0.07511103820800781, 0.07501663970947266, 0.07457142639160157, 0.07533548736572265, 0.07524291229248047, 0.07546959686279296, 0.0736727066040039, 0.07423782348632812, 0.07397379302978516, 0.07416432189941406, 0.07396351623535156, 0.07412531280517579, 0.07504908752441407, 0.07542691040039062, 0.07505567932128906, 0.07494064331054688, 0.07346591949462891, 0.07468592071533203, 0.074225341796875, 0.07378205108642578, 0.07420944213867188, 0.07404294586181641, 0.0740621109008789, 0.07366413116455078, 0.07445919799804687, 0.07401299285888673, 0.07501200103759766, 0.07550752258300782, 0.07549801635742187, 0.07466265869140624, 0.07381913757324218, 0.07444684600830079, 0.07405712127685547, 0.0740225601196289, 0.07415904235839844, 0.07452419281005859, 0.07422000122070313]",tokens/s,13.486740456843863,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,843.743232,1979.580416,0.0,1577.058304,1537.483264,s,1,9.495509765625,9.495509765625,0.0,9.495509765625,9.495509765625,9.495509765625,9.495509765625,[9.495509765625],,kWh,6.195592024993554e-05,6.827013637348822e-06,1.919334868796807e-05,8.797628257525244e-05,,MB,1308.34432,2157.838336,0.0,1742.733312,1681.560064,s,10,1.8938364715576173,0.18938364715576173,0.0010284611508170974,0.1889769744873047,0.19083502502441407,0.19110839233398438,0.19132708618164063,"[0.19077427673339845, 0.18889872741699218, 0.1913817596435547, 0.1883290252685547, 0.18874281311035157, 0.1890109100341797, 0.1889430389404297, 0.19045309448242187, 0.18906605529785156, 
0.1882367706298828]",tokens/s,1351.753458361949,kWh,5.703432814422844e-06,6.288097747231287e-07,3.793112008845907e-06,1.012535459799188e-05,tokens/kWh,25283065.15317216,MB,1345.093632,2159.935488,0.0,1742.733312,1681.562624,s,10,27.364598876953128,2.736459887695313,0.007204961377193521,2.733830810546875,2.7449489501953126,2.747691027832031,2.749884689941406,"[2.72891552734375, 2.743422607421875, 2.744339599609375, 2.729462890625, 2.728185791015625, 2.73440185546875, 2.75043310546875, 2.73956787109375, 2.73260986328125, 2.733259765625]",tokens/s,23.022446001596442,kWh,7.977397199265811e-05,8.799278140239652e-06,3.611039213615431e-05,0.00012468364226905204,tokens/kWh,505278.7908140645,,s,630,27.36252682876586,0.04343258226788233,0.0006152871102192139,0.04325436782836914,0.043997184371948245,0.04431216812133789,0.04663942115783692,"[0.04427040100097656, 0.04649478530883789, 0.044370399475097654, 0.04366947174072266, 0.04349363327026367, 0.04320582580566406, 0.04305344009399414, 0.04300435256958008, 0.04300185775756836, 0.04337836837768555, 0.04353260803222656, 0.04325529479980469, 0.04488204956054687, 0.042775936126708984, 0.04285852813720703, 0.042992416381835936, 0.042794368743896485, 0.04315167999267578, 0.042969600677490234, 0.04322304153442383, 0.04671859359741211, 0.04438579177856445, 0.04325846481323242, 0.04295622253417969, 0.042855262756347656, 0.04305100631713867, 0.04292812728881836, 0.04282511901855469, 0.04289513778686523, 0.042810176849365236, 0.042788864135742184, 0.042784385681152344, 0.04297561645507812, 0.043269760131835935, 0.04297561645507812, 0.04293641662597656, 0.04301609420776367, 0.04282742309570312, 0.04283404922485352, 0.04287100982666016, 0.04286873626708984, 0.04285993576049805, 0.042840415954589844, 0.04291974258422852, 0.04349734497070312, 0.04434377670288086, 0.04300527954101562, 0.042885887145996095, 0.04302163314819336, 0.042957504272460936, 0.04296908950805664, 0.043786239624023435, 0.04313267135620117, 0.042958526611328124, 0.043047489166259764, 0.04299273681640625, 0.04295510482788086, 0.04418121719360352, 0.04343865585327149, 0.0435076789855957, 0.04329913711547852, 0.043476993560791016, 0.04341350555419922, 0.045341567993164064, 0.044068862915039066, 0.04399923324584961, 0.04377190399169922, 0.04363420867919922, 0.04351228713989258, 0.04342483139038086, 0.0434060173034668, 0.04341104125976562, 0.04341804885864258, 0.043276031494140624, 0.0433812141418457, 0.04334815979003906, 0.04386387252807617, 0.04386627197265625, 0.04350374221801758, 0.043435073852539065, 0.043581790924072265, 0.04371852874755859, 0.04355321502685547, 0.04360192108154297, 0.04396771240234375, 0.04357814407348633, 0.04388044738769531, 0.043670848846435545, 0.04353068923950195, 0.04318143844604492, 0.043033214569091795, 0.04307174301147461, 0.043243518829345705, 0.04308995056152344, 0.043023681640625, 0.04293497467041016, 0.04308297729492187, 0.04302105712890625, 0.043428897857666016, 0.04317897415161133, 0.04354841613769531, 0.04328185653686523, 0.043034912109375, 0.043188766479492186, 0.04305420684814453, 0.04330790328979492, 0.04393939208984375, 0.04391097640991211, 0.0437336311340332, 0.0438579216003418, 0.046698497772216796, 0.04348105621337891, 0.0431800651550293, 0.043663360595703124, 0.04382515335083008, 0.043786239624023435, 0.043598880767822264, 0.043606014251708985, 0.04341449737548828, 0.04324121475219726, 0.04315497589111328, 0.04312723159790039, 0.04334211349487305, 0.04331423950195312, 0.043476993560791016, 0.04341241455078125, 0.04384815979003906, 0.04334579086303711, 
0.04366144180297851, 0.043431934356689454, 0.04347296142578125, 0.04324105453491211, 0.04320495986938477, 0.04319846343994141, 0.04369203186035156, 0.04319641494750977, 0.04331520080566406, 0.04331711959838867, 0.043917438507080075, 0.04435561752319336, 0.04454601669311523, 0.04427775955200195, 0.044129886627197266, 0.04462019348144531, 0.04456243133544922, 0.04421775817871094, 0.04363324737548828, 0.04344198226928711, 0.04335225677490234, 0.04334796905517578, 0.04340438461303711, 0.04812019348144531, 0.04374582290649414, 0.043593215942382815, 0.04337631988525391, 0.04342047882080078, 0.04355052947998047, 0.0434299201965332, 0.04328268814086914, 0.0436776008605957, 0.043253761291503906, 0.043270145416259766, 0.04332953643798828, 0.04343356704711914, 0.043229598999023434, 0.04317123031616211, 0.043237567901611325, 0.043512222290039065, 0.04356876754760742, 0.04327052688598633, 0.04335923385620117, 0.043410110473632815, 0.04308374404907227, 0.04300540924072266, 0.043178207397460935, 0.043332000732421876, 0.04343833541870117, 0.04302643203735351, 0.04298281478881836, 0.04317193603515625, 0.04377884674072265, 0.04333276748657226, 0.043162174224853515, 0.04316681671142578, 0.04319635009765625, 0.04318246459960937, 0.0436242561340332, 0.04313753509521484, 0.04335763168334961, 0.044340320587158207, 0.04749270248413086, 0.043331871032714846, 0.04302643203735351, 0.04300595092773438, 0.04324332809448242, 0.04327443313598633, 0.04298883056640625, 0.04292454528808594, 0.042870113372802734, 0.04295155334472656, 0.04303462219238281, 0.042985118865966794, 0.04281987380981445, 0.04298553466796875, 0.04294246292114258, 0.043044864654541014, 0.04290252685546875, 0.0429777603149414, 0.04293180847167969, 0.04295990371704102, 0.043129886627197266, 0.04310681533813476, 0.04292959976196289, 0.04309823989868164, 0.04295148849487305, 0.04290291213989258, 0.044708480834960936, 0.043205665588378905, 0.0435945930480957, 0.04494668960571289, 0.04342457580566406, 0.04413241577148438, 0.043096065521240234, 0.04322099304199219, 0.04303891372680664, 0.04304995346069336, 0.04300067138671875, 0.04301612854003906, 0.04320630264282226, 0.04427337646484375, 0.04306399917602539, 0.042962944030761716, 0.04321279907226563, 0.043091201782226564, 0.04320537567138672, 0.043438144683837894, 0.043019489288330076, 0.044233440399169925, 0.04401308822631836, 0.043870689392089844, 0.043151039123535156, 0.04339849472045899, 0.04333776092529297, 0.04325484848022461, 0.04312579345703125, 0.04306748962402344, 0.043149246215820315, 0.04303955078125, 0.04310835266113281, 0.043216064453125, 0.04321158218383789, 0.04301619338989258, 0.043434913635253904, 0.04334592056274414, 0.04346265411376953, 0.04336761474609375, 0.043399105072021486, 0.043049728393554684, 0.04319836807250976, 0.04321916961669922, 0.0434317741394043, 0.04322246551513672, 0.04312547302246094, 0.043589599609375, 0.043807872772216795, 0.04337961578369141, 0.04315545654296875, 0.04382310485839844, 0.04330879974365234, 0.043483230590820314, 0.043152576446533204, 0.04341244888305664, 0.04292812728881836, 0.04303696060180664, 0.04306227111816406, 0.04306108856201172, 0.043127681732177736, 0.04364287948608398, 0.04380672073364258, 0.04310249710083008, 0.043214336395263675, 0.04310857772827149, 0.04402972793579102, 0.04397014236450195, 0.043702911376953125, 0.04314643096923828, 0.043119422912597655, 0.04308579254150391, 0.04373097610473633, 0.04365097427368164, 0.04307567977905274, 0.042990753173828125, 0.043488094329833984, 0.04324358367919922, 0.04318406295776367, 
0.043238433837890625, 0.043098400115966794, 0.04307321548461914, 0.043017215728759765, 0.04290764617919922, 0.04371046447753906, 0.04372582244873047, 0.04331388854980469, 0.04308611297607422, 0.04297747039794922, 0.043045951843261716, 0.0434796142578125, 0.0434870719909668, 0.04319676971435547, 0.04326387023925781, 0.04298969650268555, 0.04307555389404297, 0.04309814453125, 0.04314931106567383, 0.04316473770141602, 0.043605792999267576, 0.04316947174072266, 0.04348735809326172, 0.04372895812988281, 0.043959007263183594, 0.0442081298828125, 0.044111873626708986, 0.04680313491821289, 0.04471788787841797, 0.04372054290771484, 0.04349990463256836, 0.04396147155761719, 0.04386268615722656, 0.043655040740966794, 0.043181888580322264, 0.04329619216918945, 0.04315225601196289, 0.04314521789550781, 0.043073024749755856, 0.043208831787109374, 0.043127166748046876, 0.0432803840637207, 0.043120384216308594, 0.04315324783325195, 0.043125152587890625, 0.043121952056884766, 0.043135711669921875, 0.04311360168457031, 0.043696033477783204, 0.043498462677001956, 0.04366678237915039, 0.04313155364990234, 0.043011905670166016, 0.04306480026245117, 0.043417728424072266, 0.043276767730712894, 0.04352150344848633, 0.04320732879638672, 0.04308575820922852, 0.043098175048828125, 0.04314521789550781, 0.04311040115356445, 0.04350566482543945, 0.043149246215820315, 0.043138687133789065, 0.043063743591308594, 0.04318822479248047, 0.04305088043212891, 0.043022464752197266, 0.043036670684814454, 0.043044158935546875, 0.04323107147216797, 0.04313174438476562, 0.04328976058959961, 0.04362326431274414, 0.043224895477294925, 0.0432264633178711, 0.04360396957397461, 0.043245441436767576, 0.04325388717651367, 0.04320671844482422, 0.04323379135131836, 0.04306972885131836, 0.04365926361083984, 0.04326099014282227, 0.04374959945678711, 0.044218559265136716, 0.044251678466796875, 0.04441059112548828, 0.04406710433959961, 0.044423168182373046, 0.04407212829589844, 0.044389183044433594, 0.04447190475463867, 0.043811233520507815, 0.04361011123657227, 0.04405622482299805, 0.04328892898559571, 0.04385305786132813, 0.04342041778564453, 0.043286529541015625, 0.043261951446533206, 0.043184127807617184, 0.04313232040405274, 0.04390563201904297, 0.04326931381225586, 0.043033119201660155, 0.04316179275512695, 0.04356924819946289, 0.04317542266845703, 0.04310678482055664, 0.04298345565795898, 0.04317712020874023, 0.048888671875, 0.04552671813964844, 0.04320083236694336, 0.04340326309204102, 0.04338483047485352, 0.043245567321777346, 0.04350540924072266, 0.043656993865966796, 0.043055583953857425, 0.0431217269897461, 0.043290561676025394, 0.04327936172485351, 0.04310012817382813, 0.04319350433349609, 0.04321164703369141, 0.04312393569946289, 0.0433037109375, 0.043227039337158206, 0.043886688232421874, 0.04371017456054688, 0.043307296752929686, 0.04332953643798828, 0.0430489616394043, 0.043423744201660154, 0.04336025619506836, 0.043200511932373044, 0.04315552139282226, 0.0439901123046875, 0.045251422882080075, 0.04372825622558594, 0.04375379180908203, 0.04387257766723633, 0.04322422409057617, 0.044214271545410154, 0.044989982604980466, 0.04328086471557617, 0.04327219009399414, 0.04320870590209961, 0.04315574264526367, 0.043138912200927734, 0.044087169647216796, 0.04330495834350586, 0.04475904083251953, 0.04363020706176758, 0.043515518188476564, 0.0431962890625, 0.0431739501953125, 0.04354665756225586, 0.04349622344970703, 0.043646591186523434, 0.04321484756469727, 0.04313734436035156, 0.043237438201904295, 0.043216705322265625, 
0.04321708679199219, 0.04346015930175781, 0.043714912414550784, 0.04425328063964844, 0.044025856018066405, 0.04424448013305664, 0.04415331268310547, 0.04373215866088867, 0.04327462387084961, 0.043151840209960934, 0.04321596908569336, 0.043092254638671876, 0.04647135925292969, 0.04328252792358398, 0.043221343994140626, 0.04313449478149414, 0.043137504577636716, 0.043491329193115234, 0.0437841911315918, 0.04334521484375, 0.043149566650390624, 0.04302473449707031, 0.04301219177246094, 0.043134880065917966, 0.04325795364379883, 0.043152961730957035, 0.04329107284545899, 0.04309372711181641, 0.04297119903564453, 0.04312070465087891, 0.04320223999023438, 0.043173599243164065, 0.04309478378295899, 0.04311859130859375, 0.04328758239746094, 0.04310220718383789, 0.043252544403076174, 0.043703617095947264, 0.043863937377929686, 0.04336943817138672, 0.04332953643798828, 0.04381427383422851, 0.04410815811157227, 0.04405657577514648, 0.04337664031982422, 0.04324147033691406, 0.043270111083984375, 0.04313910293579101, 0.04316745758056641, 0.043149505615234375, 0.04323455810546875, 0.04317475128173828, 0.04346265411376953, 0.043286529541015625, 0.043582752227783204, 0.04339129638671875, 0.043206462860107424, 0.04334652709960937, 0.04304617691040039, 0.04319046401977539, 0.04390304183959961, 0.043391456604003904, 0.04337254333496094, 0.04321596908569336, 0.043270977020263675, 0.04307334518432617, 0.043741470336914064, 0.04309196853637695, 0.04298863983154297, 0.04295363235473633, 0.04304281616210937, 0.04293017578125, 0.042874881744384766, 0.04345779037475586, 0.043866878509521486, 0.04390835189819336, 0.043447040557861326, 0.045039615631103515, 0.04424086380004883, 0.04339263916015625, 0.04318191909790039, 0.04353468704223633, 0.04362057495117187, 0.043679744720458984, 0.04400249481201172, 0.043778175354003905, 0.043475166320800784, 0.04310678482055664, 0.043163169860839845, 0.043104736328125, 0.04297513580322266, 0.042842208862304686, 0.04308377456665039, 0.04290127944946289, 0.04296707153320312, 0.04299385452270508, 0.04299679946899414, 0.04303763198852539, 0.04401702499389649, 0.04369062423706055, 0.04346579360961914, 0.04352412796020508, 0.04314614486694336, 0.0430912971496582, 0.04339369583129883, 0.04474915313720703, 0.04399862289428711, 0.04361481475830078, 0.043835391998291014, 0.043302913665771485, 0.04324105453491211, 0.04304633712768555, 0.04317078399658203, 0.04332339096069336, 0.04319744110107422, 0.043459041595458985, 0.04327203369140625, 0.04321247863769531, 0.04309491348266602, 0.04315558242797852, 0.04326326370239258, 0.043819648742675785, 0.043253856658935545, 0.04314921569824219, 0.043191871643066405, 0.04331983947753906, 0.0437391357421875, 0.04353142547607422, 0.04340313720703125, 0.043174655914306644, 0.043184127807617184, 0.043143680572509766, 0.04300543975830078, 0.043161823272705076, 0.04308563232421875, 0.04311391830444336, 0.043686656951904296, 0.04307763290405273, 0.04311638259887695, 0.04307369613647461, 0.04325513458251953, 0.04321756744384766, 0.04321791839599609, 0.04308684921264649, 0.04312473678588867, 0.043169792175292966, 0.04314316940307617, 0.043169792175292966, 0.04340304183959961, 0.04367792129516602, 0.0434947509765625, 0.043496097564697266, 0.04342076873779297, 0.04318892669677735, 0.04363030242919922, 0.04313753509521484, 0.04320156860351562, 0.04316668701171875, 0.043245567321777346, 0.04356300735473633, 0.04313292694091797, 0.04333308792114258, 0.043997024536132814, 0.04310291290283203, 0.04676403045654297, 0.043404319763183596, 0.043109024047851566, 
0.04303696060180664]",tokens/s,23.024189393857046,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,1041.330176,2579.365888,0.0,2176.843776,2071.865856,s,1,11.5447685546875,11.5447685546875,0.0,11.5447685546875,11.5447685546875,11.5447685546875,11.5447685546875,[11.5447685546875],,kWh,0.00011027507376663077,1.2157111546485025e-05,3.6163917820006275e-05,0.00015859610313312208,,MB,1340.837888,2923.298816,0.0,2508.193792,2438.578688,s,10,3.695839538574219,0.36958395385742193,0.0012063327066543167,0.369835205078125,0.37093370056152347,0.37126390838623047,0.37152807464599613,"[0.3704662780761719, 0.36826516723632813, 0.368335693359375, 0.36838623046875, 0.369931640625, 0.3703173217773438, 0.3715941162109375, 0.3708603210449219, 0.36794400024414065, 0.36973876953125]",tokens/s,692.67076486432,kWh,1.102148031358029e-05,1.2154636148901721e-06,7.296826825110693e-06,1.9533770753581155e-05,tokens/kWh,13105508.569207875,MB,1397.686272,2925.395968,0.0,2508.193792,2438.581248,s,10,32.2017138671875,3.22017138671875,0.020836151047861382,3.2178804931640625,3.2453430908203127,3.2526954711914065,3.2585773754882816,"[3.21694189453125, 3.202781494140625, 3.201371337890625, 3.218819091796875, 3.2332041015625, 3.2600478515625, 3.231302978515625, 3.188402099609375, 3.2051337890625, 3.243709228515625]",tokens/s,19.56417607455203,kWh,9.265536489308188e-05,1.0220158427648591e-05,5.116521994448941e-05,0.00015404074326521992,tokens/kWh,408982.70590352605,,s,630,32.19906469345095,0.051109626497541145,0.0007275223620183857,0.051009296417236324,0.05180436592102051,0.05229656200408936,0.05397780296325684,"[0.055087104797363284, 0.05144166564941406, 0.051156993865966796, 0.05079580688476563, 0.051466846466064455, 0.05132681655883789, 0.05170140838623047, 0.05132505416870117, 0.05132041549682617, 0.050748321533203126, 0.05295718383789062, 0.05124710464477539, 0.051757057189941405, 0.05120140838623047, 0.050659969329833986, 0.0507817268371582, 0.050735584259033205, 0.052279296875, 0.051023872375488284, 0.05095423889160156, 0.05102345657348633, 0.05068019104003906, 0.05096230316162109, 0.05104860687255859, 0.05205942535400391, 0.050750175476074216, 0.05077721786499023, 0.051409793853759767, 0.050991104125976565, 0.05100249481201172, 0.05050185775756836, 0.05039923095703125, 0.050254497528076175, 0.050542495727539063, 0.05067577743530274, 0.051054401397705076, 0.05082748794555664, 0.05079040145874023, 0.050687873840332034, 0.05404070281982422, 0.05097881698608398, 0.05103590393066406, 0.05084355163574219, 0.05083990478515625, 0.05048249435424805, 0.05037126541137695, 0.050915328979492185, 0.05056073760986328, 0.050516254425048826, 0.05107097625732422, 0.05023129653930664, 0.05061017608642578, 0.050542015075683594, 0.0505022087097168, 0.050486942291259766, 0.050776416778564454, 0.05088256072998047, 0.051184734344482424, 0.050645919799804685, 
0.050472576141357424, 0.050753280639648436, 0.051091999053955076, 0.05043619155883789, 0.05266697692871094, 0.050794273376464846, 0.05079062271118164, 0.05071036911010742, 0.051017887115478514, 0.05068729782104492, 0.050959041595458984, 0.05116723251342774, 0.05081865692138672, 0.0505450553894043, 0.05064396667480469, 0.050549633026123045, 0.050566497802734374, 0.050653759002685546, 0.050383071899414066, 0.0511192626953125, 0.0508383674621582, 0.05045043182373047, 0.05045248031616211, 0.05046681594848633, 0.05042585754394531, 0.05032550430297852, 0.05044780731201172, 0.05036851119995117, 0.05060665512084961, 0.0505134391784668, 0.05068960189819336, 0.05049436950683594, 0.05056435012817383, 0.05079046249389649, 0.0505863037109375, 0.050374656677246096, 0.05052316665649414, 0.05068899154663086, 0.05108921432495117, 0.05108755111694336, 0.05086207962036133, 0.0510300178527832, 0.05113446426391602, 0.05069539260864258, 0.050798847198486326, 0.05087820816040039, 0.05164656066894531, 0.05051036834716797, 0.050716064453125, 0.05045244979858399, 0.05072758483886719, 0.050632289886474606, 0.05068239974975586, 0.05045862579345703, 0.0506429443359375, 0.05075088119506836, 0.05091097640991211, 0.05088137435913086, 0.05335558319091797, 0.05115590286254883, 0.051476478576660156, 0.05087017440795898, 0.05099734497070312, 0.05067139053344726, 0.050726337432861326, 0.05079734420776367, 0.05220761489868164, 0.05382380676269531, 0.05137472152709961, 0.05094902420043945, 0.05166495895385742, 0.05104924774169922, 0.05041459274291992, 0.050516864776611325, 0.0506901741027832, 0.05054003143310547, 0.05042598342895508, 0.05035615921020508, 0.05033110427856445, 0.050381023406982424, 0.05053430557250976, 0.05031974411010742, 0.05039152145385742, 0.05038665771484375, 0.05066371154785156, 0.050329151153564455, 0.05052870559692383, 0.05051980972290039, 0.050641151428222654, 0.05083750534057617, 0.051025440216064456, 0.05067542266845703, 0.05039519882202149, 0.05065955352783203, 0.0505838737487793, 0.05094575881958008, 0.05030083084106445, 0.05044390487670898, 0.05036307144165039, 0.050909408569335936, 0.050727169036865236, 0.05044198226928711, 0.05026611328125, 0.05053807830810547, 0.05030543899536133, 0.05041702270507813, 0.050831039428710936, 0.050756542205810544, 0.050423809051513675, 0.050851104736328125, 0.051929439544677734, 0.05125568008422852, 0.051320831298828126, 0.05056918334960937, 0.050722366333007814, 0.050817024230957034, 0.05067164611816406, 0.0504997444152832, 0.050960670471191405, 0.0506798095703125, 0.05047296142578125, 0.05072281646728516, 0.050677726745605466, 0.05537795257568359, 0.05063065719604492, 0.05042169570922852, 0.050417728424072265, 0.05073100662231445, 0.05231561660766602, 0.050401824951171875, 0.05211129760742188, 0.050301822662353515, 0.050485313415527346, 0.051664894104003906, 0.05027596664428711, 0.05026649475097656, 0.050277889251708986, 0.050278911590576174, 0.05015961456298828, 0.05018624114990235, 0.050423809051513675, 0.050350303649902346, 0.05063248062133789, 0.05032950210571289, 0.051355743408203126, 0.05180416107177734, 0.05105459213256836, 0.05069209671020508, 0.050923519134521485, 0.055563678741455076, 0.05114108657836914, 0.050861568450927735, 0.050780609130859376, 0.05130873489379883, 0.05114879989624024, 0.05353881454467774, 0.05162908935546875, 0.05072304153442383, 0.05073382568359375, 0.05137936019897461, 0.05129097747802734, 0.051156993865966796, 0.05163417434692383, 0.05057331085205078, 0.051402145385742185, 0.05047561645507812, 0.05049043273925781, 
0.05067184066772461, 0.05107580947875977, 0.05081292724609375, 0.05068931198120117, 0.05087919998168945, 0.05073871994018555, 0.05073276901245117, 0.050606048583984375, 0.050400032043457034, 0.05107097625732422, 0.050990142822265626, 0.05095110321044922, 0.05091328048706055, 0.05103206253051758, 0.05150502395629883, 0.0515010871887207, 0.051279136657714844, 0.05149164962768555, 0.05178572845458984, 0.051773441314697265, 0.05118975830078125, 0.05144166564941406, 0.051550209045410154, 0.0518389778137207, 0.051056640625, 0.05115203094482422, 0.05212911987304687, 0.05183062362670898, 0.05129910278320313, 0.05055680084228516, 0.05061443328857422, 0.05084723281860352, 0.05098908615112305, 0.05051030349731445, 0.0509306869506836, 0.0508526725769043, 0.050811073303222654, 0.05081292724609375, 0.050902881622314454, 0.050845855712890624, 0.05089484786987305, 0.051089408874511716, 0.05127987289428711, 0.05145721435546875, 0.05130732727050781, 0.05153792190551758, 0.052324352264404295, 0.05143132781982422, 0.051140705108642576, 0.05202534484863281, 0.05111580657958984, 0.05146844863891602, 0.051146049499511716, 0.051104736328125, 0.05107484817504883, 0.05146527862548828, 0.05158377456665039, 0.05131689453125, 0.05130854415893555, 0.05142927932739258, 0.05168956756591797, 0.051487777709960936, 0.05131753540039063, 0.051413185119628904, 0.05245337677001953, 0.05108531188964844, 0.0511016960144043, 0.05123276901245117, 0.05094150543212891, 0.05131692886352539, 0.051376384735107423, 0.05138022232055664, 0.05140851211547852, 0.0514543342590332, 0.051582977294921874, 0.051337215423583986, 0.051794944763183595, 0.05112499237060547, 0.051054847717285155, 0.050974720001220705, 0.051093505859375, 0.051574783325195314, 0.05161369705200195, 0.051165184020996096, 0.05188982391357422, 0.05202934265136719, 0.051313087463378905, 0.05135526275634766, 0.051935615539550783, 0.053129280090332034, 0.051580478668212894, 0.05168064117431641, 0.051534175872802734, 0.05231068801879883, 0.05159632110595703, 0.05159215927124024, 0.051978240966796874, 0.051607551574707033, 0.051095550537109374, 0.0510648307800293, 0.05110784149169922, 0.051335166931152344, 0.0517283821105957, 0.051439167022705075, 0.051280319213867186, 0.05140409469604492, 0.05120409774780273, 0.051356353759765626, 0.051170719146728515, 0.051311199188232424, 0.05134950256347656, 0.05140435028076172, 0.05141139221191406, 0.0514068489074707, 0.05132073593139649, 0.051447006225585935, 0.05373427200317383, 0.05120819091796875, 0.051525535583496096, 0.05121811294555664, 0.05139487838745117, 0.05118483352661133, 0.05158390426635742, 0.051566017150878905, 0.05187641525268555, 0.051806209564208984, 0.051484672546386716, 0.051757057189941405, 0.05146214294433594, 0.05178134536743164, 0.055553470611572266, 0.05186643218994141, 0.051256481170654296, 0.05127667236328125, 0.051783679962158206, 0.051929088592529295, 0.05147443389892578, 0.051438846588134766, 0.051116798400878904, 0.052566017150878906, 0.052424705505371094, 0.05298995208740234, 0.05490800094604492, 0.05278771209716797, 0.05112668609619141, 0.05118975830078125, 0.05215641784667969, 0.05132432174682617, 0.05109721755981445, 0.05263254547119141, 0.05114998245239258, 0.051305057525634766, 0.05260697555541992, 0.05152153778076172, 0.0519392318725586, 0.05188617706298828, 0.05199228668212891, 0.051546398162841796, 0.05135564804077149, 0.05136553573608398, 0.0514420166015625, 0.052961280822753906, 0.05158092880249023, 0.051875774383544924, 0.051531105041503905, 0.0514147834777832, 0.05162083053588867, 
0.052193279266357424, 0.05150457763671875, 0.051444286346435546, 0.05097881698608398, 0.05063884735107422, 0.050694145202636716, 0.050661376953125, 0.05144985580444336, 0.05164831924438477, 0.05145148849487305, 0.05140035247802734, 0.051338176727294925, 0.051089408874511716, 0.05130364990234375, 0.050895648956298827, 0.05197318267822266, 0.05090604782104492, 0.051133472442626955, 0.05073814392089844, 0.051666240692138675, 0.051308448791503904, 0.05150799942016602, 0.051101150512695315, 0.051108383178710935, 0.05163008117675781, 0.05094512176513672, 0.05076425552368164, 0.05109763336181641, 0.05126185607910156, 0.051106143951416015, 0.05082076644897461, 0.05099929428100586, 0.05077196884155273, 0.0513177604675293, 0.05128908920288086, 0.051668609619140625, 0.05147881698608398, 0.051522945404052736, 0.05119846343994141, 0.051611873626708986, 0.05079040145874023, 0.050551902770996096, 0.050498241424560546, 0.05058992004394531, 0.05071257781982422, 0.05056025695800781, 0.050377471923828125, 0.05070137786865234, 0.05251100921630859, 0.05106800079345703, 0.050983615875244144, 0.050978271484375, 0.050452606201171875, 0.050367103576660154, 0.05048524856567383, 0.05037788772583008, 0.05040604782104492, 0.050481342315673826, 0.05042335891723633, 0.05048160171508789, 0.05110367965698242, 0.05107030487060547, 0.05043478393554687, 0.050239486694335936, 0.050374496459960935, 0.050247840881347657, 0.05038489532470703, 0.05047283172607422, 0.05027033615112304, 0.05051596832275391, 0.05046262359619141, 0.05051574325561523, 0.050836830139160155, 0.05044527816772461, 0.050477054595947264, 0.050479103088378906, 0.05041315078735351, 0.050483615875244144, 0.050597118377685546, 0.050438846588134766, 0.05094406509399414, 0.05031321716308594, 0.05060160064697266, 0.050377086639404295, 0.05016985702514649, 0.050312286376953126, 0.05056950378417969, 0.05057718276977539, 0.05051814270019531, 0.05040611267089844, 0.0506060791015625, 0.05125529479980469, 0.050726913452148435, 0.05057878494262695, 0.05051836776733398, 0.05037088012695313, 0.050253822326660154, 0.05041766357421875, 0.05024358367919922, 0.05095151901245117, 0.05053219223022461, 0.05059257507324219, 0.050581439971923825, 0.05033087921142578, 0.050643775939941404, 0.05067734527587891, 0.050786720275878904, 0.05069823837280273, 0.05086003112792969, 0.051066719055175784, 0.05136809539794922, 0.05194137573242188, 0.051163616180419924, 0.05075404739379883, 0.05049327850341797, 0.05062985610961914, 0.0507421760559082, 0.05091740798950195, 0.05045862579345703, 0.0514785270690918, 0.050536449432373044, 0.05055865478515625, 0.05062483215332031, 0.05104947280883789, 0.050648063659667966, 0.05039718246459961, 0.05026611328125, 0.05047296142578125, 0.050253822326660154, 0.050382240295410156, 0.050313568115234374, 0.050561279296875, 0.050444286346435545, 0.050724864959716794, 0.05044355010986328, 0.050495487213134765, 0.050649822235107424, 0.0506695671081543, 0.05504761505126953, 0.050702911376953125, 0.05029996871948242, 0.050514881134033206, 0.05019852828979492, 0.050955745697021486, 0.05029532623291016, 0.050290687561035156, 0.05051596832275391, 0.051784832000732424, 0.050350975036621094, 0.053217281341552736, 0.05052620697021484, 0.05066662216186523, 0.05021990585327148, 0.05163430404663086, 0.05027417755126953, 0.05025939178466797, 0.05026054382324219, 0.050685951232910156, 0.05112406539916992, 0.051425441741943356, 0.050972671508789064, 0.05101772689819336, 0.05149401473999023, 0.05146428680419922, 0.05098166275024414, 0.05100953674316406, 0.05129011154174805, 
0.05168742370605469, 0.05106687927246094, 0.050972671508789064, 0.0510090560913086, 0.05098339080810547, 0.05083238220214844, 0.05076863861083984, 0.05264608001708984, 0.052456512451171874, 0.05172054290771484, 0.05139107131958008, 0.05132406234741211, 0.051181568145751956, 0.051388992309570315, 0.05108531188964844, 0.051394657135009764, 0.05116828918457031, 0.05171273422241211, 0.05140707015991211, 0.051633312225341794, 0.0515280647277832, 0.05141670227050781, 0.05110464096069336, 0.051909889221191406, 0.05208697509765625, 0.051872318267822265, 0.051386367797851565, 0.05247081756591797, 0.051837921142578125, 0.051630016326904296, 0.05227465438842773, 0.05154207992553711, 0.05225936126708984, 0.051647678375244144, 0.051015518188476563, 0.050974910736083984, 0.05135782241821289, 0.05129641723632813, 0.051466751098632815, 0.05113558578491211, 0.05153385543823242, 0.051299198150634766, 0.051156959533691405, 0.050957950592041015, 0.05099151992797851, 0.051517440795898435, 0.051326305389404296, 0.051536544799804684, 0.05121820831298828, 0.05338873672485352, 0.0513155517578125, 0.05095942306518555, 0.051202945709228516, 0.051120128631591794, 0.051286014556884765, 0.05109932708740234, 0.05107315063476563, 0.05107283020019531, 0.05129046249389648, 0.05131999969482422, 0.051159103393554686, 0.05161471939086914, 0.05159097671508789, 0.0511627197265625, 0.05122598266601563, 0.051340286254882815, 0.05251023864746094, 0.050909664154052736, 0.05105459213256836, 0.0514703369140625]",tokens/s,19.56578571451915,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,866.930688,6477.971456,0.0,6075.449344,6044.13184,s,1,14.9632802734375,14.9632802734375,0.0,14.9632802734375,14.9632802734375,14.9632802734375,14.9632802734375,[14.9632802734375],,kWh,0.0002310992183500275,2.5482304195701646e-05,7.3417280955973e-05,0.00032999880350170215,,MB,1415.655424,7002.259456,0.0,6587.154432,6470.128128,s,10,10.332634033203124,1.0332634033203125,0.005256157672685836,1.032478271484375,1.0404341796875,1.0411805908203124,1.0417777197265625,"[1.0245020751953124, 1.0273753662109375, 1.03185302734375, 1.0302879638671876, 1.0328743896484376, 1.032726318359375, 1.03858935546875, 1.032230224609375, 1.040268310546875, 1.041927001953125]",tokens/s,247.75870235736954,kWh,3.016207619833494e-05,3.3241493340547383e-06,2.011743276059963e-05,5.360365829298931e-05,tokens/kWh,4775793.446796926,MB,1453.002752,7016.93952,0.0,6599.737344,6470.130688,s,10,50.06724169921875,5.006724169921875,0.007779972950421386,5.0080830078125,5.014426953125,5.015475927734375,5.0163151074218755,"[4.98943603515625, 4.99848974609375, 5.00606591796875, 5.00394384765625, 5.00414892578125, 5.011376953125, 5.01010009765625, 5.01296142578125, 5.01652490234375, 
5.01419384765625]",tokens/s,12.58307784928025,kWh,0.00014663647428208527,1.6177353962787765e-05,9.687254972020053e-05,0.0002596863779650736,tokens/kWh,242600.3261845069,,s,630,50.06442234802246,0.07946733706035311,0.001492822654636188,0.0791368293762207,0.08027044830322266,0.08040983352661134,0.08959004142761233,"[0.0887193603515625, 0.08003788757324219, 0.0792248306274414, 0.07885343933105468, 0.07840035247802735, 0.07838909149169922, 0.0785400619506836, 0.07848159790039062, 0.07845037078857423, 0.07852937316894532, 0.07843186950683594, 0.07848326110839844, 0.0804808349609375, 0.07966918182373046, 0.07955836486816406, 0.07957698822021485, 0.07946460723876952, 0.07876432037353516, 0.0785245132446289, 0.07859375762939454, 0.07849183654785157, 0.07857686614990235, 0.07873760223388672, 0.07854338836669922, 0.07858338928222657, 0.07862242889404297, 0.07864015960693359, 0.07875555419921874, 0.07987187194824219, 0.07976156616210937, 0.07924937438964844, 0.07881318664550781, 0.07856707000732421, 0.07868355560302734, 0.07858211517333985, 0.07881545257568359, 0.07869580841064452, 0.07870156860351563, 0.0786894073486328, 0.07880723571777344, 0.07869715118408203, 0.07874150085449219, 0.08002559661865234, 0.08017510223388671, 0.07990886688232422, 0.08038195037841797, 0.07982694244384765, 0.079351806640625, 0.07881100463867187, 0.0788194580078125, 0.07881890869140624, 0.0789361572265625, 0.07882166290283203, 0.07877021026611328, 0.07888492584228515, 0.07900115203857422, 0.07890096282958985, 0.07891129302978515, 0.07898812866210937, 0.07901388549804687, 0.08010546875, 0.07956636810302735, 0.08035990142822266, 0.08892527770996093, 0.07991584014892578, 0.07919206237792968, 0.07873538970947265, 0.07968080139160157, 0.07962057495117188, 0.08009075164794922, 0.0798070068359375, 0.07962124633789062, 0.0792892837524414, 0.0787783660888672, 0.07860633850097656, 0.07853670501708984, 0.07847840118408203, 0.07852880096435547, 0.07848416137695312, 0.07841993713378906, 0.07851628875732422, 0.0784751968383789, 0.07983417510986328, 0.07961411285400391, 0.08009523010253906, 0.07980726623535156, 0.07923856353759766, 0.07886089324951172, 0.07866162872314453, 0.07861769866943359, 0.07871292877197265, 0.0786174087524414, 0.07859200286865234, 0.0786627197265625, 0.07861666870117187, 0.07855165100097657, 0.07871862030029297, 0.07861824035644531, 0.08002582550048828, 0.07977446746826172, 0.07977523040771485, 0.07932978820800782, 0.0790312957763672, 0.07869337463378906, 0.07866368103027344, 0.07870041656494141, 0.07883331298828125, 0.07863053131103516, 0.07880518341064453, 0.07875856018066406, 0.07874867248535156, 0.07870934295654297, 0.07899177551269532, 0.08013209533691407, 0.07998464202880859, 0.07977574157714844, 0.07988428497314454, 0.07985501098632812, 0.08038623809814453, 0.0801263656616211, 0.0800987548828125, 0.07998345947265625, 0.07972249603271485, 0.07920816040039062, 0.07894745635986328, 0.07899017333984375, 0.08887686157226563, 0.07999139404296875, 0.07930390167236329, 0.07892221069335938, 0.07974944305419922, 0.0797318115234375, 0.07962306976318359, 0.07917542266845704, 0.07874483489990235, 0.07860530853271484, 0.07864934539794922, 0.07860428619384766, 0.07862067413330077, 0.07862271881103515, 0.07863091278076172, 0.07876573181152344, 0.078595458984375, 0.07905699157714843, 0.07896358489990235, 0.0805191650390625, 0.07994300842285157, 0.07983990478515625, 0.080174560546875, 0.07955213165283204, 0.0790127716064453, 0.07863081359863282, 0.07882931518554688, 0.0787470703125, 0.07859839630126954, 
0.07869849395751953, 0.07873299407958985, 0.0786626205444336, 0.07863622283935547, 0.07867801666259766, 0.07863986968994141, 0.08003132629394531, 0.07981251525878906, 0.08019344329833984, 0.079891357421875, 0.079844482421875, 0.08014911651611328, 0.07955661010742188, 0.07918182373046875, 0.07897222137451172, 0.07880496215820312, 0.07878630065917969, 0.0787547836303711, 0.07879843139648438, 0.07868428802490235, 0.07876841735839844, 0.07877632141113282, 0.07883980560302735, 0.07873062133789062, 0.08023654174804687, 0.08003238677978515, 0.07999443054199219, 0.07988796997070312, 0.08048521423339844, 0.08039536285400391, 0.0801596450805664, 0.08036863708496093, 0.08015564727783203, 0.08040038299560547, 0.09063452911376953, 0.07986380767822265, 0.07927321624755859, 0.07869721221923828, 0.07835004425048828, 0.07838902282714844, 0.07834243011474609, 0.07978211212158202, 0.07994572448730469, 0.07934566497802735, 0.07882342529296875, 0.07841996765136719, 0.07849993896484375, 0.07846083068847656, 0.07847526550292969, 0.07860559844970703, 0.0784491195678711, 0.078489501953125, 0.07851862335205079, 0.07851773071289063, 0.08034905242919922, 0.08041948699951172, 0.07984931182861328, 0.07963785552978515, 0.07920706939697265, 0.0789133758544922, 0.07861074829101562, 0.07864524841308594, 0.07872512054443359, 0.07869235229492187, 0.07886841583251954, 0.07880300903320313, 0.07870259094238281, 0.07869644927978516, 0.0787946548461914, 0.0801435546875, 0.07989254760742187, 0.0798948516845703, 0.07984121704101563, 0.08035794830322265, 0.08044547271728515, 0.08070111846923828, 0.07967571258544921, 0.0794007339477539, 0.07891580963134766, 0.07896377563476563, 0.07885715484619141, 0.07894598388671875, 0.07889337921142578, 0.0788479995727539, 0.07882956695556641, 0.07898521423339844, 0.07880499267578125, 0.07884595489501953, 0.07956995391845703, 0.0801924819946289, 0.0795525131225586, 0.07899712371826172, 0.08031078338623047, 0.08029913330078126, 0.08005299377441406, 0.08004959869384766, 0.08051679992675781, 0.09011328125, 0.07984819030761718, 0.07912242889404297, 0.07869420623779297, 0.07848329925537109, 0.07842976379394531, 0.07838185882568359, 0.0784382095336914, 0.07839762878417969, 0.0797286376953125, 0.07998873901367187, 0.07936819458007813, 0.07890060424804687, 0.07848818969726562, 0.07845791625976563, 0.0786337890625, 0.07854489898681641, 0.0794144287109375, 0.07879305267333984, 0.07868812561035156, 0.07858662414550781, 0.07861862182617188, 0.08001068878173828, 0.08070368194580078, 0.08021151733398438, 0.08011859130859375, 0.07950646209716797, 0.07914514923095703, 0.07858672332763672, 0.07859808349609375, 0.07870412445068359, 0.07865097808837891, 0.07854582214355468, 0.078671875, 0.07858809661865235, 0.07870240020751954, 0.07866162872314453, 0.07997235107421875, 0.07978803253173829, 0.08021965026855468, 0.08022908782958985, 0.07994956970214843, 0.08026860809326172, 0.08013804626464843, 0.07982905578613281, 0.07945273590087891, 0.07894802856445313, 0.07876668548583984, 0.07875344085693359, 0.0788729248046875, 0.07931878662109375, 0.07889945220947266, 0.07877164459228515, 0.07875625610351562, 0.07872118377685547, 0.0788736343383789, 0.08024163055419922, 0.0799170913696289, 0.08035734558105469, 0.08007421112060546, 0.08037225341796875, 0.08041180419921876, 0.08047452545166016, 0.09111939239501952, 0.07994989013671874, 0.07913881683349609, 0.07879286193847657, 0.07844863891601563, 0.07841283416748047, 0.07857660675048828, 0.07846707153320312, 0.07844044494628906, 0.0799662094116211, 0.07999282836914062, 
0.0800890884399414, 0.07938662719726562, 0.07899545288085938, 0.07881932830810547, 0.07856694030761718, 0.0784634552001953, 0.0784691162109375, 0.0785401611328125, 0.07856739044189454, 0.07849644470214844, 0.07856329345703125, 0.07878790283203126, 0.0810414047241211, 0.08007341003417968, 0.08010128021240234, 0.08012604522705079, 0.08029936218261718, 0.07971807861328126, 0.07935279846191406, 0.07889715576171875, 0.07875481414794921, 0.07885446166992187, 0.07874211120605469, 0.07873545837402343, 0.07894563293457031, 0.07873113250732422, 0.07894429016113282, 0.07882418823242188, 0.07884210968017578, 0.08004787445068359, 0.07980265808105469, 0.0803305892944336, 0.08047833251953125, 0.079863037109375, 0.0804500503540039, 0.08039183807373047, 0.08047357177734375, 0.08040742492675781, 0.08044748687744141, 0.08037920379638672, 0.08046867370605469, 0.07967708587646484, 0.0792824935913086, 0.07891158294677734, 0.07903340911865234, 0.07890351867675781, 0.07892649841308594, 0.07909375762939454, 0.0792063980102539, 0.07898521423339844, 0.07930441284179687, 0.08014262390136719, 0.08986156463623046, 0.07989282989501953, 0.07913689422607421, 0.07877027130126953, 0.07843807983398438, 0.07988166046142578, 0.07996710205078125, 0.07930675506591797, 0.07897612762451171, 0.07849049377441407, 0.07851622772216797, 0.07850188446044921, 0.07849504089355469, 0.07851241302490235, 0.07949967956542969, 0.07939437103271485, 0.07897747039794922, 0.078561279296875, 0.0785244140625, 0.08048639678955079, 0.08003504180908202, 0.08015132904052734, 0.08002732849121094, 0.07973020935058593, 0.07922870635986329, 0.07886540985107422, 0.07858790588378907, 0.07865570831298828, 0.07857891082763672, 0.07861711883544922, 0.07871014404296875, 0.07868019104003907, 0.07857584381103516, 0.07932550048828126, 0.0799109115600586, 0.08026451110839844, 0.08022086334228516, 0.07981260681152344, 0.08023206329345703, 0.07974118041992187, 0.0801744613647461, 0.07981878662109375, 0.0802529296875, 0.07964540863037109, 0.07915213012695313, 0.07877645111083985, 0.07876681518554687, 0.07891983795166016, 0.07879679870605469, 0.07877222442626954, 0.07909580993652343, 0.08005427551269531, 0.07939625549316406, 0.07885884857177734, 0.08019792175292968, 0.07960137939453125, 0.07902617645263672, 0.0800847396850586, 0.07948313903808593, 0.08026322937011719, 0.07976748657226562, 0.08043673706054688, 0.08037229156494141, 0.09071846771240234, 0.07982259368896484, 0.0790887680053711, 0.07876287841796875, 0.07840089416503906, 0.0784369888305664, 0.07844633483886719, 0.07841391754150391, 0.07986192321777344, 0.07931903839111328, 0.08010880279541016, 0.08016713714599609, 0.08038678741455078, 0.07978726196289063, 0.07942816162109376, 0.07883904266357422, 0.07847193908691406, 0.07852607727050781, 0.0785186538696289, 0.07851731109619141, 0.078510498046875, 0.07856169891357422, 0.07913279724121093, 0.07991820526123047, 0.0792072982788086, 0.07994163513183594, 0.08030617523193359, 0.07988428497314454, 0.08013737487792968, 0.08019235229492187, 0.08017257690429687, 0.08016313934326172, 0.08011177825927734, 0.07958252716064453, 0.0791169891357422, 0.07874908447265624, 0.07871887969970703, 0.07870925140380859, 0.07871711730957032, 0.07879206085205079, 0.0786909408569336, 0.07891763305664062, 0.0794808349609375, 0.08000921630859376, 0.07933939361572266, 0.08006396484375, 0.07962064361572266, 0.0802991714477539, 0.07979106903076172, 0.08033702087402343, 0.07979145812988281, 0.08033539581298828, 0.08020374298095703, 0.0802017593383789, 0.07985356903076171, 0.07946585845947265, 
0.079370849609375, 0.07922892761230468, 0.07900367736816406, 0.0790519027709961, 0.07886502075195312, 0.07897727966308593, 0.07915110778808594, 0.09015119934082032, 0.07993350219726562, 0.07925084686279296, 0.078864990234375, 0.07857766723632813, 0.07856944274902344, 0.08001513671875, 0.08001331329345703, 0.08002531433105468, 0.07961039733886718, 0.07927603149414063, 0.07930828857421875, 0.07893971252441406, 0.07856192016601563, 0.07897939300537109, 0.07896268463134766, 0.07913040161132813, 0.07875977325439452, 0.07897535705566407, 0.08004739379882812, 0.07930448150634765, 0.07860896301269531, 0.07878284454345703, 0.0801107177734375, 0.08015296173095703, 0.08023619079589844, 0.08014851379394532, 0.08048284912109376, 0.08013648223876953, 0.07956047821044922, 0.07905248260498046, 0.0787113265991211, 0.078635009765625, 0.07866162872314453, 0.07870230102539062, 0.07875373077392578, 0.07869251251220703, 0.07911007690429687, 0.0799562225341797, 0.07944758605957031, 0.07935842895507812, 0.07996825408935547, 0.08019149017333985, 0.08029747009277344, 0.07944064331054687, 0.08033049774169922, 0.07984515380859375, 0.08029148864746094, 0.08022278594970703, 0.0802870101928711, 0.08023503875732421, 0.08045382690429688, 0.07962937927246094, 0.08010438537597656, 0.07952166748046875, 0.07912406158447266, 0.07875862121582031, 0.07888377380371094, 0.07887884521484376, 0.07894841766357422, 0.07879955291748048, 0.07926911926269531, 0.08023872375488281, 0.09080841827392579, 0.07996867370605469, 0.07919356536865234, 0.07875542449951171, 0.0783135986328125, 0.07842486572265625, 0.07849152374267578, 0.07983641815185546, 0.07939574432373046, 0.0791021728515625, 0.0790771484375, 0.07882077026367187, 0.07853260803222656, 0.07977225494384765, 0.07940096282958985, 0.07876608276367188, 0.07846438598632813, 0.07850982666015625, 0.07853555297851562, 0.07853282928466797, 0.07996931457519531, 0.08003616333007812, 0.08004243469238281, 0.08009318542480469, 0.08004402923583985, 0.08048025512695313, 0.08044338989257813, 0.0799551010131836, 0.07931581115722657, 0.07929446411132812, 0.07892787170410157, 0.07865097808837891, 0.07861084747314454, 0.07863481903076172, 0.07865904235839843, 0.07901618957519531, 0.0797250213623047, 0.07983718109130859, 0.07964208221435547, 0.0794170913696289, 0.07907331085205078, 0.08023455810546876, 0.07972223663330077, 0.08030012512207031, 0.07954927825927735, 0.0800416030883789, 0.08025885009765625, 0.08034159851074218, 0.08020697784423828, 0.0803193588256836, 0.08030963134765624, 0.07996463775634766, 0.07943183898925782, 0.07918592071533204, 0.07880089569091797, 0.07885004425048828, 0.0787927017211914, 0.07883161926269532, 0.07913676452636718, 0.08006861114501954, 0.07946240234375, 0.07953817749023437, 0.08004608154296874]",tokens/s,12.583786458586493,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,2580.647936,11834.097664,0.0,11431.575552,10953.091072,s,1,21.863673828125,21.863673828125,0.0,21.863673828125,21.863673828125,21.863673828125,21.863673828125,[21.863673828125],,kWh,0.0004197766955375035,4.629725061949096e-05,0.00013836149957799707,0.0006044354457349915,,MB,1957.31456,12729.581568,0.0,12314.476544,11624.261632,s,10,17.844894409179688,1.7844894409179688,0.008686781083049825,1.7873071899414064,1.7925514282226562,1.7933415100097656,1.793973575439453,"[1.7643204345703125, 1.7763023681640624, 1.7793931884765626, 1.7822445068359376, 1.7870601806640625, 1.78755419921875, 1.790576416015625, 1.7923758544921875, 1.7909356689453124, 1.794131591796875]",tokens/s,143.45839999384344,kWh,5.207357764875003e-05,5.743337235617866e-06,3.460780546400074e-05,9.242472034836865e-05,tokens/kWh,2769821.7428744272,MB,1961.422848,12733.775872,0.0,12316.573696,11624.264192,s,10,88.9117109375,8.89117109375,0.01912654409121755,8.89410302734375,8.91485966796875,8.916952783203124,8.918627275390625,"[8.8566337890625, 8.8955859375, 8.882083984375, 8.865115234375, 8.8926201171875, 8.8809990234375, 8.9020859375, 8.91439453125, 8.903146484375, 8.9190458984375]",tokens/s,7.085680765302729,kWh,0.0002605127826874995,2.8736561883403538e-05,0.00017308791624799923,0.00046233726081890234,tokens/kWh,136264.16328290946,,s,630,88.90784005737304,0.14112355564662388,0.0016145138068442417,0.14106793212890625,0.1420726287841797,0.14257409591674805,0.15113886184692382,"[0.15267634582519532, 0.13926602172851563, 0.1384405059814453, 0.1391425018310547, 0.1383514862060547, 0.138387451171875, 0.1410867156982422, 0.14145126342773437, 0.13906527709960936, 0.13879257202148437, 0.13888966369628905, 0.1393410186767578, 0.1392109375, 0.141210205078125, 0.14165606689453125, 0.14073770141601563, 0.1400202178955078, 0.14018351745605467, 0.14062214660644531, 0.14080825805664063, 0.14033920288085938, 0.1403116455078125, 0.14068576049804687, 0.1400564727783203, 0.13972866821289062, 0.13979849243164064, 0.13988124084472656, 0.1398643798828125, 0.13933941650390624, 0.13959788513183594, 0.14060304260253906, 0.13919874572753907, 0.13977944946289061, 0.1400101776123047, 0.1391815643310547, 0.1400491180419922, 0.14006207275390625, 0.14130192565917968, 0.14028416442871094, 0.1402224578857422, 0.14105381774902342, 0.14042739868164061, 0.1407344665527344, 0.14169448852539063, 0.14117730712890625, 0.14123353576660155, 0.1405704345703125, 0.1414869079589844, 0.1406519012451172, 0.14105459594726563, 0.14148403930664064, 0.14121302795410157, 0.1413589782714844, 0.14121673583984376, 0.1411131591796875, 0.14079542541503906, 0.14115068054199217, 0.14180752563476562, 0.1417729034423828, 0.1408546905517578, 0.14123068237304687, 0.1411894073486328, 0.1413089599609375, 0.15200703430175783, 0.1386376953125, 0.1383096923828125, 0.13835189819335938, 0.1384897003173828, 0.13913792419433593, 0.14243621826171876, 0.1425041961669922, 0.13962637329101563, 0.13872230529785157, 0.139438720703125, 0.14010806274414062, 0.1400587158203125, 0.14208985900878907, 0.1436995849609375, 0.14062258911132813, 0.13991935729980468, 0.1408000030517578, 0.14037564086914062, 0.14062042236328126, 0.14150787353515626, 0.14269900512695313, 0.14147532653808595, 0.1409971160888672, 0.141127685546875, 0.14111756896972658, 0.14114186096191406, 0.14158387756347657, 0.14269290161132814, 0.1410655975341797, 0.14089894104003906, 0.14127923583984375, 0.14149221801757814, 0.14110310363769532, 0.14171955871582032, 
0.14186521911621094, 0.1414490203857422, 0.14012716674804687, 0.1411939239501953, 0.14145901489257812, 0.14143154907226563, 0.14166220092773438, 0.14165196228027344, 0.14098431396484376, 0.14077938842773438, 0.14114828491210937, 0.14196524047851564, 0.14086764526367188, 0.14165005493164062, 0.14101901245117188, 0.14097987365722656, 0.14108831787109374, 0.14082127380371093, 0.14244805908203126, 0.14070751953125, 0.14185562133789062, 0.1413684539794922, 0.14112240600585937, 0.14137551879882812, 0.14164787292480469, 0.14167266845703125, 0.14181132507324218, 0.1411480712890625, 0.1510592041015625, 0.13972685241699218, 0.13813760375976564, 0.1396841278076172, 0.13836857604980468, 0.13941094970703125, 0.14336236572265626, 0.14268861389160156, 0.13990847778320312, 0.13970700073242187, 0.13943994140625, 0.13824751281738282, 0.1397708740234375, 0.14179927062988282, 0.14222335815429688, 0.14016278076171876, 0.13989683532714844, 0.13979440307617189, 0.1391576690673828, 0.13980278015136718, 0.1412095947265625, 0.1420240936279297, 0.14019235229492188, 0.1409551696777344, 0.13972323608398438, 0.1397596435546875, 0.14029350280761718, 0.1413038787841797, 0.14202470397949218, 0.14109036254882812, 0.1418311309814453, 0.13987635803222656, 0.14052166748046874, 0.14062367248535157, 0.14135501098632813, 0.14201446533203124, 0.14151884460449218, 0.14136233520507813, 0.14059344482421876, 0.1413617248535156, 0.14095974731445313, 0.1414512939453125, 0.14201846313476563, 0.14114822387695314, 0.14142874145507814, 0.1412046661376953, 0.14148486328125, 0.14150860595703124, 0.14083482360839844, 0.1414058837890625, 0.14153555297851564, 0.14104762268066406, 0.140828857421875, 0.14163352966308593, 0.140980224609375, 0.14086099243164063, 0.14096018981933595, 0.14153523254394532, 0.14158438110351562, 0.14110032653808594, 0.14182009887695313, 0.14097053527832032, 0.141485595703125, 0.15127285766601561, 0.14028448486328124, 0.14037738037109376, 0.14002000427246095, 0.13847946166992187, 0.13950941467285155, 0.14215263366699218, 0.14189138793945313, 0.13993157958984376, 0.14028399658203125, 0.13993302917480469, 0.13912109375, 0.13955314636230468, 0.14056550598144532, 0.14076937866210937, 0.1408152618408203, 0.14042626953125, 0.13963363647460938, 0.14018095397949218, 0.13949958801269532, 0.13989039611816406, 0.14065330505371093, 0.14057676696777344, 0.1415679931640625, 0.14010333251953125, 0.14023507690429687, 0.13985337829589845, 0.14074668884277344, 0.14010832214355468, 0.14187271118164063, 0.14111347961425783, 0.14090579223632813, 0.13995928955078124, 0.1401297607421875, 0.14095759582519532, 0.14023948669433595, 0.14166656494140625, 0.14126669311523438, 0.14108876037597656, 0.1400478973388672, 0.14019017028808595, 0.1410084228515625, 0.1407918701171875, 0.14066319274902345, 0.14036376953125, 0.14042726135253905, 0.1403243865966797, 0.140327392578125, 0.14193203735351562, 0.14030284118652345, 0.14124021911621093, 0.14035789489746095, 0.14119839477539062, 0.14040579223632813, 0.14045977783203126, 0.14052146911621094, 0.14082608032226562, 0.14145590209960937, 0.1404783935546875, 0.14209426879882814, 0.14055027770996092, 0.1406402587890625, 0.1404884796142578, 0.15275010681152343, 0.1395462646484375, 0.13856393432617187, 0.13942105102539062, 0.13869529724121094, 0.13990531921386717, 0.14379212951660156, 0.14265519714355468, 0.14062725830078124, 0.13897593688964843, 0.13980876159667968, 0.13957734680175782, 0.14003814697265626, 0.1432227783203125, 0.14209552001953124, 0.14060812377929688, 0.1396308135986328, 0.1406097869873047, 
0.13969996643066407, 0.14068531799316406, 0.14271078491210937, 0.1426657257080078, 0.14014463806152344, 0.14009138488769532, 0.14136524963378908, 0.1407178192138672, 0.1414228515625, 0.14272099304199218, 0.14172163391113282, 0.1402040252685547, 0.14085501098632813, 0.14070538330078125, 0.14094563293457033, 0.14132627868652345, 0.14203138732910156, 0.14193174743652343, 0.14069203186035156, 0.14145050048828126, 0.1410748748779297, 0.14093728637695313, 0.14101461791992187, 0.14154226684570312, 0.1418936309814453, 0.1410966339111328, 0.14091091918945312, 0.1413038024902344, 0.14100210571289062, 0.14109759521484375, 0.14120774841308595, 0.14146949768066405, 0.14080921936035157, 0.14100572204589842, 0.14064035034179687, 0.1405535430908203, 0.14047917175292968, 0.14201036071777343, 0.14135848999023437, 0.14054159545898437, 0.14102934265136718, 0.1412162628173828, 0.14044937133789062, 0.14138029479980468, 0.14159686279296874, 0.1519656982421875, 0.13978419494628908, 0.1395437774658203, 0.13925672912597656, 0.13976351928710937, 0.13950572204589845, 0.14341500854492187, 0.14261482238769532, 0.14030438232421874, 0.1399598388671875, 0.138672607421875, 0.14002073669433593, 0.13960646057128906, 0.14143341064453124, 0.14232371520996093, 0.14032182312011718, 0.14016610717773437, 0.13976995849609375, 0.14015049743652344, 0.1400198974609375, 0.14219264221191405, 0.14136524963378908, 0.14082424926757814, 0.14076138305664063, 0.14003817749023437, 0.14085877990722656, 0.14018179321289062, 0.14093740844726563, 0.14155584716796876, 0.1403999938964844, 0.14170089721679688, 0.1402010498046875, 0.14093305969238282, 0.14033900451660156, 0.1403514862060547, 0.14202835083007812, 0.1401921844482422, 0.14168678283691405, 0.1402777557373047, 0.14111744689941405, 0.14042930603027343, 0.14026045227050782, 0.14114288330078126, 0.14030239868164063, 0.14213063049316407, 0.14033555603027345, 0.14202838134765625, 0.14068585205078124, 0.14030029296875, 0.14044729614257812, 0.14116000366210937, 0.14213618469238282, 0.1413017578125, 0.1410846710205078, 0.14052557373046876, 0.14110076904296875, 0.14040293884277344, 0.14129872131347657, 0.14207203674316407, 0.14154415893554687, 0.14124038696289062, 0.14087986755371093, 0.14124803161621094, 0.1510955810546875, 0.14130125427246093, 0.1402334442138672, 0.13981459045410155, 0.14026800537109374, 0.1401750030517578, 0.14144149780273438, 0.14238050842285158, 0.140231201171875, 0.1405911102294922, 0.14052934265136718, 0.14015519714355468, 0.14072422790527345, 0.1403187255859375, 0.14165177917480468, 0.1407607727050781, 0.14093772888183595, 0.14093904113769531, 0.14040701293945312, 0.14023680114746093, 0.14062156677246093, 0.1417870330810547, 0.14101695251464844, 0.14096022033691405, 0.14086962890625, 0.14151884460449218, 0.14003114318847656, 0.14178314208984374, 0.14171983337402344, 0.14104591369628905, 0.14106144714355467, 0.1402767333984375, 0.14145468139648437, 0.14098908996582032, 0.1411249542236328, 0.14197946166992187, 0.1420826873779297, 0.14282978820800782, 0.141623291015625, 0.14208409118652343, 0.14168873596191406, 0.1418255615234375, 0.14168121337890624, 0.1420001220703125, 0.1409490203857422, 0.1414655303955078, 0.14159516906738281, 0.1410847930908203, 0.14110678100585938, 0.14097161865234376, 0.14080685424804687, 0.1421835479736328, 0.14074528503417968, 0.14126486206054686, 0.14040713500976562, 0.14110105895996095, 0.14057061767578125, 0.14169711303710938, 0.14208937072753905, 0.14137216186523438, 0.14201446533203124, 0.14058303833007812, 0.14143037414550783, 
0.15105746459960936, 0.14096163940429687, 0.14051942443847656, 0.14025526428222657, 0.139791748046875, 0.13976751708984375, 0.14217926025390626, 0.14354220581054689, 0.14194419860839844, 0.14245552062988281, 0.14134262084960938, 0.14144125366210938, 0.14118217468261718, 0.14125961303710938, 0.14268905639648438, 0.14054092407226562, 0.1419730224609375, 0.14127705383300782, 0.1413002624511719, 0.14054147338867187, 0.1415850830078125, 0.14176463317871094, 0.14180735778808592, 0.14093107604980468, 0.14105599975585936, 0.14126693725585937, 0.1405482940673828, 0.14152828979492188, 0.14134947204589843, 0.1418212127685547, 0.14098416137695313, 0.1405716552734375, 0.14051519775390625, 0.1414982147216797, 0.14134288024902344, 0.14112355041503907, 0.14119740295410158, 0.14107026672363282, 0.14119322204589843, 0.14086553955078124, 0.14072550964355468, 0.14081491088867187, 0.14187315368652345, 0.14111888122558594, 0.1413148498535156, 0.14125260925292968, 0.14116864013671876, 0.14111033630371095, 0.1406570281982422, 0.14160540771484376, 0.14107241821289063, 0.14144461059570312, 0.14146202087402343, 0.1419366455078125, 0.14204042053222657, 0.14034786987304687, 0.14203514099121095, 0.1412704620361328, 0.14131619262695314, 0.14169564819335936, 0.1421207733154297, 0.14261453247070313, 0.14298521423339844, 0.15115653991699218, 0.14031462097167968, 0.13983334350585938, 0.13990716552734375, 0.1398841552734375, 0.1394977569580078, 0.14189952087402344, 0.14252467346191405, 0.14044563293457032, 0.13995196533203125, 0.1407674560546875, 0.13984768676757814, 0.13971376037597658, 0.14200230407714845, 0.1420044860839844, 0.14023469543457032, 0.14080662536621094, 0.1408892822265625, 0.13996025085449218, 0.1408009033203125, 0.14102732849121094, 0.14188954162597656, 0.14042112731933593, 0.1416800994873047, 0.14002024841308594, 0.14089010620117187, 0.14084915161132813, 0.14096588134765625, 0.14122393798828126, 0.140337158203125, 0.14205264282226562, 0.1407884521484375, 0.14132223510742187, 0.14094146728515625, 0.14130592346191406, 0.14096563720703126, 0.14099888610839845, 0.141966552734375, 0.1409641571044922, 0.1408632049560547, 0.14116220092773438, 0.14184515380859375, 0.1411176300048828, 0.141154296875, 0.14215577697753906, 0.1419594268798828, 0.14119480895996095, 0.1412425537109375, 0.14169847106933595, 0.14236936950683593, 0.14249696350097657, 0.14277708435058595, 0.14238502502441405, 0.14150796508789062, 0.14141477966308594, 0.14204486083984375, 0.14121037292480468, 0.1411782684326172, 0.14166896057128905, 0.14166610717773437, 0.1413961639404297, 0.1410723876953125, 0.14214256286621094, 0.1512334442138672, 0.14113587951660156, 0.14107443237304687, 0.14171955871582032, 0.1411661376953125, 0.1412283477783203, 0.14192857360839845, 0.14289923095703125, 0.14101295471191405, 0.14105001831054687, 0.14101084899902344, 0.14112147521972657, 0.14112562561035155, 0.14182194519042968, 0.14171888732910157, 0.14146217346191406, 0.14084815979003906, 0.14114057922363282, 0.1411114501953125, 0.14074716186523437, 0.1405581512451172, 0.141889404296875, 0.14156588745117188, 0.14094697570800782, 0.1411561279296875, 0.14093196105957032, 0.14038038635253905, 0.14121551513671876, 0.14125782775878906, 0.14187202453613282, 0.14088192749023437, 0.1408570861816406, 0.140197509765625, 0.14024896240234375, 0.14188621520996095, 0.14210867309570313, 0.1411788787841797, 0.14108055114746093, 0.14130998229980468, 0.1403678741455078, 0.14029808044433595, 0.1420638427734375, 0.1419365692138672, 0.1419771270751953, 0.14106057739257813, 
0.1419144287109375, 0.14119667053222656, 0.14111068725585937, 0.14159555053710937, 0.14140620422363281, 0.1420779571533203, 0.14193869018554686, 0.14215507507324218, 0.1415683898925781, 0.14290931701660156, 0.14251052856445312, 0.1417502746582031, 0.14263909912109374, 0.14226431274414061, 0.141330078125, 0.14199229431152344, 0.14125392150878907, 0.14123490905761718]",tokens/s,7.085989262515603,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, 
self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 68.12 MiB is free. Process 79250 has 14.67 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 293.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,897.257472,577.69984,0.0,182.452224,179.733504,s,1,7.877572265625,7.877572265625,0.0,7.877572265625,7.877572265625,7.877572265625,7.877572265625,[7.877572265625],,kWh,2.558793690833454e-05,2.8154384288212715e-06,9.265562967999963e-06,3.7668938305155774e-05,,MB,1205.055488,674.168832,0.0,266.338304,224.293376,s,10,0.26332313537597657,0.026332313537597657,0.000610555558624612,0.026153087615966797,0.0267448860168457,0.027288587379455564,0.027723548469543457,"[0.026606048583984375, 0.02783228874206543, 0.026089855194091797, 0.02617750358581543, 0.026577024459838866, 0.02662406349182129, 0.025579135894775392, 0.026088127136230467, 0.026128671646118165, 0.025620416641235353]",tokens/s,9721.895481552714,kWh,7.676794833879956e-07,8.462414545547406e-08,5.081871957008012e-07,1.3604908245442707e-06,tokens/kWh,188167384.43330067,MB,1238.601728,686.751744,0.0,278.921216,224.295936,s,10,11.490330200195315,1.1490330200195316,0.01679524657240238,1.1495841064453125,1.16569306640625,1.1701708740234376,1.1737531201171876,"[1.164697998046875, 1.1567578125, 1.174648681640625, 1.14178076171875, 1.16360595703125, 1.13744384765625, 1.12833935546875, 1.142410400390625, 1.119871826171875, 
1.1607735595703126]",tokens/s,54.82871153600887,kWh,3.332297936244537e-05,3.6750928415303726e-06,1.3648851907698941e-05,5.064692411167469e-05,tokens/kWh,1243905.747584734,,s,630,11.485126411438001,0.018230359383234904,0.0005112548961969608,0.018171520233154298,0.01880431652069092,0.018962901496887206,0.019943947334289565,"[0.018439231872558595, 0.01866339111328125, 0.018562015533447267, 0.018786624908447267, 0.01859756851196289, 0.018696384429931642, 0.018680704116821288, 0.018768447875976563, 0.01888889694213867, 0.018825408935546874, 0.01873481559753418, 0.018884767532348634, 0.018837568283081054, 0.018847232818603517, 0.01878041648864746, 0.01879449653625488, 0.01886240005493164, 0.01865465545654297, 0.018614784240722656, 0.01889244842529297, 0.018651552200317383, 0.018642080307006835, 0.018993600845336914, 0.019159839630126952, 0.018760608673095702, 0.01876483154296875, 0.018767551422119142, 0.018751487731933594, 0.01871254348754883, 0.018669120788574217, 0.018997728347778322, 0.01865727996826172, 0.01872307205200195, 0.018747135162353514, 0.018733119964599608, 0.018628864288330077, 0.018670400619506835, 0.018710527420043945, 0.018918272018432616, 0.018571231842041017, 0.018412832260131837, 0.018470848083496094, 0.01838371276855469, 0.018323423385620118, 0.018042560577392577, 0.017882944107055664, 0.01795123291015625, 0.017831808090209962, 0.01785625648498535, 0.017836416244506836, 0.01785206413269043, 0.01781491279602051, 0.01791689682006836, 0.018007167816162108, 0.017929056167602538, 0.018064863204956056, 0.01795129585266113, 0.017974880218505858, 0.017905248641967773, 0.018010751724243164, 0.017913728713989257, 0.017920320510864257, 0.01792854309082031, 0.018087072372436522, 0.018312000274658204, 0.01863680076599121, 0.01884547233581543, 0.01850332832336426, 0.018425567626953125, 0.018295648574829102, 0.0181711368560791, 0.018076416015625, 0.018091552734375, 0.018323392868041993, 0.017973888397216798, 0.019459999084472657, 0.018114303588867186, 0.018006336212158202, 0.018280384063720703, 0.01801366424560547, 0.017959583282470704, 0.0182291202545166, 0.01833087921142578, 0.01811942481994629, 0.01926380729675293, 0.018413248062133788, 0.018160991668701172, 0.01817888069152832, 0.018222944259643555, 0.018433664321899416, 0.018229631423950194, 0.019092832565307617, 0.018766464233398436, 0.018614303588867186, 0.018473024368286132, 0.01861030387878418, 0.01933401679992676, 0.019438623428344726, 0.01968230438232422, 0.018762304306030275, 0.01846681594848633, 0.01818454360961914, 0.018422815322875978, 0.01789411163330078, 0.01792639923095703, 0.018190336227416993, 0.018378751754760742, 0.018219200134277344, 0.01799884796142578, 0.017906496047973633, 0.017922048568725587, 0.017897087097167967, 0.017891359329223634, 0.01800831985473633, 0.018032127380371094, 0.018299615859985352, 0.018300800323486327, 0.018249727249145507, 0.018130048751831055, 0.018365312576293945, 0.018343584060668945, 0.018393407821655272, 0.018468544006347655, 0.018223424911499024, 0.018124832153320312, 0.0180982723236084, 0.018126144409179687, 0.018141824722290038, 0.01901568031311035, 0.018462400436401367, 0.018114879608154298, 0.01816166305541992, 0.01819379234313965, 0.01808563232421875, 0.018104352951049806, 0.018232416152954102, 0.018239231109619142, 0.018585599899291993, 0.018135040283203126, 0.018251840591430663, 0.01834592056274414, 0.018346176147460938, 0.0183621768951416, 0.018786304473876952, 0.019564544677734375, 0.018759679794311524, 0.01868560028076172, 0.018627199172973632, 0.01886012840270996, 
0.018640512466430663, 0.01861984062194824, 0.018639423370361327, 0.01872467231750488, 0.018883039474487304, 0.018880224227905272, 0.018796768188476563, 0.018868000030517577, 0.018724863052368163, 0.018819103240966795, 0.019064767837524414, 0.018784223556518556, 0.018669408798217775, 0.018666784286499025, 0.018643903732299804, 0.018905248641967774, 0.019111072540283203, 0.018805440902709962, 0.01882316780090332, 0.018759424209594727, 0.018876384735107422, 0.018817119598388672, 0.018800832748413085, 0.018894847869873048, 0.0187391357421875, 0.01873721694946289, 0.018755584716796874, 0.01872617530822754, 0.01858563232421875, 0.018837535858154297, 0.018987520217895508, 0.018685247421264647, 0.018786815643310546, 0.019511680603027343, 0.01848931121826172, 0.018337791442871093, 0.018595199584960937, 0.018342527389526367, 0.01827052879333496, 0.01834592056274414, 0.017897024154663085, 0.018120319366455077, 0.018475648880004882, 0.0180118408203125, 0.017993791580200195, 0.017995647430419922, 0.018128896713256838, 0.0182728328704834, 0.01823686408996582, 0.01834454345703125, 0.018230464935302733, 0.0181911678314209, 0.018081375122070312, 0.018038335800170897, 0.018010688781738282, 0.018067968368530272, 0.018048704147338866, 0.017905344009399415, 0.017852832794189453, 0.017952768325805665, 0.01782809638977051, 0.017860416412353516, 0.017888639450073244, 0.017857088088989257, 0.017838336944580077, 0.01788003158569336, 0.01787766456604004, 0.017918399810791016, 0.017970783233642578, 0.017916095733642577, 0.01792195129394531, 0.017905664443969727, 0.01795686340332031, 0.017955007553100585, 0.01814713668823242, 0.0182807674407959, 0.018402687072753908, 0.01845689582824707, 0.018367712020874023, 0.018315935134887697, 0.018139263153076172, 0.01802604866027832, 0.017954336166381837, 0.018262943267822265, 0.01809212875366211, 0.018159008026123045, 0.01830963134765625, 0.01858723258972168, 0.018257759094238282, 0.018180992126464842, 0.0182609920501709, 0.018119552612304687, 0.018253631591796875, 0.018354175567626953, 0.018612192153930663, 0.018640512466430663, 0.018073888778686525, 0.0180184326171875, 0.018083200454711915, 0.018041311264038087, 0.018000032424926756, 0.01803264045715332, 0.01838876724243164, 0.01776470375061035, 0.018001920700073244, 0.0177903995513916, 0.01799635124206543, 0.017934335708618163, 0.018020383834838866, 0.017950687408447265, 0.017817024230957032, 0.017854463577270507, 0.01785094451904297, 0.018098175048828127, 0.017834175109863282, 0.017806943893432618, 0.017901952743530274, 0.01787228775024414, 0.017864831924438475, 0.017879648208618162, 0.017778495788574218, 0.0177806396484375, 0.017905664443969727, 0.017751455307006836, 0.017988351821899413, 0.01821673583984375, 0.018307167053222655, 0.01840025520324707, 0.01846540832519531, 0.018581855773925782, 0.01848320007324219, 0.018308095932006836, 0.018222272872924803, 0.01930441665649414, 0.020285343170166014, 0.018415231704711914, 0.018433664321899416, 0.018299455642700194, 0.01858572769165039, 0.018651264190673828, 0.01947225570678711, 0.018766048431396485, 0.01884752082824707, 0.01882931137084961, 0.0188538875579834, 0.018677759170532226, 0.018625663757324218, 0.018721664428710937, 0.018924768447875977, 0.018773792266845703, 0.018637823104858398, 0.0186943359375, 0.018739007949829103, 0.01884160041809082, 0.01868185615539551, 0.019314176559448244, 0.020423168182373046, 0.018675167083740233, 0.018636863708496095, 0.01875119972229004, 0.018706880569458007, 0.01865532875061035, 0.018790624618530274, 0.01884172821044922, 
0.0187674560546875, 0.018928192138671876, 0.018383007049560546, 0.01911180877685547, 0.0189833927154541, 0.018937856674194335, 0.01876291275024414, 0.018572128295898438, 0.01861222457885742, 0.018558143615722656, 0.01867804718017578, 0.018591327667236326, 0.018519296646118164, 0.018783935546875, 0.018552671432495116, 0.018382944107055665, 0.018494592666625977, 0.01835523223876953, 0.018136192321777343, 0.01818448066711426, 0.01776255989074707, 0.017764671325683594, 0.017987039566040038, 0.01781920051574707, 0.017826719284057616, 0.017722496032714842, 0.018014528274536132, 0.018021184921264647, 0.01789107131958008, 0.01817190361022949, 0.017748064041137695, 0.017639328002929687, 0.017708703994750975, 0.017740224838256834, 0.01771244812011719, 0.017648416519165037, 0.01762006378173828, 0.01767193603515625, 0.01765475273132324, 0.017624576568603514, 0.01774473571777344, 0.01795552062988281, 0.01800217628479004, 0.01906537628173828, 0.01839023971557617, 0.018117599487304688, 0.01790979194641113, 0.01779836845397949, 0.017554176330566405, 0.017498111724853514, 0.017541120529174805, 0.01750204849243164, 0.017576095581054687, 0.017473407745361328, 0.017583360671997072, 0.017730432510375975, 0.017459199905395507, 0.01755673599243164, 0.017492128372192384, 0.017465408325195313, 0.017443679809570314, 0.017476959228515623, 0.01744316864013672, 0.01854182434082031, 0.02026380729675293, 0.01785241508483887, 0.017913856506347657, 0.01781171226501465, 0.017888256072998047, 0.0178920955657959, 0.018091936111450196, 0.018077152252197266, 0.01778879928588867, 0.017787647247314454, 0.017837087631225587, 0.017953760147094728, 0.01787654495239258, 0.017920480728149415, 0.017924064636230468, 0.017985919952392578, 0.018091039657592775, 0.018255552291870116, 0.01819139289855957, 0.018390815734863283, 0.018151679992675782, 0.018032480239868164, 0.017912128448486327, 0.017702592849731445, 0.017741823196411134, 0.017956159591674806, 0.01765635108947754, 0.01760892868041992, 0.01790355110168457, 0.017641471862792968, 0.017598207473754884, 0.017674495697021484, 0.017769887924194337, 0.01776291275024414, 0.01880419158935547, 0.017959455490112304, 0.017896575927734373, 0.01782054328918457, 0.017795072555541993, 0.017899168014526366, 0.01798793601989746, 0.017823808670043944, 0.017712736129760744, 0.017670495986938477, 0.01768684768676758, 0.017811199188232422, 0.017799104690551758, 0.017862655639648437, 0.017958303451538087, 0.017893503189086914, 0.01786419105529785, 0.01778278350830078, 0.017738719940185547, 0.017628576278686522, 0.017582687377929687, 0.017566848754882812, 0.01768886375427246, 0.01773833656311035, 0.017812896728515625, 0.017921760559082033, 0.018150272369384764, 0.018296640396118165, 0.01841119956970215, 0.01855539131164551, 0.01799577522277832, 0.018405376434326173, 0.018394784927368166, 0.019083295822143555, 0.018364736557006836, 0.018322751998901366, 0.018330144882202148, 0.018512224197387694, 0.01842355155944824, 0.018923583984375, 0.01856208038330078, 0.018430944442749023, 0.018400543212890624, 0.01830790328979492, 0.01833683204650879, 0.018536895751953126, 0.01825833511352539, 0.018232608795166017, 0.01853308868408203, 0.018335039138793946, 0.01840358352661133, 0.018372671127319336, 0.01834432029724121, 0.018550048828125, 0.019481311798095702, 0.018677312850952147, 0.01840787124633789, 0.018368703842163086, 0.0183438720703125, 0.018482847213745116, 0.018645248413085937, 0.01839321517944336, 0.01832943916320801, 0.018191423416137695, 0.01813804817199707, 0.01804902458190918, 0.01799100875854492, 
0.01777110481262207, 0.017710847854614256, 0.017543807983398437, 0.017491552352905275, 0.017647712707519532, 0.017475584030151366, 0.017506048202514647, 0.017506111145019532, 0.01751215934753418, 0.01749068832397461, 0.01767523193359375, 0.017627967834472656, 0.017670431137084962, 0.018237344741821288, 0.01777471923828125, 0.017827423095703124, 0.017889568328857422, 0.017712480545043947, 0.017781408309936523, 0.017743072509765624, 0.017806112289428713, 0.017590240478515626, 0.017645280838012697, 0.017813823699951173, 0.017699935913085937, 0.017898399353027342, 0.01773036766052246, 0.01773664093017578, 0.017696767807006835, 0.01762713623046875, 0.017508352279663086, 0.017661951065063478, 0.017679647445678712, 0.01758451271057129, 0.017727840423583986, 0.017946624755859376, 0.017541120529174805, 0.017841728210449218, 0.01805766487121582, 0.018256927490234377, 0.018207584381103516, 0.020897823333740233, 0.018595935821533204, 0.01809119987487793, 0.017761375427246092, 0.01776406478881836, 0.017780895233154296, 0.01760268783569336, 0.017775583267211913, 0.017532831192016603, 0.017599327087402344, 0.017719615936279298, 0.017599903106689452, 0.01758172798156738, 0.01758019256591797, 0.017506784439086914, 0.017579616546630858, 0.01758835220336914, 0.017493919372558595, 0.01833203125, 0.017549312591552735, 0.017485408782958983, 0.017561792373657226, 0.01763555145263672, 0.017530879974365234, 0.017729536056518554, 0.017982784271240233, 0.017863359451293945, 0.017735551834106446, 0.017756288528442382, 0.01785638427734375, 0.01765830421447754, 0.017761951446533204, 0.01769424057006836, 0.01776896095275879, 0.01765376091003418, 0.017602752685546875, 0.01746073532104492, 0.017531423568725585, 0.017544992446899416, 0.017481727600097655, 0.01754681587219238, 0.017637760162353515, 0.01797283172607422, 0.01764352035522461, 0.017650400161743164, 0.01763699150085449, 0.017601760864257812, 0.017681312561035157, 0.01739967918395996, 0.01804966354370117, 0.01767203140258789, 0.01787446403503418, 0.017766271591186523, 0.01776291275024414, 0.017822719573974608, 0.01904332733154297, 0.018176416397094726, 0.018220640182495116, 0.017909759521484374, 0.01788703918457031, 0.017811647415161135, 0.01784012794494629, 0.01788083267211914, 0.017852672576904295, 0.018157215118408204, 0.017708511352539064, 0.017875839233398437, 0.017954879760742188, 0.018229183197021485, 0.018266111373901366, 0.018427072525024416, 0.018518783569335937, 0.018513599395751954, 0.018534175872802733, 0.018430496215820314, 0.01847324752807617, 0.018398271560668946, 0.01836310386657715, 0.018354175567626953, 0.018531391143798828, 0.018539072036743164, 0.020104703903198243, 0.02005081558227539, 0.0187108154296875, 0.018590944290161133, 0.018887168884277345, 0.018315263748168945, 0.01843404769897461, 0.018321279525756837, 0.018527551651000975, 0.01856384086608887, 0.01854080009460449, 0.018419519424438476, 0.01838307189941406, 0.01862224006652832, 0.018694143295288086, 0.01859584045410156, 0.018616031646728516, 0.018655584335327147, 0.018589599609375, 0.01870649528503418, 0.018569536209106445, 0.018463903427124024, 0.018489824295043946, 0.01838697624206543, 0.018337791442871093, 0.018305088043212892, 0.018067136764526367, 0.018082048416137694, 0.021626752853393556, 0.019337343215942382]",tokens/s,54.85355384269742,, 
4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,1066.61888,1126.039552,0.0,723.51744,703.269888,s,1,8.76661328125,8.76661328125,0.0,8.76661328125,8.76661328125,8.76661328125,8.76661328125,[8.76661328125],,kWh,4.0589966695839995e-05,4.4700162120730205e-06,1.2873621410003366e-05,5.793360431791638e-05,,MB,1490.976768,1339.949056,0.0,924.844032,869.297664,s,10,0.6054632606506347,0.06054632606506347,0.0003514644092670226,0.06047796821594238,0.06111801109313965,0.06116118221282959,0.06119571910858154,"[0.06120435333251953, 0.060482398986816406, 0.060477439880371096, 0.06110841751098633, 0.059965534210205076, 0.060478496551513675, 0.060251678466796875, 0.06066214370727539, 0.06038828659057617, 0.060444511413574216]",tokens/s,4228.167366008315,kWh,1.8557885904274257e-06,2.0465496518506204e-07,1.2278121636835335e-06,3.288255719296021e-06,tokens/kWh,77852825.89117089,MB,1523.539968,1381.892096,0.0,966.787072,869.300224,s,10,22.979126953125,2.2979126953125,0.008628629318359341,2.2974385986328123,2.3069375244140624,2.310122570800781,2.312670607910156,"[2.306229736328125, 2.3133076171875, 2.3055244140625, 2.2969658203125, 2.297903564453125, 2.2969736328125, 2.28595068359375, 2.288580810546875, 2.28625, 2.301440673828125]",tokens/s,27.416185187763382,kWh,6.668819692831996e-05,7.355417430820648e-06,2.6794626920916857e-05,0.00010083824128005746,tokens/kWh,624762.9788091055,,s,630,22.975974033355733,0.036469800052945575,0.00045617310959380965,0.03636752128601074,0.03682037658691406,0.03709775867462158,0.03884510696411133,"[0.03633500671386719, 0.036639392852783205, 0.036566688537597654, 0.03656246566772461, 0.03642806243896484, 0.03660240173339844, 0.036368064880371094, 0.036432193756103515, 0.03661619186401367, 0.036362239837646484, 0.03637801742553711, 0.03660451126098633, 0.03655680084228516, 0.03660591888427735, 0.036433952331542965, 0.03678937530517578, 0.036459007263183595, 0.03639334487915039, 0.03622719955444336, 0.03646780776977539, 0.03650620651245117, 0.0365733757019043, 0.03630390548706055, 0.03640537643432617, 0.036415870666503904, 0.03643360137939453, 0.03643881607055664, 0.03663052749633789, 0.036423553466796876, 0.03661836624145508, 0.03638272094726563, 0.03739033508300781, 0.036659198760986327, 0.03644979095458984, 0.03654054260253906, 0.036518272399902345, 0.0363765754699707, 0.03661619186401367, 0.03885465621948242, 0.03732643127441406, 0.037046623229980466, 0.0365847053527832, 0.0368884162902832, 0.03659695816040039, 0.036693759918212894, 0.03690496063232422, 0.036847614288330076, 0.03644400024414062, 0.036788383483886716, 0.036501758575439455, 0.03665036773681641, 0.03659404754638672, 0.0365335693359375, 0.036455104827880856, 0.03665510559082031, 0.036462593078613284, 0.03670399856567383, 0.0364403190612793, 0.03648102569580078, 0.0365404167175293, 0.03645990371704102, 0.03636697769165039, 0.03659366226196289, 0.0362935676574707, 0.036395008087158204, 
0.03736073684692383, 0.03632812881469726, 0.03641350555419922, 0.03733110427856445, 0.036722686767578124, 0.03664633560180664, 0.036865791320800784, 0.03687916946411133, 0.036523136138916015, 0.03692585754394531, 0.03975551986694336, 0.03713087844848633, 0.036462017059326175, 0.03653289413452149, 0.0368353271484375, 0.03643427276611328, 0.03626057434082031, 0.03644655990600586, 0.03639561462402344, 0.03719782257080078, 0.036425537109375, 0.03656108856201172, 0.03681280136108398, 0.0363458251953125, 0.036568382263183596, 0.036451038360595704, 0.036789249420166016, 0.0364400634765625, 0.036582401275634766, 0.036687454223632815, 0.03663123321533203, 0.03672444915771484, 0.037185535430908204, 0.03702710342407227, 0.036649696350097655, 0.03649126434326172, 0.036550655364990234, 0.03681625747680664, 0.03644208145141602, 0.036753952026367186, 0.03675503921508789, 0.0366374397277832, 0.03681155014038086, 0.03648937606811523, 0.03670214462280273, 0.036381599426269534, 0.03749385452270508, 0.03654339218139648, 0.036657150268554685, 0.03636358261108399, 0.03630969619750977, 0.036427295684814454, 0.03784751892089844, 0.03647660827636719, 0.03665478515625, 0.036549247741699216, 0.03631292724609375, 0.03667779159545898, 0.03659571075439453, 0.036550655364990234, 0.03670425415039062, 0.035971969604492185, 0.03639091110229492, 0.036267040252685546, 0.03623011016845703, 0.03622707366943359, 0.03626803207397461, 0.036585472106933595, 0.03631417465209961, 0.03619887924194336, 0.036550689697265625, 0.03617222213745117, 0.03697459030151367, 0.03673907089233398, 0.036206592559814454, 0.03630694580078125, 0.03630694580078125, 0.03640115356445312, 0.03632060623168945, 0.03639961624145508, 0.03641974258422852, 0.036415424346923825, 0.03649728012084961, 0.03640544128417969, 0.03629056167602539, 0.03659724807739258, 0.03710003280639648, 0.036931583404541016, 0.03659724807739258, 0.038488574981689457, 0.03784908676147461, 0.03754547119140625, 0.036716705322265626, 0.03643958282470703, 0.03639993667602539, 0.03658121490478516, 0.03650166320800781, 0.03641548919677735, 0.036587520599365236, 0.036534271240234374, 0.0365219841003418, 0.037285888671875, 0.03652608108520508, 0.03649852752685547, 0.03709619140625, 0.03653817749023437, 0.036413536071777344, 0.03629609680175781, 0.03637241744995117, 0.03630281448364258, 0.036385726928710935, 0.03752755355834961, 0.03690444946289063, 0.03634966278076172, 0.036490016937255856, 0.03660534286499023, 0.03645500946044922, 0.03670336151123047, 0.03640927886962891, 0.03642668914794922, 0.036601856231689454, 0.03658342361450195, 0.03672883224487305, 0.03703180694580078, 0.03690304183959961, 0.036808353424072265, 0.03648966217041016, 0.03690460968017578, 0.036305248260498045, 0.036365825653076174, 0.036552928924560545, 0.03642755126953125, 0.03616953659057617, 0.036364574432373044, 0.03632547378540039, 0.03621823883056641, 0.039564449310302736, 0.03620022583007813, 0.03653222274780273, 0.03657721710205078, 0.037057758331298825, 0.03642211151123047, 0.037257598876953124, 0.03629072189331055, 0.036564319610595704, 0.03644057464599609, 0.036313087463378906, 0.0362927360534668, 0.03629452896118164, 0.036311038970947264, 0.03630080032348633, 0.03617715072631836, 0.03633011245727539, 0.03621286392211914, 0.036329471588134765, 0.03625388717651367, 0.03625894546508789, 0.03619091033935547, 0.036469791412353514, 0.03624854278564453, 0.036437313079833986, 0.0366190071105957, 0.0362184944152832, 0.036182334899902344, 0.03626825714111328, 0.0363579216003418, 0.03637452697753906, 
0.036278270721435545, 0.03646003341674805, 0.03673548889160156, 0.03662233734130859, 0.036283519744873045, 0.03638726425170898, 0.036189727783203125, 0.03634262466430664, 0.03644163131713867, 0.03625983810424805, 0.036325920104980466, 0.03633356857299805, 0.036239009857177734, 0.03676156616210938, 0.036297313690185545, 0.036214561462402345, 0.03635532760620117, 0.036354305267333985, 0.03635196685791016, 0.036216575622558596, 0.036400001525878904, 0.03638886260986328, 0.03611340713500977, 0.03616572952270508, 0.036364608764648435, 0.0362665901184082, 0.03623920059204101, 0.03663203048706055, 0.036391422271728514, 0.03657241439819336, 0.03608572769165039, 0.03637676620483398, 0.03637123107910156, 0.03650764846801758, 0.03630284881591797, 0.03620454406738281, 0.03681894302368164, 0.03641548919677735, 0.03634995269775391, 0.03634380722045898, 0.03668582534790039, 0.03626931381225586, 0.036301567077636716, 0.03632128143310547, 0.036312736511230466, 0.03635235214233398, 0.03664486312866211, 0.036203582763671874, 0.03634483337402344, 0.03643929672241211, 0.03611910247802735, 0.036337406158447265, 0.03622895812988281, 0.03615798568725586, 0.036767967224121095, 0.03617769622802734, 0.03650681686401367, 0.03633145523071289, 0.03643695831298828, 0.03630633544921875, 0.036448097229003905, 0.03641206359863281, 0.03636431884765625, 0.0362454719543457, 0.03649945449829101, 0.036362239837646484, 0.03700067138671875, 0.036349536895751954, 0.03668255996704101, 0.036327552795410153, 0.03629830551147461, 0.036285888671875, 0.036477664947509765, 0.03928886413574219, 0.03717113494873047, 0.03641737747192383, 0.03623369598388672, 0.036275199890136715, 0.03636243057250976, 0.036303359985351565, 0.03695014572143555, 0.03633961486816406, 0.03792310333251953, 0.03667763137817383, 0.03674726486206055, 0.03636576080322266, 0.03649388885498047, 0.036270431518554684, 0.03620828628540039, 0.036262977600097654, 0.03629296112060547, 0.03640585708618164, 0.03626959991455078, 0.03618454360961914, 0.03645756912231445, 0.03621958541870117, 0.03619452667236328, 0.03612057495117187, 0.036423553466796876, 0.0366429443359375, 0.03633356857299805, 0.03625686264038086, 0.0362691535949707, 0.036243263244628905, 0.03628265762329102, 0.03617526245117188, 0.03624787139892578, 0.036447265625, 0.036272926330566405, 0.036286048889160157, 0.03804220962524414, 0.03649126434326172, 0.03641753768920898, 0.0363765754699707, 0.036577537536621095, 0.03625958251953125, 0.03622707366943359, 0.0363842887878418, 0.03634128189086914, 0.03628470230102539, 0.036260513305664065, 0.03625078582763672, 0.03658019256591797, 0.036238529205322265, 0.03684844970703125, 0.040130561828613284, 0.03631257629394531, 0.03663257598876953, 0.03653887939453125, 0.036345855712890625, 0.03632774353027344, 0.036474559783935545, 0.03621014404296875, 0.03649385452270508, 0.03671654510498047, 0.03665100860595703, 0.036362239837646484, 0.036286720275878905, 0.0364870719909668, 0.036167518615722656, 0.03633750534057617, 0.036638622283935544, 0.03624534225463867, 0.03618857574462891, 0.036190208435058595, 0.03628227233886719, 0.03657708740234375, 0.03686604690551758, 0.03633961486816406, 0.03621692657470703, 0.0374128646850586, 0.036022209167480466, 0.036085281372070316, 0.03611497497558594, 0.036087841033935544, 0.03614716720581055, 0.035969024658203126, 0.03595206451416016, 0.036141632080078125, 0.03605836868286133, 0.03618057632446289, 0.03706281661987305, 0.03607580947875977, 0.03612847900390625, 0.03625574493408203, 0.03598099136352539, 0.035955009460449217, 
0.03627788925170899, 0.036219200134277346, 0.035991455078125, 0.03678019332885742, 0.03611264038085937, 0.036535839080810546, 0.03621027374267578, 0.036358783721923825, 0.03656415939331055, 0.03629129409790039, 0.036329120635986326, 0.03624800109863281, 0.03622012710571289, 0.03653059387207031, 0.036456512451171874, 0.03633935928344727, 0.03645711898803711, 0.03611155319213867, 0.03624787139892578, 0.03642828750610352, 0.036423423767089846, 0.0372262077331543, 0.036477535247802735, 0.036319168090820315, 0.03645644760131836, 0.03616172790527344, 0.03608966445922852, 0.0361058235168457, 0.03622134399414063, 0.036157440185546875, 0.036251007080078126, 0.03618060684204102, 0.03625983810424805, 0.036157440185546875, 0.036222976684570314, 0.036130496978759766, 0.0361267204284668, 0.03611270523071289, 0.036063232421875, 0.0360709114074707, 0.03606784057617188, 0.03602022552490235, 0.036620288848876956, 0.03653836822509766, 0.03642800140380859, 0.036253471374511716, 0.036124671936035156, 0.03642367935180664, 0.03620249557495117, 0.036081790924072266, 0.03626176071166992, 0.036257793426513675, 0.03607551956176758, 0.03627040100097656, 0.03622675323486328, 0.03628646469116211, 0.03879116821289062, 0.03625564956665039, 0.036261695861816406, 0.0369664306640625, 0.036267711639404294, 0.03639932632446289, 0.03603081512451172, 0.03616092681884766, 0.03610275268554688, 0.036208641052246096, 0.03622092819213867, 0.03613267135620117, 0.03607936096191406, 0.03607392120361328, 0.035970046997070314, 0.038972415924072266, 0.03649919891357422, 0.03619251251220703, 0.03621292877197266, 0.03660291290283203, 0.03617174530029297, 0.03625062561035156, 0.036226879119873046, 0.0360079345703125, 0.03625961685180664, 0.03627030563354492, 0.03621683120727539, 0.036144161224365236, 0.03652707290649414, 0.03637004852294922, 0.036208831787109375, 0.035987998962402346, 0.036239009857177734, 0.03613062286376953, 0.03609747314453125, 0.03609174346923828, 0.03609487915039063, 0.0360816650390625, 0.03626377487182617, 0.036316734313964844, 0.03617033767700195, 0.03614656066894531, 0.03605535888671875, 0.0362305908203125, 0.03628854370117188, 0.03618902587890625, 0.03626326370239258, 0.0363711051940918, 0.036155391693115234, 0.03654655838012695, 0.03647283172607422, 0.036413440704345705, 0.03615948867797852, 0.035952640533447267, 0.036508991241455076, 0.03623376083374023, 0.0368559684753418, 0.03624755096435547, 0.03624480056762695, 0.0360843505859375, 0.03635411071777344, 0.03617792129516602, 0.03614096069335938, 0.03594249725341797, 0.036560897827148435, 0.03621231842041016, 0.036171295166015624, 0.036155296325683595, 0.03618300628662109, 0.03612057495117187, 0.03620454406738281, 0.035996929168701175, 0.03611724853515625, 0.036431873321533206, 0.03626150512695313, 0.036022655487060545, 0.03611238479614258, 0.03606703948974609, 0.036317470550537106, 0.03631465530395508, 0.03648067092895508, 0.036297054290771485, 0.03617839813232422, 0.03619392013549805, 0.036469120025634766, 0.036378623962402344, 0.03630284881591797, 0.03691843032836914, 0.03637334442138672, 0.0361959342956543, 0.03627017593383789, 0.03647110366821289, 0.03621478271484375, 0.03615856170654297, 0.03602115249633789, 0.03602022552490235, 0.03596486282348633, 0.03620159912109375, 0.03620451354980469, 0.037067073822021485, 0.03652057647705078, 0.036334911346435544, 0.036241344451904293, 0.03621270370483398, 0.036248382568359376, 0.03626710510253906, 0.03627840042114258, 0.0362567367553711, 0.03626784133911133, 0.03623116683959961, 0.03622092819213867, 
0.03687811279296875, 0.03666262435913086, 0.03658793640136719, 0.036362239837646484, 0.03621478271484375, 0.03683327865600586, 0.03939276885986328, 0.03671068954467773, 0.03635756683349609, 0.03642556762695313, 0.03722940826416016, 0.03624524688720703, 0.03640102386474609, 0.03661590576171875, 0.036602336883544924, 0.03661238479614258, 0.03689436721801758, 0.03631958389282226, 0.036296222686767576, 0.03634223937988281, 0.03625983810424805, 0.03649740982055664, 0.036378623962402344, 0.036351200103759765, 0.036389663696289064, 0.03625574493408203, 0.036261886596679685, 0.036588672637939454, 0.03647516632080078, 0.03654921722412109, 0.0361778564453125, 0.03619750213623047, 0.03620249557495117, 0.036207553863525394, 0.03709904098510742, 0.036401630401611325, 0.03627862548828125, 0.036311775207519534, 0.03630771255493164, 0.036456321716308596, 0.036300159454345705, 0.03630995178222656, 0.03652608108520508, 0.03625305557250977, 0.038821727752685546, 0.037077728271484374, 0.03644409561157227, 0.03656307220458985, 0.0362239990234375, 0.036139358520507814, 0.03638748931884766, 0.03643715286254883, 0.0364040641784668, 0.036843135833740236, 0.03647305679321289, 0.03610988616943359, 0.03633321762084961, 0.036641727447509764, 0.03634995269775391, 0.03630668640136719, 0.036458751678466794, 0.03634995269775391, 0.03619839859008789, 0.03640864181518555]",tokens/s,27.419947423573344,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,823.087104,4724.752384,0.0,4322.230272,4218.036736,s,1,14.3262373046875,14.3262373046875,0.0,14.3262373046875,14.3262373046875,14.3262373046875,14.3262373046875,[14.3262373046875],,kWh,0.00019770394714583212,2.180094625431473e-05,6.310727270799482e-05,0.0002826121661081417,,MB,1357.438976,5379.063808,0.0,4963.958784,4656.87808,s,10,9.594007751464844,0.9594007751464844,0.005139990143652389,0.9599226684570312,0.9643399719238281,0.9651019012451172,0.9657114447021484,"[0.9463994140625, 0.9564050903320312, 0.9589148559570313, 0.9579970703125, 0.9609810180664062, 0.9596262817382812, 0.9602190551757812, 0.9634304809570312, 0.964170654296875, 0.9658638305664062]",tokens/s,266.8332219774506,kWh,2.8023868409092177e-05,3.0905487535762713e-06,1.8506378441453883e-05,4.962079560412234e-05,tokens/kWh,5159127.274830159,MB,1394.540544,5381.16096,0.0,4963.958784,4656.88064,s,10,45.261849609375,4.5261849609375,0.002791975721835822,4.526523681640625,4.52946298828125,4.52989189453125,4.53023501953125,"[4.52936767578125, 4.52329296875, 4.527078125, 4.52175732421875, 4.52278125, 4.52758935546875, 4.53032080078125, 4.528779296875, 4.52596923828125, 4.52491357421875]",tokens/s,13.91900696584678,kWh,0.00013233702027132369,1.4597732619324528e-05,8.761098423014653e-05,0.0002345457371207947,tokens/kWh,268604.32755404984,,s,630,45.258734519958466,0.07183926114279127,0.0012926589829579554,0.07162756729125977,0.07194241180419922,0.07232740249633789,0.08140313293457031,"[0.08300605010986328, 0.073238525390625, 0.07239699554443359, 0.07197686767578125, 0.0715181121826172, 0.0713543701171875, 0.07146905517578125, 0.07131302642822265, 0.07144882965087891, 0.07142950439453125, 0.07133875274658204, 0.07149481964111327, 0.0714947509765625, 0.07146380615234375, 0.07150470733642578, 0.07156463623046876, 0.0715261459350586, 
0.0715663070678711, 0.0714993896484375, 0.07156159973144531, 0.07152845001220703, 0.0714826889038086, 0.0716025619506836, 0.07162297821044922, 0.07155506896972656, 0.07163075256347656, 0.07160169219970704, 0.07158217620849609, 0.07179654693603515, 0.07164041900634766, 0.07194102478027344, 0.07161859130859374, 0.07192166137695312, 0.07155926513671874, 0.07157955169677735, 0.07151213073730468, 0.07164412689208985, 0.07170275115966797, 0.07161264038085938, 0.07157523345947266, 0.07183241271972657, 0.07170047760009765, 0.07158169555664062, 0.07173519897460938, 0.071780029296875, 0.07171932983398438, 0.072015869140625, 0.07174928283691406, 0.07171311950683594, 0.07170047760009765, 0.072052734375, 0.0720711669921875, 0.07188275146484376, 0.0718888931274414, 0.071729248046875, 0.07176169586181641, 0.07172313690185547, 0.07191961669921874, 0.0719731216430664, 0.07185177612304687, 0.07190322875976562, 0.07189421081542968, 0.07198953247070312, 0.08104780578613281, 0.07304009246826172, 0.07233023834228515, 0.07193638610839843, 0.07124201965332032, 0.07128854370117188, 0.07124031829833985, 0.07128809356689453, 0.071436767578125, 0.07148365020751953, 0.07139532470703125, 0.07136003112792969, 0.07142848205566406, 0.07146067047119141, 0.0714959716796875, 0.07138114929199219, 0.07135772705078125, 0.0714163818359375, 0.07139059448242188, 0.07134272003173828, 0.0722903060913086, 0.07151821136474609, 0.07178800201416016, 0.07147164916992188, 0.07155030059814453, 0.07154895782470704, 0.07171244812011719, 0.07165151977539062, 0.07162051391601562, 0.0718795166015625, 0.07155097961425781, 0.0714997787475586, 0.07161561584472656, 0.07160889434814453, 0.0716016616821289, 0.0715696029663086, 0.07194480133056641, 0.07159750366210937, 0.07155773162841797, 0.07155712127685547, 0.0716488037109375, 0.07171939086914063, 0.07160399627685547, 0.07170480346679688, 0.07159795379638671, 0.07168370819091797, 0.07166393280029297, 0.0716126708984375, 0.07166764831542968, 0.0717127685546875, 0.07169404602050782, 0.07168438720703126, 0.07176112365722656, 0.07174972534179687, 0.07176262664794922, 0.07175532531738281, 0.07162095642089844, 0.07173513793945313, 0.07191340637207032, 0.07180060577392579, 0.07189353942871093, 0.0717496337890625, 0.07176630401611328, 0.08138166046142578, 0.07279615783691407, 0.07266480255126953, 0.07179293060302734, 0.07131903839111328, 0.07136921691894531, 0.07135794830322266, 0.07144908905029297, 0.07148707580566406, 0.07160870361328125, 0.07154220581054688, 0.07137545776367188, 0.07148057556152344, 0.07151078033447265, 0.07148480224609376, 0.07143488311767578, 0.07138438415527344, 0.07148598480224609, 0.07143020629882812, 0.07137689971923829, 0.07150192260742187, 0.07153049468994141, 0.07156470489501954, 0.07138896179199218, 0.07149967956542969, 0.07142825317382813, 0.07174156951904297, 0.07147532653808594, 0.0714961929321289, 0.07167378997802734, 0.07159315490722656, 0.07149827575683594, 0.071666015625, 0.071642333984375, 0.07156771087646484, 0.07156781005859375, 0.07161622619628906, 0.07159600067138672, 0.07165369415283203, 0.07171481323242188, 0.0718662109375, 0.07170233917236328, 0.07172950744628906, 0.0717984619140625, 0.07171234893798828, 0.07166537475585938, 0.07182406616210937, 0.07175523376464844, 0.07175981140136718, 0.07177513885498046, 0.07170467376708985, 0.0716789779663086, 0.07155961608886718, 0.07180681610107421, 0.07182160186767578, 0.07182575988769531, 0.07180083465576172, 0.071804931640625, 0.0718106231689453, 0.07178899383544922, 0.07183090972900391, 0.07478131103515626, 
0.07182774353027344, 0.08111923217773437, 0.07276739501953125, 0.07198713684082031, 0.07166547393798828, 0.07124732971191407, 0.07135116577148437, 0.0712642593383789, 0.07130521392822266, 0.07142195129394531, 0.07128028869628907, 0.07144473266601563, 0.07143344116210938, 0.07138361358642578, 0.07136854553222656, 0.07137532806396485, 0.07132300567626954, 0.07130995178222656, 0.07133798217773438, 0.07142195129394531, 0.07136838531494141, 0.07138566589355469, 0.07144217681884765, 0.07138713836669922, 0.0713636474609375, 0.071484130859375, 0.07173244476318359, 0.07162694549560547, 0.07161939239501953, 0.07161798095703124, 0.07154950714111329, 0.07149568176269532, 0.07147929382324218, 0.07152025604248047, 0.07148748779296875, 0.07318323516845703, 0.07151821136474609, 0.0715489273071289, 0.07158697509765626, 0.07154774475097657, 0.07161856079101563, 0.07161347198486329, 0.0717694091796875, 0.07174873352050781, 0.0717890853881836, 0.07174873352050781, 0.07171981048583985, 0.0717801284790039, 0.07157068634033203, 0.07172726440429687, 0.07179747009277344, 0.07170047760009765, 0.07165654754638671, 0.07164643096923828, 0.07163676452636719, 0.07167327880859375, 0.07166770935058593, 0.07173587036132813, 0.07177216339111328, 0.0718888931274414, 0.07179468536376953, 0.0720540771484375, 0.07182406616210937, 0.07176553344726562, 0.08141190338134766, 0.07326934051513671, 0.07193113708496093, 0.07232393646240234, 0.0712437744140625, 0.07131958770751953, 0.07128463745117188, 0.07135369873046875, 0.07141449737548829, 0.0713704605102539, 0.07130060577392579, 0.07140227508544922, 0.07137891387939453, 0.07139740753173827, 0.07140966033935547, 0.07137417602539063, 0.07157417297363282, 0.0714684829711914, 0.07147564697265625, 0.07149295806884766, 0.0715697250366211, 0.07147772979736328, 0.07162470245361328, 0.07143132781982423, 0.07152726745605469, 0.07149980926513672, 0.07158576202392578, 0.07155824279785156, 0.07163970947265624, 0.07157276916503906, 0.07151913452148438, 0.07156105804443359, 0.07151023864746094, 0.07147315216064454, 0.0714567642211914, 0.07150125122070312, 0.07151570892333985, 0.07152127838134766, 0.07150003051757813, 0.07156416320800782, 0.07158464050292969, 0.07155916595458985, 0.07176601409912109, 0.07162675476074219, 0.07169023895263672, 0.07181670379638672, 0.07173785400390625, 0.07163913726806641, 0.07241104125976562, 0.071583740234375, 0.07175987243652343, 0.07186803436279297, 0.07191539001464843, 0.07172124481201173, 0.07170668792724609, 0.07170269012451172, 0.07180691528320313, 0.0717947540283203, 0.0717529296875, 0.07189379119873048, 0.07176169586181641, 0.07182489776611328, 0.07174422454833984, 0.08185212707519532, 0.07279788970947265, 0.07228438568115235, 0.07168339538574219, 0.0713235855102539, 0.0712671661376953, 0.07134413146972657, 0.07131033325195313, 0.07179574584960938, 0.07146905517578125, 0.07140281677246094, 0.07160899353027343, 0.07153167724609374, 0.07168256378173828, 0.07174285125732421, 0.07147618865966797, 0.07154278564453125, 0.071542236328125, 0.07160870361328125, 0.07152845001220703, 0.0715341796875, 0.07152051544189453, 0.0718289566040039, 0.07147122955322266, 0.07144316864013672, 0.07155315399169922, 0.07156304168701172, 0.07166909027099609, 0.0715864028930664, 0.07162451171875, 0.07154057312011719, 0.07170451354980469, 0.07162850952148438, 0.07158255767822265, 0.07161039733886719, 0.07158678436279296, 0.07159705352783204, 0.071738525390625, 0.07173001861572266, 0.07178034973144531, 0.07178009796142579, 0.07184819030761719, 0.07177625274658203, 0.07173324584960937, 
0.07177938842773438, 0.07181999969482422, 0.07177852630615235, 0.0716470718383789, 0.07173321533203125, 0.0717900161743164, 0.07220015716552734, 0.07182621002197266, 0.07169843292236328, 0.07168000030517578, 0.07176601409912109, 0.07191929626464844, 0.07187251281738281, 0.07186669158935546, 0.0720239028930664, 0.07195372772216797, 0.07196144104003906, 0.07185929870605469, 0.07186319732666016, 0.08177394866943359, 0.07315929412841797, 0.07235769653320312, 0.07190137481689453, 0.07150182342529297, 0.07136873626708984, 0.07136182403564453, 0.0714257583618164, 0.07148028564453125, 0.07140755462646485, 0.07145887756347656, 0.07143440246582031, 0.07197586822509766, 0.07176182556152344, 0.07149874877929688, 0.07144003295898438, 0.07146739196777344, 0.07134563446044923, 0.07170816040039063, 0.07175218963623047, 0.07173347473144531, 0.07139282989501954, 0.07147702026367188, 0.07144889831542969, 0.0715964126586914, 0.07158921813964844, 0.07165840148925781, 0.07153421020507812, 0.07156364440917969, 0.07166976165771484, 0.07163699340820312, 0.07159603118896485, 0.07155635070800781, 0.07160294342041015, 0.07156121826171875, 0.07375212860107422, 0.07161488342285156, 0.07165090942382812, 0.07176236724853516, 0.07161036682128906, 0.07168614196777344, 0.07166531372070313, 0.07166912078857422, 0.07178860473632813, 0.071744384765625, 0.0717364501953125, 0.07174793243408203, 0.07174972534179687, 0.07177171325683594, 0.0718918685913086, 0.07181629180908203, 0.07182950592041015, 0.0717795181274414, 0.0719664306640625, 0.071851806640625, 0.07180691528320313, 0.07207762908935547, 0.07191657257080078, 0.0719738540649414, 0.07195645141601563, 0.07198521423339843, 0.07196460723876953, 0.07201996612548828, 0.08192237091064453, 0.07309273529052734, 0.0725549087524414, 0.07218582153320313, 0.07162675476074219, 0.07153218841552735, 0.07159436798095703, 0.07140287780761718, 0.07148502349853515, 0.0715889892578125, 0.07151750183105468, 0.07154048156738281, 0.0715796127319336, 0.07167810821533203, 0.07157011413574219, 0.07152623748779297, 0.07153001403808594, 0.071512451171875, 0.07162818908691407, 0.07156396484375, 0.07157350158691406, 0.0715982437133789, 0.07146086120605469, 0.07194214630126954, 0.07186342620849609, 0.07171981048583985, 0.0715869140625, 0.07157183837890625, 0.07152198028564453, 0.07150592041015626, 0.07155184173583984, 0.07154812622070313, 0.07149027252197265, 0.07156086730957031, 0.07163536071777343, 0.0715901107788086, 0.07158483123779297, 0.07162489318847656, 0.07179859161376953, 0.07151830291748047, 0.07167814636230468, 0.07170496368408204, 0.07169644927978516, 0.07170662689208984, 0.07177625274658203, 0.07177356719970703, 0.07181533050537109, 0.07163270568847656, 0.07175545501708984, 0.07163184356689453, 0.07171481323242188, 0.07166156768798829, 0.07171891021728516, 0.07236121368408203, 0.07213542175292968, 0.07187987518310547, 0.07181330871582031, 0.07183014678955078, 0.0717864990234375, 0.07171686553955078, 0.07181318664550781, 0.07187606048583985, 0.07211465454101562, 0.08198291015625, 0.07280902099609375, 0.07228118133544922, 0.07182125091552734, 0.07136729431152344, 0.07131375885009765, 0.07130729675292968, 0.0712655029296875, 0.07143296051025391, 0.0713766098022461, 0.07143452453613282, 0.07133731079101563, 0.07132227325439452, 0.07137824249267578, 0.07147408294677735, 0.07160537719726562, 0.07152217864990235, 0.07148003387451173, 0.0714005126953125, 0.07145318603515625, 0.07143065643310546, 0.07141142272949219, 0.07154102325439453, 0.07154278564453125, 0.07149353790283203, 
0.07159593963623047, 0.0715816650390625, 0.0715179214477539, 0.07155763244628906, 0.07162262725830078, 0.07160326385498048, 0.07158601379394532, 0.07171762847900391, 0.07164723205566406, 0.07167692565917969, 0.07162572479248047, 0.07167542266845703, 0.07172144317626954, 0.07179878234863281, 0.07168358612060546, 0.07167231750488282, 0.0717291488647461, 0.07182335662841798, 0.07178201293945312, 0.071870849609375, 0.07183155059814453, 0.07189913940429687, 0.07170457458496093, 0.0718438720703125, 0.07182128143310547, 0.07182950592041015, 0.07179901123046875, 0.07179856109619141, 0.0718636474609375, 0.07187728118896484, 0.07184384155273438, 0.07200355529785156, 0.07185206604003906, 0.07189478302001953, 0.07196902465820312, 0.07188880157470703, 0.07184780883789063, 0.07181334686279296, 0.08150425720214843, 0.07305420684814454, 0.07237129974365235, 0.07196915435791015, 0.0714306869506836, 0.07136592102050782, 0.07133052825927734, 0.07133929443359376, 0.07144226837158203, 0.07146147155761719, 0.0713642578125, 0.07142463684082032, 0.0714969940185547, 0.07145116424560546, 0.07155843353271485, 0.07153142547607422, 0.07147891235351563, 0.071438720703125, 0.07158505249023438, 0.07169302368164063, 0.07142374420166016, 0.07149183654785156, 0.07157981109619141, 0.07140748596191407, 0.07153456115722656, 0.07149593353271484, 0.0716124496459961, 0.07149081420898437, 0.07151849365234375, 0.07150364685058594, 0.07154729461669922, 0.07158169555664062, 0.0714997787475586, 0.0715709457397461, 0.07159174346923829, 0.07165727996826173, 0.07159430694580078, 0.07255078125, 0.07152003479003906, 0.07144831848144531, 0.07161468505859375, 0.07167129516601563, 0.07178438568115235, 0.0717093734741211, 0.07172934722900391, 0.0717333755493164, 0.07176911926269532, 0.07154338836669921, 0.0718872299194336, 0.07159718322753907, 0.07178310394287109, 0.0718232650756836, 0.07185596466064453, 0.07189881896972657, 0.07181980895996094, 0.07174371337890625, 0.07177420806884766, 0.07179007720947266, 0.07185254669189453, 0.07177152252197265, 0.07179532623291016, 0.0718912353515625, 0.07184559631347656]",tokens/s,13.919964989789506,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 422.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 328.12 MiB is free. Process 208709 has 14.42 GiB memory in use. Of the allocated memory 14.22 GiB is allocated by PyTorch, and 91.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1047.486464,1393.491968,0.0,998.244352,995.459072,s,1,9.1995439453125,9.1995439453125,0.0,9.1995439453125,9.1995439453125,9.1995439453125,9.1995439453125,[9.1995439453125],,kWh,6.138048507500286e-05,6.759885228847262e-06,2.232835119599791e-05,9.046872149984803e-05,,MB,1316.298752,1619.984384,0.0,1212.153856,1174.001664,s,10,2.065621322631836,0.2065621322631836,0.001057923379171417,0.20703142547607423,0.20757976837158204,0.20789478073120116,0.20814679061889646,"[0.20546205139160156, 0.2072406005859375, 0.207006591796875, 0.2082097930908203, 0.20497737121582033, 0.20498719787597655, 0.2070726776123047, 0.20705625915527343, 0.20609901428222657, 0.207509765625]",tokens/s,1239.3365482586466,kWh,6.249252624379373e-06,6.890893309256835e-07,4.169678276638249e-06,1.1108020231943305e-05,tokens/kWh,23046411.030457206,MB,1338.712064,1619.984384,0.0,1212.153856,1174.004224,s,10,22.765936279296877,2.2765936279296874,0.010787243324537987,2.280182861328125,2.28728349609375,2.2916496826171873,2.2951426318359376,"[2.286313232421875, 2.257575927734375, 2.296015869140625, 2.26831201171875, 2.279771240234375, 2.28084423828125, 2.280594482421875, 2.27190283203125, 2.263395263671875, 2.281211181640625]",tokens/s,27.67292292621042,kWh,6.532558710478703e-05,7.2054362435162614e-06,3.535490889856203e-05,0.00010788593224686533,tokens/kWh,583950.0914340061,,s,630,22.761943065643333,0.03613006835816398,0.0007618873217216914,0.03608678245544433,0.0365433364868164,0.036807624435424804,0.038606347618103026,"[0.03631513595581055, 0.03638886260986328, 0.03630489730834961, 0.03641753768920898, 0.03643932723999024, 0.03671318435668945, 0.03632534408569336, 0.03666128158569336, 0.03630819320678711, 0.03644054412841797, 0.03646905517578125, 0.03628851318359375, 0.03624284744262695, 0.03624406433105469, 0.036173824310302735, 0.03623321533203125, 0.036154464721679686, 0.0362259521484375, 0.03627798461914063, 0.03643830490112305, 0.036062782287597656, 0.035994049072265624, 0.036268096923828125, 0.03680006408691406, 0.0360816650390625, 0.03593801498413086, 0.03613056182861328, 0.03600476837158203, 0.03606118392944336, 0.03620249557495117, 0.036171775817871094, 0.03632467269897461, 0.03680716705322266, 0.03615094375610352, 0.03638457489013672, 0.03647151947021485, 0.03633148956298828, 0.03638070297241211, 0.036269855499267575, 0.03617337417602539, 0.036276897430419924, 0.036345855712890625, 0.036216705322265626, 0.03619782257080078, 0.03608000183105469, 0.03618572616577148, 0.03661606216430664, 0.03612713623046875, 0.03635036849975586, 0.03609395217895508, 0.03621683120727539, 0.03639839935302734, 0.036049633026123046, 0.0360807991027832, 
0.03611334228515625, 0.036359870910644534, 0.0361064338684082, 0.03632332611083984, 0.03639449691772461, 0.03618790435791015, 0.03641622543334961, 0.036286495208740235, 0.03637452697753906, 0.036089855194091795, 0.03622611236572266, 0.03609417724609375, 0.03588927841186523, 0.03541183853149414, 0.035952865600585936, 0.036131328582763675, 0.03614672088623047, 0.036176353454589846, 0.03618815994262695, 0.03607932662963867, 0.03604246520996094, 0.0361060791015625, 0.036063968658447264, 0.036073471069335936, 0.03620364761352539, 0.03607846450805664, 0.036116481781005856, 0.03645183944702148, 0.036114944458007815, 0.03611580657958984, 0.03637315368652344, 0.036345855712890625, 0.03615913772583008, 0.036267616271972655, 0.036692737579345706, 0.03616358566284179, 0.03586867141723633, 0.03549507141113281, 0.035915775299072264, 0.03579375839233399, 0.03560451126098633, 0.03560444641113281, 0.03557891082763672, 0.035503070831298826, 0.035604480743408204, 0.03596492767333984, 0.03577974319458008, 0.03558281707763672, 0.03577446365356445, 0.03547663879394531, 0.03570979309082031, 0.035563518524169925, 0.03548364639282227, 0.03538739013671875, 0.03550931167602539, 0.035275104522705075, 0.03582419204711914, 0.03569052886962891, 0.035741504669189454, 0.03582585525512695, 0.035827713012695314, 0.03561593627929688, 0.035447616577148434, 0.03531478500366211, 0.03510774230957031, 0.03537510299682617, 0.03563430404663086, 0.035447486877441405, 0.035483230590820314, 0.03567196655273437, 0.03564358520507813, 0.03540838241577148, 0.036004417419433596, 0.035833854675292966, 0.03548160171508789, 0.035438591003417966, 0.03548364639282227, 0.035315711975097655, 0.03712575912475586, 0.03573183822631836, 0.03578019332885742, 0.036105758666992185, 0.03603340911865234, 0.03621478271484375, 0.03589529418945313, 0.03587891387939453, 0.035776512145996094, 0.038744224548339846, 0.043791969299316405, 0.0459975357055664, 0.03665795135498047, 0.03726889419555664, 0.03670412826538086, 0.037017887115478515, 0.03653023910522461, 0.0365428466796875, 0.0364620475769043, 0.036416030883789065, 0.03625369644165039, 0.03636627197265625, 0.03639302444458008, 0.03640444946289063, 0.0364095344543457, 0.03645091247558594, 0.0363392333984375, 0.036831710815429686, 0.036397056579589845, 0.03664896011352539, 0.03649331283569336, 0.03610009765625, 0.035910816192626954, 0.035939167022705075, 0.035846080780029294, 0.03612377548217773, 0.03573356628417969, 0.035773311614990234, 0.03599788665771484, 0.03612854385375976, 0.03589532852172852, 0.0359444465637207, 0.03590348815917969, 0.03618406295776367, 0.0360263671875, 0.03596083068847656, 0.03633273696899414, 0.035928768157958986, 0.03603263854980469, 0.0359659538269043, 0.03593667221069336, 0.03576073455810547, 0.03557331085205078, 0.03573907089233398, 0.03591993713378906, 0.035851200103759764, 0.03592547225952149, 0.03603308868408203, 0.03596083068847656, 0.03570217514038086, 0.03559513473510742, 0.03530902481079102, 0.03533849716186523, 0.035514240264892576, 0.036141151428222655, 0.036136768341064454, 0.03607686233520508, 0.036037151336669924, 0.03591551971435547, 0.03571161651611328, 0.03590758514404297, 0.03583590316772461, 0.03603046417236328, 0.036050945281982424, 0.03593625640869141, 0.03629040145874023, 0.03593977737426758, 0.036444896697998046, 0.036087806701660154, 0.03611795043945312, 0.03611091232299805, 0.0362630729675293, 0.0359637451171875, 0.03611603164672852, 0.03605686569213867, 0.03604665756225586, 0.03621897506713867, 0.03638502502441406, 0.03612416076660156, 
0.0359444465637207, 0.03610726547241211, 0.0363675537109375, 0.03611936187744141, 0.036797534942626955, 0.036170654296875, 0.03596822357177734, 0.03600259017944336, 0.035985408782958986, 0.0361085433959961, 0.03600950241088867, 0.03635398483276367, 0.03862748718261719, 0.03642176055908203, 0.035934207916259765, 0.03549184036254883, 0.035489791870117186, 0.03578265762329102, 0.03605500793457031, 0.036081695556640626, 0.03584342575073242, 0.03549801635742188, 0.035299457550048825, 0.035197311401367185, 0.03527692794799805, 0.035186622619628904, 0.035397022247314454, 0.03545353698730469, 0.03559596633911133, 0.03563484954833984, 0.03836928176879883, 0.03598332977294922, 0.03587376022338867, 0.035811328887939455, 0.035837310791015625, 0.03582169723510742, 0.03607807922363281, 0.03552175903320313, 0.03606403350830078, 0.03562905502319336, 0.0360079345703125, 0.03608575820922852, 0.03604044723510742, 0.03588940811157226, 0.036046527862548826, 0.036182334899902344, 0.03597721481323242, 0.03600352096557617, 0.03594249725341797, 0.03581267166137695, 0.035754913330078124, 0.0355404167175293, 0.03668751907348633, 0.035715744018554686, 0.035975425720214844, 0.036018177032470705, 0.036208641052246096, 0.03609600067138672, 0.035905025482177735, 0.03604326248168945, 0.036239360809326174, 0.036308609008789065, 0.03619190216064453, 0.03636912155151367, 0.036675582885742186, 0.03707411193847656, 0.03697747039794922, 0.03709952163696289, 0.03701929473876953, 0.036439937591552736, 0.03645196914672852, 0.036380992889404294, 0.03628441619873047, 0.03621532821655273, 0.03634902572631836, 0.03626691055297852, 0.03624905776977539, 0.03640169525146485, 0.036239360809326174, 0.036224414825439456, 0.036799072265625, 0.03633135986328125, 0.036369728088378905, 0.036363105773925784, 0.036359169006347655, 0.036365310668945314, 0.036428958892822265, 0.03614761734008789, 0.03631507110595703, 0.03589580917358398, 0.03579904174804688, 0.03594649505615234, 0.03587481689453125, 0.03630617523193359, 0.03628630447387695, 0.03569084930419922, 0.03552854537963867, 0.03561033630371094, 0.04100960159301758, 0.035587425231933596, 0.03542287826538086, 0.03538236618041992, 0.03623004913330078, 0.03603235244750977, 0.03594460678100586, 0.03570073699951172, 0.03851878356933594, 0.03585971069335937, 0.035526496887207035, 0.03513817596435547, 0.03518288040161133, 0.03566947174072266, 0.035435039520263674, 0.035604480743408204, 0.03538905715942383, 0.03527923202514648, 0.03544678497314453, 0.035794944763183595, 0.03634918212890625, 0.03610265731811523, 0.036398944854736326, 0.03721420669555664, 0.03613737487792969, 0.035872608184814456, 0.036019935607910156, 0.03630329513549805, 0.03643734359741211, 0.03562972640991211, 0.03586492919921875, 0.03560524749755859, 0.03623004913330078, 0.0363765754699707, 0.03624755096435547, 0.03659161758422851, 0.03575603103637695, 0.03544416046142578, 0.03628268814086914, 0.03634991836547852, 0.036260128021240234, 0.03649331283569336, 0.03633724975585938, 0.036647327423095705, 0.03644163131713867, 0.03692303848266602, 0.0366569938659668, 0.03649212646484375, 0.03632326507568359, 0.03637267303466797, 0.03637990570068359, 0.03635891342163086, 0.03686604690551758, 0.036517887115478515, 0.03700735855102539, 0.03647283172607422, 0.036547744750976566, 0.036413345336914066, 0.0363570556640625, 0.03855459213256836, 0.036674560546875, 0.036335166931152345, 0.036018623352050784, 0.035694591522216795, 0.03568025588989258, 0.0360079345703125, 0.03698908615112305, 0.03581353759765625, 0.03591446304321289, 
0.036236255645751954, 0.036239360809326174, 0.03592192077636719, 0.036044769287109375, 0.03585436630249023, 0.03557580947875977, 0.03546908950805664, 0.035727039337158206, 0.04241667175292969, 0.03923932647705078, 0.03579702377319336, 0.03566828918457031, 0.03592752075195312, 0.03591222381591797, 0.03601747131347656, 0.035754688262939455, 0.03562656021118164, 0.03557011032104492, 0.03562496185302735, 0.035624286651611325, 0.035830432891845704, 0.03587071990966797, 0.035880863189697264, 0.03617728042602539, 0.03585004806518555, 0.036109214782714845, 0.0357498893737793, 0.035703968048095704, 0.0362402229309082, 0.03618201446533203, 0.03595017623901367, 0.035665599822998044, 0.03557449722290039, 0.03554508972167969, 0.03590566253662109, 0.03616748809814453, 0.035856063842773435, 0.03563763046264649, 0.03566947174072266, 0.035846687316894534, 0.0364738883972168, 0.036293598175048826, 0.036238624572753904, 0.03631135940551758, 0.036729248046875, 0.03618764877319336, 0.0363524169921875, 0.03630259323120117, 0.036331871032714846, 0.036274177551269535, 0.036173473358154296, 0.03651824188232422, 0.03658863830566406, 0.036259166717529295, 0.03645708847045898, 0.03639292907714844, 0.036344894409179686, 0.035871742248535156, 0.03554099273681641, 0.03529523086547852, 0.03530137634277344, 0.035619998931884764, 0.03618697738647461, 0.03595468902587891, 0.036224384307861325, 0.03605295944213867, 0.03575260925292969, 0.03583590316772461, 0.035676063537597655, 0.035468929290771486, 0.03545471954345703, 0.03521200180053711, 0.035398815155029295, 0.036025150299072266, 0.0357540168762207, 0.036066753387451175, 0.03582534408569336, 0.03592057418823242, 0.03614739227294922, 0.03609190368652344, 0.03606902313232422, 0.03583158493041992, 0.03618649673461914, 0.03573574447631836, 0.03577967834472656, 0.03621571350097656, 0.03596492767333984, 0.035764320373535156, 0.03631504058837891, 0.036435966491699216, 0.0372262077331543, 0.03676803207397461, 0.036029792785644534, 0.03587676620483399, 0.035832576751708985, 0.03578060913085938, 0.036122623443603515, 0.035899391174316404, 0.03649846267700195, 0.03775968170166016, 0.036117889404296874, 0.03606780624389649, 0.03690335845947266, 0.03605692672729492, 0.03600751876831055, 0.03616128158569336, 0.03617670440673828, 0.036083713531494144, 0.03609132766723633, 0.03614777755737305, 0.036261886596679685, 0.03626598358154297, 0.036222976684570314, 0.03626355361938476, 0.03615071868896484, 0.03620345687866211, 0.03631382369995117, 0.03618953704833985, 0.03646716690063476, 0.03631052780151367, 0.036324031829833986, 0.03648716735839844, 0.036190208435058595, 0.0365588493347168, 0.03680799865722656, 0.036307647705078126, 0.03648876953125, 0.036630977630615236, 0.03636633682250977, 0.03693772888183594, 0.036206592559814454, 0.03641753768920898, 0.03631039810180664, 0.03772684860229492, 0.03576617431640625, 0.035458656311035154, 0.03545753479003906, 0.03533824157714844, 0.03544863891601562, 0.03528278350830078, 0.03553519821166992, 0.035683551788330076, 0.03586947250366211, 0.03572447967529297, 0.03551110458374023, 0.035422206878662106, 0.03552249526977539, 0.035272640228271486, 0.035461246490478514, 0.03572323226928711, 0.036388607025146485, 0.036030750274658206, 0.036111679077148434, 0.03584451293945313, 0.03639865493774414, 0.03620060729980469, 0.036041278839111325, 0.03581542587280274, 0.035672065734863284, 0.035432449340820314, 0.03598745727539063, 0.036111934661865235, 0.03669561767578125, 0.035289985656738285, 0.035278335571289066, 0.03565824127197265, 0.03575807952880859, 
0.03544582366943359, 0.03556857681274414, 0.03561417770385742, 0.03551286315917969, 0.03536076736450195, 0.03554099273681641, 0.03572531127929687, 0.03574915313720703, 0.035482303619384765, 0.03573276901245117, 0.03549078369140625, 0.035643199920654296, 0.03655267333984375, 0.03637152099609375, 0.0363570556640625, 0.03666329574584961, 0.03645587158203125, 0.03635843276977539, 0.03630313491821289, 0.03613654327392578, 0.036192798614501955, 0.036124542236328126, 0.036534271240234374, 0.0364150390625, 0.03649491119384766, 0.03656179046630859, 0.036296703338623046, 0.036347904205322266, 0.03629171371459961, 0.03660070419311524, 0.036474815368652345, 0.036467937469482424, 0.035936351776123046, 0.036082176208496096, 0.036319488525390624, 0.03579084777832031, 0.0355676155090332, 0.03625574493408203, 0.036536319732666016, 0.03630080032348633, 0.036446208953857424, 0.03794851303100586, 0.035977886199951174, 0.03591932678222656, 0.03552259063720703, 0.035584766387939455, 0.03611033630371094, 0.0355810546875, 0.03575616073608399, 0.03627494430541992, 0.0366196174621582, 0.036238304138183595, 0.03626361465454102, 0.03598057556152344, 0.03578339385986328, 0.035741119384765624, 0.03582624053955078, 0.035706241607666014, 0.03554982376098633, 0.03544063949584961, 0.03544073486328125, 0.035727264404296875, 0.03570406341552734, 0.03633023834228516, 0.03594623947143555, 0.03628249740600586, 0.0366429443359375, 0.03623321533203125, 0.036459617614746094, 0.03705948638916016, 0.03685171127319336, 0.03671244812011719, 0.03612089538574219, 0.03596681594848633, 0.03631497573852539]",tokens/s,27.67777769161179,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,822.10816,4900.913152,0.0,4498.39104,4467.073024,s,1,14.29221484375,14.29221484375,0.0,14.29221484375,14.29221484375,14.29221484375,14.29221484375,[14.29221484375],,kWh,0.00020694684823749864,2.2820741937687247e-05,6.623921965800972e-05,0.0002960068098331956,,MB,1363.673088,5469.241344,0.0,5054.13632,4892.018688,s,10,10.311571899414062,1.0311571899414063,0.005154613828016917,1.0311505737304687,1.0366505126953125,1.0366867675781248,1.0367157714843749,"[1.0204786376953126, 1.0247535400390626, 1.0292059326171874, 1.029791259765625, 1.0309576416015624, 1.031343505859375, 1.0357178955078126, 1.0359580078125, 1.0366424560546874, 1.0367230224609374]",tokens/s,248.26476748374975,kWh,3.0054032216251016e-05,3.3144472109895623e-06,1.9924238161600495e-05,5.329271758884107e-05,tokens/kWh,4803658.202891191,MB,1400.582144,5486.01856,0.0,5068.816384,4892.021248,s,10,48.086830566406256,4.808683056640626,0.0044379606703610006,4.807716796875001,4.814943701171875,4.816116381835938,4.817054526367188,"[4.8048720703125, 4.80783203125, 4.803392578125, 4.8087900390625, 4.8076015625, 4.8043740234375, 4.80555517578125, 4.81244091796875, 4.8172890625, 
4.81468310546875]",tokens/s,13.101300139338393,kWh,0.00014096866865666543,1.5549919617418867e-05,9.343582474860001e-05,0.0002499544130226843,tokens/kWh,252045.96005385395,,s,630,48.084574111938494,0.07632472081260076,0.0014816394622249919,0.07608934020996094,0.07648821029663086,0.07712911987304688,0.0872615898895264,"[0.08671641540527343, 0.07740211486816406, 0.07668940734863282, 0.07631871795654296, 0.07589478302001954, 0.07584358215332031, 0.07585759735107422, 0.07585823822021484, 0.07582514953613281, 0.0758497314453125, 0.07586752319335938, 0.07588108825683594, 0.07585139465332032, 0.0758787841796875, 0.07586201477050782, 0.07585177612304687, 0.0758246078491211, 0.07586825561523437, 0.07587638092041016, 0.07589929962158203, 0.07585177612304687, 0.07585363006591797, 0.07586415863037109, 0.07589897918701172, 0.0759002914428711, 0.0760142059326172, 0.0759582748413086, 0.0760442886352539, 0.07593778991699218, 0.076042236328125, 0.07597875213623047, 0.07602543640136719, 0.07592339324951172, 0.07604179382324219, 0.07595919799804687, 0.07613008117675782, 0.07635990142822266, 0.0767872314453125, 0.07609801483154296, 0.07610572814941406, 0.07602995300292968, 0.07610777282714844, 0.07610163116455078, 0.0762060775756836, 0.07611801910400391, 0.07617922973632812, 0.07623907470703126, 0.07627366638183594, 0.07615897369384765, 0.07623884582519531, 0.07619785308837891, 0.07648668670654298, 0.07615462493896484, 0.07623014068603516, 0.07616998291015625, 0.07625727844238281, 0.07610368347167969, 0.0761712646484375, 0.07623462677001953, 0.07639600372314453, 0.07623542022705078, 0.07629945373535156, 0.076372802734375, 0.08628428649902344, 0.07859331512451172, 0.07661411285400391, 0.07622882843017578, 0.07577507019042969, 0.07577286529541015, 0.07578828430175781, 0.07574732971191406, 0.07574118041992188, 0.07575321960449219, 0.07579058837890625, 0.07585587310791016, 0.07578623962402344, 0.07597840118408203, 0.07587055969238281, 0.07588864135742188, 0.0758446044921875, 0.07590399932861328, 0.07588658905029297, 0.07594553375244141, 0.07588249969482422, 0.07595769500732422, 0.07596749114990234, 0.0763526382446289, 0.07596121978759765, 0.0762056655883789, 0.07607453155517578, 0.07610182189941406, 0.0781743392944336, 0.07608918762207031, 0.07603804779052735, 0.07601837158203124, 0.07595372772216796, 0.07607087707519532, 0.07604898834228516, 0.07616512298583984, 0.07602585601806641, 0.07607234954833984, 0.07609590148925781, 0.07609772491455079, 0.07603187561035156, 0.07612547302246093, 0.07618646240234375, 0.07619993591308594, 0.07610368347167969, 0.07619379425048828, 0.07622246551513671, 0.07618355560302735, 0.07610163116455078, 0.07614582061767577, 0.0761219482421875, 0.07626445007324219, 0.07612006378173829, 0.07615078735351563, 0.07614669036865235, 0.07705776214599609, 0.07618560028076173, 0.07623442840576172, 0.07621257781982421, 0.07640089416503906, 0.07618764495849609, 0.07630028533935547, 0.07632796478271485, 0.0874616928100586, 0.07725596618652343, 0.07650192260742188, 0.07615078735351563, 0.07576332855224609, 0.07579891204833984, 0.07583097839355468, 0.07583897399902344, 0.07574610900878906, 0.07579443359375, 0.0757391357421875, 0.07584153747558593, 0.07577334594726562, 0.07585648345947266, 0.07597456359863282, 0.07589449310302734, 0.07580710601806641, 0.07587020874023437, 0.07587596893310547, 0.07592384338378906, 0.07589478302001954, 0.07591935729980469, 0.07593977355957031, 0.07594400024414062, 0.07612188720703125, 0.07605270385742187, 0.07600128173828125, 0.0760791015625, 0.07597030639648437, 
0.07606294250488281, 0.0759986572265625, 0.07599983978271484, 0.07593778991699218, 0.07613030242919921, 0.07599104309082032, 0.076115966796875, 0.07605862426757813, 0.07609548950195312, 0.07603971099853515, 0.0761099853515625, 0.07600553894042969, 0.07609564971923828, 0.07611801910400391, 0.07616512298583984, 0.07606476593017578, 0.07614873504638672, 0.07607500457763672, 0.07616486358642578, 0.07605683135986328, 0.0760889892578125, 0.07604214477539062, 0.07620652770996093, 0.07628800201416015, 0.07621222686767579, 0.07618726348876953, 0.07625081634521484, 0.07613452911376953, 0.07623693084716797, 0.07624134063720703, 0.07635763549804687, 0.0762798080444336, 0.07626927947998047, 0.07631635284423828, 0.08779702758789062, 0.07811468505859374, 0.07677433776855469, 0.07651055908203125, 0.07588931274414062, 0.07597647857666015, 0.07588066864013672, 0.07585177612304687, 0.07580876922607421, 0.0758497314453125, 0.07584358215332031, 0.07591731262207031, 0.07587430572509765, 0.07596604919433594, 0.07595980834960937, 0.0758895034790039, 0.07590099334716797, 0.07598438262939453, 0.07595247650146485, 0.07601372528076172, 0.07596646118164062, 0.07603404998779296, 0.07603404998779296, 0.07605657958984376, 0.07606240081787109, 0.07605280303955078, 0.07604828643798828, 0.07667107391357422, 0.07618761444091797, 0.07623683166503906, 0.07610368347167969, 0.07604633331298828, 0.07594172668457032, 0.07603596496582031, 0.07601999664306641, 0.07608902740478515, 0.07598931121826172, 0.0760975341796875, 0.07612127685546875, 0.0761536636352539, 0.07610572814941406, 0.07614825439453125, 0.07611186981201172, 0.07617174530029297, 0.07614463806152344, 0.07622447967529297, 0.07617129516601563, 0.07624889373779296, 0.07616086578369141, 0.07619004821777343, 0.07613433837890625, 0.07626057434082031, 0.0761349105834961, 0.07619404602050782, 0.07619388580322266, 0.07624444580078125, 0.07614054107666016, 0.07617795562744141, 0.07627340698242188, 0.07642752075195312, 0.07627903747558594, 0.07627958679199219, 0.07642985534667969, 0.08866844940185546, 0.07744236755371094, 0.07670783996582031, 0.07626351928710938, 0.07580732727050782, 0.0758497314453125, 0.0758045425415039, 0.07585804748535156, 0.07582105255126953, 0.07587142181396485, 0.0759095687866211, 0.07595849609375, 0.07591539001464843, 0.07599311828613281, 0.07594188690185547, 0.07592960357666016, 0.0759582748413086, 0.07601766204833985, 0.07597785949707031, 0.07619468688964844, 0.07641193389892578, 0.07602889251708984, 0.076006591796875, 0.07603897857666016, 0.07598079681396484, 0.07597030639648437, 0.07595852661132813, 0.07605452728271485, 0.07597248077392578, 0.07613017272949218, 0.07627295684814453, 0.07607510375976563, 0.07596851348876953, 0.07603030395507812, 0.075989501953125, 0.07625933074951172, 0.07596441650390626, 0.07600128173828125, 0.0760134048461914, 0.07609709167480469, 0.07596038055419922, 0.07601996612548828, 0.07600566101074219, 0.07618150329589844, 0.07610691070556641, 0.07623145294189453, 0.0761119384765625, 0.07615897369384765, 0.07606681823730468, 0.07614873504638672, 0.07610739135742188, 0.07632064056396484, 0.07613081359863282, 0.07618150329589844, 0.07621590423583985, 0.07630480194091797, 0.07631462097167968, 0.0762938232421875, 0.07639686584472656, 0.07639859008789063, 0.07615078735351563, 0.07625103759765625, 0.0762053451538086, 0.08677168273925781, 0.07736937713623047, 0.0766197738647461, 0.07622451019287109, 0.07572601318359375, 0.07593247985839843, 0.07581488037109375, 0.07584464263916016, 0.07573606109619141, 0.075757568359375, 
0.07576371002197266, 0.07584563446044922, 0.07579347229003906, 0.07586473846435547, 0.07592918395996094, 0.07621087646484374, 0.07581900787353515, 0.0758760986328125, 0.07586637115478516, 0.07602790069580079, 0.075906494140625, 0.07597932434082032, 0.07592755126953125, 0.07593369293212891, 0.07590444946289063, 0.07599161529541015, 0.07603129577636719, 0.0760551986694336, 0.07595401763916015, 0.07604857635498047, 0.07599718475341796, 0.07602108764648438, 0.07595894622802735, 0.07608115386962891, 0.07604838562011719, 0.07614259338378906, 0.07599088287353516, 0.07603801727294922, 0.07603826904296875, 0.07607097625732422, 0.07599504089355469, 0.07609568023681641, 0.07604838562011719, 0.07617062377929687, 0.07607145690917969, 0.07611708831787109, 0.07634636688232421, 0.07623375701904297, 0.0761108169555664, 0.0761212158203125, 0.07608204650878907, 0.07619993591308594, 0.0760627212524414, 0.0764183349609375, 0.07679254150390626, 0.07627571105957032, 0.07622579193115234, 0.076321533203125, 0.07638620758056641, 0.07635504150390625, 0.0762128677368164, 0.07626943969726563, 0.07631475067138672, 0.08782438659667968, 0.07732838439941406, 0.07653510284423828, 0.07616172790527344, 0.07579853057861329, 0.0758039321899414, 0.07577263641357422, 0.07577190399169922, 0.07595622253417969, 0.07596195220947266, 0.07583580780029296, 0.07585923004150391, 0.07586070251464844, 0.07589584350585937, 0.07588057708740234, 0.07582771301269531, 0.07575382232666016, 0.07587593841552734, 0.075882080078125, 0.07584646606445312, 0.07587020874023437, 0.07587168121337891, 0.07588880157470704, 0.07600374603271484, 0.07592550659179688, 0.0759983367919922, 0.07603289794921875, 0.07604393768310547, 0.0759359359741211, 0.0760014419555664, 0.07602505493164062, 0.07602460479736328, 0.07595417785644532, 0.07600537872314453, 0.07605427551269531, 0.07609091186523438, 0.07617763519287109, 0.0761365737915039, 0.07613069152832032, 0.07614259338378906, 0.07603814697265625, 0.07608934020996094, 0.076046142578125, 0.07622265625, 0.07599718475341796, 0.0760791015625, 0.07731388854980469, 0.07682883453369141, 0.07634278106689453, 0.076136962890625, 0.07617855834960938, 0.07620684814453126, 0.07608537292480469, 0.07619110107421875, 0.07614937591552734, 0.07619945526123047, 0.0761144027709961, 0.07614816284179687, 0.07613040161132813, 0.07631715393066406, 0.07618492889404296, 0.07624361419677735, 0.07633920288085938, 0.08857599639892579, 0.07728742218017579, 0.07655833435058594, 0.07619580841064454, 0.07590406036376954, 0.07573808288574219, 0.07576268768310547, 0.07594496154785156, 0.07572684478759766, 0.07579209899902344, 0.07581465911865234, 0.07587395477294921, 0.0758639373779297, 0.07588934326171876, 0.0759073944091797, 0.07588658905029297, 0.07592294311523437, 0.07594035339355469, 0.07593113708496094, 0.07604605102539062, 0.07596112060546875, 0.07597875213623047, 0.07597398376464844, 0.07597532653808593, 0.07614669036865235, 0.07607004547119141, 0.07608611297607422, 0.07608934020996094, 0.07598284912109375, 0.07606886291503906, 0.07605235290527344, 0.07725990295410157, 0.07670066833496093, 0.07636483001708984, 0.07605961608886719, 0.07765811157226563, 0.07604736328125, 0.07610880279541016, 0.07610723114013672, 0.07613085174560547, 0.07608729553222657, 0.07610345458984374, 0.07613053131103516, 0.07620121765136718, 0.07610777282714844, 0.07616178894042969, 0.07608319854736328, 0.07614463806152344, 0.07603404998779296, 0.07620333099365234, 0.07623264312744141, 0.07757901000976562, 0.0769637451171875, 0.07651126098632813, 0.07620806121826172, 
0.07623078155517578, 0.07613235473632812, 0.07616716766357422, 0.07626956939697266, 0.07640064239501954, 0.07624089813232422, 0.07630847930908204, 0.07630982208251953, 0.08873779296875, 0.07729964447021484, 0.07663353729248047, 0.07623648071289063, 0.0758691177368164, 0.0759009246826172, 0.07580671691894532, 0.07580671691894532, 0.0758497314453125, 0.0758472671508789, 0.07583785247802734, 0.0758634262084961, 0.07588432312011718, 0.07589759826660156, 0.07595782470703125, 0.07589299011230469, 0.07588483428955078, 0.07606060791015624, 0.07712509155273438, 0.07657532501220703, 0.07605033874511719, 0.07605606079101562, 0.07602646636962891, 0.07597977447509766, 0.07596927642822265, 0.07600972747802734, 0.07602515411376953, 0.07613510131835938, 0.07608451080322266, 0.0761719970703125, 0.07616716766357422, 0.0760995864868164, 0.07752035522460937, 0.0769249267578125, 0.07648255920410156, 0.07626806640625, 0.0760975341796875, 0.07613423919677734, 0.07612982177734375, 0.0761566390991211, 0.07608822631835938, 0.0762507553100586, 0.07614450836181641, 0.07621273803710937, 0.07608457946777344, 0.07615503692626953, 0.07612652587890625, 0.07767052459716797, 0.07702726745605469, 0.0766541748046875, 0.07615459442138672, 0.07625401306152343, 0.07611961364746093, 0.07617935943603515, 0.07629596710205078, 0.07630870056152343, 0.07614109039306641, 0.07618093109130859, 0.07616307067871093, 0.07636844635009765, 0.07657676696777344, 0.07740745544433594, 0.07704764556884766, 0.08758064270019532, 0.07749839782714844, 0.07681183624267578, 0.07631279754638672, 0.07581104278564453, 0.07586377716064453, 0.0758144302368164, 0.07582713317871094, 0.075830078125, 0.0758661117553711, 0.07591117095947265, 0.07589631652832031, 0.07584409332275391, 0.07602790069580079, 0.07593692779541016, 0.07587312316894532, 0.07586406707763672, 0.077248291015625, 0.07654013061523438, 0.07618745422363281, 0.0758642578125, 0.07593539428710938, 0.0759093780517578, 0.07596246337890625, 0.07590707397460937, 0.07598899078369141, 0.07611186981201172, 0.07605657958984376, 0.07597465515136718, 0.07601545715332031, 0.076091552734375, 0.07671743774414062, 0.07713241577148437, 0.07670374298095703, 0.07612150573730468, 0.07611427307128907, 0.07605068969726562, 0.07610982513427735, 0.07612416076660156, 0.07616710662841797, 0.07625939178466797, 0.07617078399658203, 0.07613488006591797, 0.07746054077148437, 0.0767333755493164, 0.07641088104248046, 0.07613235473632812, 0.07616102600097656, 0.07609728240966797, 0.0761612777709961, 0.07624070739746094, 0.07629843139648437, 0.07612825775146484, 0.07620198059082031, 0.07619503784179688, 0.07621711730957031, 0.07648665618896484, 0.07731609344482422, 0.07688191986083984, 0.07644159698486327, 0.07622799682617187, 0.07625494384765626, 0.07625202941894531]",tokens/s,13.101914941232328,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 182916 has 14.73 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 137.12 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, 
**kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 126942 has 14.66 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 428.13 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1078.571008,9784.19712,0.0,9388.949504,9304.608768,s,1,33.56804296875,33.56804296875,0.0,33.56804296875,33.56804296875,33.56804296875,33.56804296875,[33.56804296875],,kWh,0.0007570145398666909,8.349687089604282e-05,0.00027338716315400413,0.0011138985739167378,,MB,1442.779136,10197.336064,0.0,9789.505536,9597.896704,s,10,8.024159423828126,0.8024159423828126,0.0037759130374792073,0.8023334350585938,0.8048954711914063,0.8080676086425781,0.8106053186035157,"[0.8041905517578125, 0.803518798828125, 0.7991083984375, 0.81123974609375, 0.8016114501953125, 0.8036441650390626, 0.8002913818359375, 0.7958880615234375, 0.8020947265625, 0.8025721435546875]",tokens/s,319.036532648885,kWh,2.3606744060574178e-05,2.603420795459561e-06,1.4281101168460964e-05,4.0491266024494706e-05,tokens/kWh,6322351.092829152,MB,1471.410176,10197.336064,0.0,9789.505536,9597.899264,s,10,376.05490625,37.605490625,0.1423211389298986,37.6388125,37.76045625,37.78117734375,37.79775421875,"[37.7558515625, 37.45809375, 37.6530859375, 37.8018984375, 37.680734375, 37.5471328125, 37.35794921875, 37.436078125, 37.73954296875, 37.6245390625]",tokens/s,1.6752872772817333,kWh,0.001104775240299009,0.00012186504322589423,0.0004200425518457404,0.0016466828353706438,tokens/kWh,38258.73364728408,,s,630,376.05202130126935,0.5969079703194754,0.00458039882502152,0.5970689392089843,0.6014270935058594,0.6045695404052734,0.6133359204101563,"[0.6004793090820313, 0.59568603515625, 0.5983150024414062, 0.5971712646484375, 0.5964830322265625, 0.5946448364257813, 0.6069822998046875, 0.603926513671875, 0.5973463134765625, 0.6034421997070313, 0.5990032348632812, 0.600175048828125, 0.600375732421875, 0.601417724609375, 0.5993401489257812, 0.598215576171875, 0.5988536376953125, 
0.6002418212890624, 0.5966739501953126, 0.5974447631835937, 0.5969735107421875, 0.5988904418945312, 0.5986395263671875, 0.6071869506835937, 0.60400390625, 0.5985402221679688, 0.5982113647460937, 0.5988589477539062, 0.597866943359375, 0.5977251586914063, 0.602064453125, 0.597414306640625, 0.5975443115234375, 0.59811083984375, 0.5987000122070313, 0.5957073974609375, 0.5968655395507813, 0.59815283203125, 0.596900146484375, 0.5984912109375, 0.60626123046875, 0.6044815063476563, 0.600195068359375, 0.5990802001953125, 0.5992078857421875, 0.598090087890625, 0.5971746215820313, 0.6007172241210937, 0.5994456787109375, 0.5960455932617188, 0.59719921875, 0.5961911010742188, 0.5984378662109375, 0.60060986328125, 0.5966236572265625, 0.5973423461914062, 0.603420166015625, 0.605086669921875, 0.6004592895507812, 0.5994905395507812, 0.5986683959960938, 0.5983241577148437, 0.5999329223632812, 0.5987039794921875, 0.6004655151367188, 0.595375732421875, 0.596560302734375, 0.5953699951171875, 0.594818603515625, 0.5980304565429687, 0.5979303588867187, 0.5962640380859375, 0.6027191162109375, 0.5994303588867187, 0.59736962890625, 0.59692236328125, 0.5960028076171875, 0.591583251953125, 0.5895291137695312, 0.5947446899414063, 0.5901292724609375, 0.5896566162109375, 0.5879984130859375, 0.5889564819335937, 0.5868004150390626, 0.5895543823242188, 0.5913001098632813, 0.5909202880859376, 0.5916732177734375, 0.5997586059570312, 0.5964392700195312, 0.5928029174804688, 0.5908571166992187, 0.5948597412109375, 0.5899429931640625, 0.5903914184570312, 0.5945098266601563, 0.59052392578125, 0.5895480346679688, 0.590845947265625, 0.59912939453125, 0.597419921875, 0.5991373901367187, 0.60061669921875, 0.60052685546875, 0.6007255249023438, 0.6045711059570312, 0.6095562133789062, 0.5999685668945313, 0.5974201049804687, 0.5981220092773437, 0.5915159912109375, 0.5926892700195312, 0.5878067016601562, 0.5925120239257813, 0.5952144165039063, 0.58997607421875, 0.5930389404296875, 0.5917305297851563, 0.5916942749023437, 0.588666748046875, 0.5904390258789063, 0.5925928955078125, 0.6007597045898437, 0.5906499633789063, 0.5920354614257812, 0.59857275390625, 0.5967245483398438, 0.6008688354492188, 0.5964656372070313, 0.5970862426757813, 0.5971251220703125, 0.59636328125, 0.596336669921875, 0.5975347290039063, 0.6004019165039063, 0.59886181640625, 0.599341064453125, 0.6138900756835938, 0.6055706176757812, 0.5977914428710938, 0.5973641967773438, 0.5989378051757812, 0.597885009765625, 0.596038818359375, 0.5986885986328125, 0.59691748046875, 0.5977874145507812, 0.5956680908203125, 0.59686181640625, 0.5982411499023438, 0.5929468383789063, 0.5946557006835937, 0.59449365234375, 0.5981058959960938, 0.606013427734375, 0.5958430786132812, 0.59447705078125, 0.5954662475585938, 0.5960722045898438, 0.5968630981445312, 0.59592919921875, 0.59464501953125, 0.5944805297851562, 0.6029273071289063, 0.60008642578125, 0.59781787109375, 0.5953638305664063, 0.5948907470703125, 0.595585205078125, 0.5950830688476563, 0.597391357421875, 0.6060230102539063, 0.5968836669921875, 0.5955005493164063, 0.5992354736328125, 0.5962216186523438, 0.599525390625, 0.6015245361328125, 0.5963460693359375, 0.5948171997070313, 0.595548828125, 0.5993286743164062, 0.5966597900390626, 0.595879638671875, 0.5945802001953125, 0.5940714721679687, 0.5949031372070313, 0.5992950439453125, 0.5966018676757813, 0.5981992797851563, 0.5979851684570312, 0.59512255859375, 0.597765625, 0.5947230834960937, 0.5960410766601563, 0.5953389282226562, 0.5958194580078126, 0.5947756958007813, 0.5951156005859375, 
0.6132478637695312, 0.6152252807617188, 0.6057738037109375, 0.6098370361328125, 0.6183765869140625, 0.6193934936523438, 0.6091259765625, 0.6064729614257812, 0.603739013671875, 0.6000545043945312, 0.6011077270507813, 0.5995889282226563, 0.5968084716796875, 0.59753466796875, 0.5952074584960938, 0.59660986328125, 0.5948067626953125, 0.5954007568359375, 0.5988023071289063, 0.5995245971679688, 0.6012404174804687, 0.603479248046875, 0.597592529296875, 0.5988560180664062, 0.5971099853515625, 0.598666015625, 0.5959555053710938, 0.6005142822265624, 0.5991405029296875, 0.599218505859375, 0.5973622436523438, 0.5999540405273438, 0.5991586303710937, 0.5985156860351563, 0.597485595703125, 0.598877685546875, 0.5990526123046875, 0.6079464111328124, 0.5997179565429688, 0.59822119140625, 0.6002606811523438, 0.6012161865234374, 0.598807373046875, 0.5991383056640625, 0.5984088134765625, 0.5982332763671875, 0.596343017578125, 0.5982119140625, 0.598999755859375, 0.5978890380859375, 0.5959164428710938, 0.5959904174804688, 0.6044815063476563, 0.6069657592773438, 0.6004791259765625, 0.5995720825195312, 0.5961649780273437, 0.598435791015625, 0.5975311889648437, 0.5995951538085937, 0.5956484985351562, 0.6011754760742187, 0.5952840576171875, 0.5979202880859374, 0.5974712524414062, 0.5964451293945312, 0.6007623901367187, 0.59650634765625, 0.5972930297851563, 0.6051753540039062, 0.600068359375, 0.6002447509765625, 0.59643701171875, 0.5965925903320313, 0.59482470703125, 0.5983768920898438, 0.596989990234375, 0.5959901733398437, 0.5953162841796875, 0.5955325317382812, 0.59702685546875, 0.5968765869140625, 0.5952971801757813, 0.5957722778320312, 0.5971724853515625, 0.60042919921875, 0.6080061645507813, 0.5949615478515625, 0.5953770141601562, 0.5987880859375, 0.5982637939453125, 0.5975543212890625, 0.5996019897460938, 0.595951416015625, 0.5963368530273437, 0.5959393310546875, 0.594935791015625, 0.5952304077148437, 0.597755859375, 0.604116943359375, 0.5960966796875, 0.5985327758789063, 0.6015114135742188, 0.6076749267578125, 0.5980487670898438, 0.6006435546875, 0.5979259033203125, 0.595861328125, 0.5966128540039063, 0.5995729370117188, 0.5943173217773438, 0.597823486328125, 0.5940162353515624, 0.5958881225585938, 0.5932373657226563, 0.6018928833007813, 0.5966554565429687, 0.5999664306640625, 0.6031705322265625, 0.5981074829101563, 0.5961983032226562, 0.5982576904296875, 0.59667578125, 0.5979472045898437, 0.5966356201171875, 0.5988740844726562, 0.5950750732421874, 0.59464501953125, 0.5959188232421875, 0.597173828125, 0.6030330810546874, 0.59887060546875, 0.5971211547851563, 0.5971533203125, 0.6003432006835937, 0.6133718872070313, 0.595173583984375, 0.5976878662109375, 0.5946596069335938, 0.5977293090820313, 0.590903076171875, 0.5955565185546875, 0.5908944091796875, 0.5900397338867187, 0.5872164916992187, 0.5898387451171875, 0.588695068359375, 0.5992227783203125, 0.5991546630859375, 0.6000045776367188, 0.5997272338867188, 0.6043222045898438, 0.6048569946289063, 0.5994698486328125, 0.6001105346679687, 0.5976450805664062, 0.597142333984375, 0.5974384765625, 0.5970759887695313, 0.5905541381835937, 0.593681396484375, 0.5903206176757813, 0.5895465087890625, 0.5894717407226563, 0.589731689453125, 0.5914810180664063, 0.5898090209960938, 0.5908190307617187, 0.59707421875, 0.5995111694335937, 0.5907072143554688, 0.5919744262695312, 0.58979736328125, 0.594145263671875, 0.5923717041015625, 0.59804296875, 0.5930797119140625, 0.593076171875, 0.5949239501953125, 0.592190673828125, 0.592826416015625, 0.5938594970703125, 0.5946546630859375, 
0.5976929321289063, 0.6159707641601563, 0.595388427734375, 0.5930552978515625, 0.596017578125, 0.5924225463867188, 0.5903302001953125, 0.5951565551757813, 0.5902269287109375, 0.5919303588867187, 0.5909688110351563, 0.5929512939453125, 0.5906862182617187, 0.5879285888671875, 0.591690673828125, 0.5901394653320312, 0.592201171875, 0.5956367797851563, 0.6000435180664062, 0.5915443115234374, 0.5951054077148438, 0.591812744140625, 0.5918026123046874, 0.58849072265625, 0.595726318359375, 0.5919313354492187, 0.5972828369140625, 0.5893836669921875, 0.593006591796875, 0.59108740234375, 0.590979248046875, 0.5911411743164062, 0.5916771240234375, 0.5907025756835937, 0.59791357421875, 0.5995593872070313, 0.5906926879882812, 0.5898677368164063, 0.593170166015625, 0.5920870361328125, 0.589391845703125, 0.5958204956054688, 0.590388427734375, 0.5910286254882813, 0.5901746826171875, 0.5900157470703125, 0.5913583374023438, 0.5913128662109375, 0.590477783203125, 0.5902556762695312, 0.5918931884765625, 0.5957220458984375, 0.600690673828125, 0.590388671875, 0.59058642578125, 0.5907967529296875, 0.5913981323242188, 0.59211083984375, 0.5902474365234375, 0.587992919921875, 0.5887446899414063, 0.5863717041015625, 0.5877821655273437, 0.5867615966796875, 0.5886040649414063, 0.5885358276367187, 0.5941903076171875, 0.5941514282226562, 0.5992528686523437, 0.590245361328125, 0.587977294921875, 0.58908984375, 0.5893883056640625, 0.5897506713867188, 0.58909521484375, 0.587362060546875, 0.5910056762695313, 0.5893345336914062, 0.5890919189453125, 0.5894024658203125, 0.58707568359375, 0.588322509765625, 0.5886635131835938, 0.5929758911132812, 0.5966889038085937, 0.5970636596679687, 0.5937377319335938, 0.5945128784179687, 0.5954426879882813, 0.59519970703125, 0.6000829467773438, 0.5966046752929688, 0.5942167358398438, 0.5963052978515625, 0.5975029907226562, 0.5953101196289062, 0.5963593139648438, 0.5953899536132813, 0.594767578125, 0.592407470703125, 0.5954293823242187, 0.60524951171875, 0.600276123046875, 0.5980428466796875, 0.5945698852539063, 0.5956915283203125, 0.5968141479492187, 0.599828125, 0.596664306640625, 0.59563134765625, 0.5962001953125, 0.595198974609375, 0.5977835693359375, 0.5982815551757813, 0.5990254516601563, 0.5986903076171874, 0.6004895629882813, 0.60535888671875, 0.6024540405273437, 0.6028369750976562, 0.598262939453125, 0.598118408203125, 0.59943115234375, 0.5952921752929687, 0.598362060546875, 0.5996151733398437, 0.5961767578125, 0.5982315063476562, 0.5971517333984375, 0.5977986450195313, 0.5980674438476562, 0.5999363403320312, 0.6010046997070313, 0.6022569580078125, 0.6052492065429688, 0.5988505859375, 0.5998998413085938, 0.5965784912109375, 0.5971251220703125, 0.599250732421875, 0.5977110595703125, 0.59770263671875, 0.598725830078125, 0.6026717529296876, 0.5997733764648437, 0.60202392578125, 0.5994004516601562, 0.5970841674804688, 0.59827197265625, 0.6030213012695312, 0.61406005859375, 0.6005678100585937, 0.5983573608398437, 0.6053334350585937, 0.59907666015625, 0.5979268188476563, 0.6012926025390625, 0.5956641845703124, 0.5959152221679688, 0.597344482421875, 0.596909423828125, 0.5984203491210938, 0.5971619873046875, 0.5981572875976563, 0.5977435913085938, 0.597190673828125, 0.60233935546875, 0.604567626953125, 0.5984596557617188, 0.5972875366210938, 0.5958082275390625, 0.5963690795898438, 0.5995562133789063, 0.5963605346679688, 0.5962469482421875, 0.595840576171875, 0.5970195922851562, 0.5972930297851563, 0.5991724243164063, 0.5969491577148438, 0.5959049682617188, 0.5978124389648437, 
0.6027620849609375, 0.603598876953125, 0.6010101928710937, 0.595933349609375, 0.5975206909179688, 0.594540283203125, 0.5983009643554688, 0.5924844360351562, 0.5947349853515626, 0.5926602172851563, 0.5940177612304688, 0.59410693359375, 0.5937256469726563, 0.595998779296875, 0.5968008422851563, 0.5963701171875, 0.6013972778320312, 0.6001024780273437, 0.5975904541015625, 0.59519384765625, 0.5953491821289062, 0.595202392578125, 0.5945548706054687, 0.5977111206054687, 0.5951444702148437, 0.5948168334960937, 0.5969163818359375, 0.5929103393554688, 0.5976002807617188, 0.5947371215820313, 0.5980469360351562, 0.5974894409179687, 0.599140380859375, 0.60188671875, 0.6043787231445312, 0.6006676025390625, 0.59879931640625, 0.5957635498046875, 0.5973761596679688, 0.5968289184570312, 0.5975631713867188, 0.5970902099609375, 0.5986216430664062, 0.5950140380859374, 0.5960767822265625, 0.5955543212890625, 0.597099609375, 0.596447265625, 0.5972276000976563, 0.5998764038085938, 0.60377294921875, 0.605216796875, 0.5977272338867188, 0.5971763305664063, 0.5968251342773437, 0.596970458984375, 0.599923828125, 0.5974495849609375, 0.5984010620117187, 0.5963182373046875, 0.5974935913085937, 0.595628173828125, 0.595881103515625, 0.5988564453125, 0.5982308959960938]",tokens/s,1.6753001295405434,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,962.535424,1613.692928,0.0,1218.445312,1206.173696,s,1,9.4604873046875,9.4604873046875,0.0,9.4604873046875,9.4604873046875,9.4604873046875,9.4604873046875,[9.4604873046875],,kWh,6.477771084166382e-05,7.138271905804687e-06,2.3721130088000253e-05,9.563711283546876e-05,,MB,1243.926528,1909.39136,0.0,1501.560832,1463.359488,s,10,1.9908367919921874,0.19908367919921877,0.0004472589274700427,0.1991101303100586,0.19956671447753907,0.19967988739013673,0.19977042572021486,"[0.1997930603027344, 0.19954156494140626, 0.199281982421875, 0.19933203125, 0.19893525695800782, 0.1981090850830078, 0.19911955261230468, 0.19863119506835938, 0.1989923553466797, 0.1991007080078125]",tokens/s,1285.8914453948096,kWh,6.066193267262017e-06,6.68988535059734e-07,4.033217512285902e-06,1.0768399314607655e-05,tokens/kWh,23773264.021954346,MB,1265.979392,1909.39136,0.0,1501.560832,1463.362048,s,10,22.183607177734377,2.2183607177734377,0.018181601864536465,2.2126961669921874,2.240122021484375,2.2506093505859375,2.2589992138671873,"[2.2610966796875, 2.23779150390625, 2.220457275390625, 2.221549072265625, 2.210393310546875, 2.191359619140625, 2.213748046875, 2.206274658203125, 2.209292724609375, 2.211644287109375]",tokens/s,28.399348895446057,kWh,6.437977178898731e-05,7.100976421997975e-06,3.540911959711486e-05,0.00010688986780810015,tokens/kWh,589391.6915783279,,s,630,22.17988812637329,0.03520617162916396,0.0012707433426642676,0.03513360023498535,0.035653885650634765,0.035893260955810545,0.03744401447296143,"[0.03918384170532226, 0.035512928009033204, 0.03554707336425781, 
0.03542380905151367, 0.035546783447265626, 0.03576063919067383, 0.03560681533813476, 0.03563315200805664, 0.035434497833251956, 0.03811075210571289, 0.03747273635864258, 0.04433462524414063, 0.035721534729003905, 0.035815582275390626, 0.03562396621704102, 0.035605312347412106, 0.035238048553466794, 0.035318912506103514, 0.035388416290283206, 0.03526614379882813, 0.036552833557128905, 0.03556703948974609, 0.035668033599853516, 0.03566262435913086, 0.035788673400878906, 0.03542425537109375, 0.035454975128173825, 0.03546112060546875, 0.035520511627197264, 0.035581439971923826, 0.03554489517211914, 0.03629536056518555, 0.035506175994873046, 0.035560638427734374, 0.03570880126953125, 0.03562591934204101, 0.03542323303222656, 0.03598233413696289, 0.03555327987670898, 0.03550576019287109, 0.03549983978271484, 0.03569724655151367, 0.03622732925415039, 0.03588889694213867, 0.035639297485351565, 0.03578278350830078, 0.035491455078125, 0.03553955078125, 0.0365871696472168, 0.03544649505615234, 0.035448223114013674, 0.03547030258178711, 0.03580707168579102, 0.03568787384033203, 0.03570751953125, 0.03555123138427734, 0.0356495361328125, 0.03537919998168945, 0.035373313903808594, 0.03560012817382813, 0.03531980895996094, 0.035319297790527344, 0.03558147048950195, 0.035301601409912106, 0.035175968170166015, 0.03517891311645508, 0.03528851318359375, 0.0355164794921875, 0.03611315155029297, 0.035501953125, 0.03525574493408203, 0.03559084701538086, 0.03536857604980469, 0.03564787292480469, 0.035323902130126955, 0.03538438415527344, 0.03536991882324219, 0.03527065658569336, 0.03626617431640625, 0.03622857666015625, 0.035496288299560544, 0.035411201477050784, 0.03554790496826172, 0.03529846572875977, 0.035168384552001955, 0.03539017486572266, 0.0351987190246582, 0.03539788818359375, 0.03574323272705078, 0.03574256134033203, 0.035468673706054686, 0.035366622924804685, 0.03547737503051758, 0.03538771057128906, 0.03519097518920898, 0.03544902420043945, 0.035391616821289065, 0.03541987228393555, 0.03518000030517578, 0.03551916885375977, 0.035600414276123045, 0.035358688354492185, 0.035272705078125, 0.03559833526611328, 0.03524198532104492, 0.03558195114135742, 0.035274974822998045, 0.03536259078979492, 0.03594035339355469, 0.03704966354370117, 0.03577721786499023, 0.03579513549804687, 0.03561248016357422, 0.03561881637573242, 0.03547955322265625, 0.03545718383789063, 0.03567103958129883, 0.03541411209106445, 0.03523455810546875, 0.03544377517700195, 0.03585299301147461, 0.03546886444091797, 0.03526521682739258, 0.03581257629394531, 0.03571382522583008, 0.035366912841796876, 0.03527008056640625, 0.03499852752685547, 0.03547811126708984, 0.035155071258544925, 0.035154624938964846, 0.03512115097045899, 0.035362014770507814, 0.03507193756103515, 0.03494316864013672, 0.03583657455444336, 0.03536896133422852, 0.03524198532104492, 0.03575603103637695, 0.035485599517822264, 0.035178592681884766, 0.03545436859130859, 0.035250785827636716, 0.03524169540405273, 0.035428638458251956, 0.03549593734741211, 0.03606425476074219, 0.03517459106445313, 0.03517663955688476, 0.03535116958618164, 0.035356670379638674, 0.035334110260009766, 0.035079647064208984, 0.03507462310791016, 0.035244129180908204, 0.03522719955444336, 0.03542256164550781, 0.03571654510498047, 0.035592769622802736, 0.03524156951904297, 0.03521523284912109, 0.03524867248535156, 0.03494454574584961, 0.03471007919311524, 0.03501456069946289, 0.03520512008666992, 0.035776512145996094, 0.03606070327758789, 0.035051296234130856, 0.03493548965454102, 
0.03495907211303711, 0.035096286773681644, 0.03504195022583008, 0.034985759735107425, 0.03498368072509766, 0.03526079940795898, 0.034977790832519534, 0.03506995010375977, 0.0350464973449707, 0.03502915191650391, 0.03516511917114258, 0.035194496154785156, 0.03510470581054687, 0.03506524658203125, 0.03517536163330078, 0.035092159271240236, 0.03509859085083008, 0.034995712280273435, 0.03512521743774414, 0.03565363311767578, 0.035598655700683594, 0.03512271881103515, 0.035134750366210936, 0.03500921630859375, 0.03563740921020508, 0.035108768463134765, 0.03505936050415039, 0.035226303100585936, 0.03502671813964844, 0.03467689514160156, 0.034584415435791015, 0.03463987350463867, 0.034588481903076174, 0.03444863891601563, 0.034586910247802735, 0.034331520080566405, 0.0342628173828125, 0.03426508712768555, 0.03476889419555664, 0.03471273422241211, 0.034663265228271484, 0.03477673721313477, 0.060598400115966795, 0.034808032989501955, 0.036792320251464845, 0.03516009521484375, 0.03510268783569336, 0.0355676155090332, 0.03496521759033203, 0.03463590240478515, 0.03459718322753906, 0.03478716659545898, 0.03467417526245117, 0.034670753479003905, 0.034540992736816406, 0.034929088592529293, 0.034982177734375, 0.034844001770019534, 0.03468313598632813, 0.034592384338378905, 0.03447907257080078, 0.0343644790649414, 0.034327487945556644, 0.03435712051391602, 0.03411491012573242, 0.0345134391784668, 0.03456963348388672, 0.034626335144042966, 0.034553920745849606, 0.03451638412475586, 0.0348037109375, 0.034878177642822264, 0.034942623138427734, 0.03517871856689453, 0.03502284622192383, 0.03499008178710938, 0.03521945571899414, 0.035020030975341794, 0.034796257019042966, 0.03488358306884766, 0.035059745788574216, 0.03509404754638672, 0.03521772766113281, 0.034969600677490234, 0.035071487426757815, 0.03514556884765625, 0.034998943328857425, 0.035059711456298825, 0.03522124862670899, 0.03511561584472656, 0.04593587112426758, 0.03533456039428711, 0.03514134216308594, 0.03528265762329102, 0.035308223724365234, 0.03539494323730469, 0.0354984016418457, 0.035337696075439455, 0.035641983032226564, 0.035743743896484374, 0.03523161697387695, 0.034971614837646485, 0.03485283279418945, 0.034910465240478514, 0.0351374397277832, 0.03497545623779297, 0.03512966537475586, 0.03492166519165039, 0.03466937637329102, 0.03513350296020508, 0.03457632064819336, 0.03456204986572266, 0.034482078552246095, 0.03463587188720703, 0.03417638397216797, 0.034205345153808596, 0.03439545440673828, 0.034655902862548826, 0.034869247436523435, 0.034991233825683594, 0.03511795043945312, 0.03485190582275391, 0.034786048889160155, 0.03512339019775391, 0.03486105728149414, 0.03473020935058594, 0.03457001495361328, 0.03483622360229492, 0.034836734771728516, 0.03455376052856445, 0.0344169921875, 0.03473417663574219, 0.03472348785400391, 0.03486105728149414, 0.03486854553222656, 0.03555327987670898, 0.03502560043334961, 0.0349224967956543, 0.03459670257568359, 0.03471785736083984, 0.0345904655456543, 0.034455806732177734, 0.03458022308349609, 0.034441471099853516, 0.034476032257080076, 0.03487539291381836, 0.03478838348388672, 0.034398494720458986, 0.034807743072509764, 0.03470003128051758, 0.035093662261962894, 0.035133697509765624, 0.035175006866455076, 0.03511270523071289, 0.03509888076782226, 0.03506585693359375, 0.035127296447753906, 0.035272705078125, 0.035280895233154294, 0.03525820922851562, 0.03516636657714844, 0.03496755218505859, 0.034914398193359376, 0.03510262298583984, 0.035003936767578125, 0.03537148666381836, 0.03506406402587891, 
0.0351328010559082, 0.03513177490234375, 0.035286720275878904, 0.036055072784423825, 0.03514748764038086, 0.03526895904541016, 0.035095806121826174, 0.03445859146118164, 0.03410124969482422, 0.03393535995483398, 0.034029121398925784, 0.03415084838867188, 0.03486508941650391, 0.03470476913452149, 0.03446387100219726, 0.034751041412353516, 0.03462758255004883, 0.03477532958984375, 0.03475836944580078, 0.03460435104370117, 0.034766559600830076, 0.034794464111328124, 0.0347770881652832, 0.03440438461303711, 0.03437360000610352, 0.03420751953125, 0.03425302505493164, 0.03429580688476563, 0.03425894546508789, 0.03403366470336914, 0.03408892822265625, 0.03402345657348633, 0.034253822326660154, 0.034966529846191405, 0.03607904052734375, 0.03499679946899414, 0.03463577651977539, 0.034662689208984375, 0.03461705780029297, 0.03430627059936524, 0.03418854522705078, 0.03506572723388672, 0.036149120330810545, 0.036761791229248046, 0.03496633529663086, 0.03476502227783203, 0.03528271865844727, 0.03576764678955078, 0.03531171035766602, 0.03515564727783203, 0.03565283203125, 0.034926239013671874, 0.034955265045166016, 0.03517657470703125, 0.03505855941772461, 0.035326976776123044, 0.03518054580688477, 0.03541756820678711, 0.035451423645019534, 0.035347808837890626, 0.03516227340698242, 0.035278526306152344, 0.03513427352905273, 0.03509619140625, 0.036006271362304686, 0.03529276657104492, 0.03528096008300781, 0.03539542388916016, 0.035377662658691404, 0.03537849426269531, 0.03540176010131836, 0.035485374450683595, 0.03537609481811523, 0.03539494323730469, 0.03532863998413086, 0.03539276885986328, 0.03523660659790039, 0.03527884674072266, 0.03521945571899414, 0.03523279953002929, 0.03507299041748047, 0.034409889221191405, 0.03423088073730469, 0.03509622573852539, 0.03493724822998047, 0.03506988906860352, 0.03589683151245117, 0.03478204727172852, 0.034673694610595704, 0.03468761444091797, 0.034654335021972654, 0.034697086334228515, 0.035065536499023435, 0.03443983840942383, 0.034756256103515626, 0.03490611267089844, 0.03447974395751953, 0.03444083023071289, 0.03477171325683594, 0.03474227142333984, 0.034799713134765625, 0.03493212890625, 0.03486518478393555, 0.03463004684448242, 0.034773567199707034, 0.034524223327636716, 0.034616256713867186, 0.03479347229003906, 0.034758880615234376, 0.034747585296630856, 0.03463433456420899, 0.034269054412841796, 0.03399488067626953, 0.03420774459838867, 0.03405926513671875, 0.03489820861816406, 0.035164894104003905, 0.035095584869384765, 0.035029087066650394, 0.03501055908203125, 0.035340320587158205, 0.03490902328491211, 0.03503104019165039, 0.03527494430541992, 0.035202880859375, 0.03531161499023437, 0.03506108856201172, 0.03508083343505859, 0.035028350830078124, 0.034923168182373045, 0.03485615921020508, 0.035202880859375, 0.03527129745483398, 0.03522563171386719, 0.03528121566772461, 0.03525603103637695, 0.035162399291992184, 0.03537004852294922, 0.0352751693725586, 0.035213504791259766, 0.03737369537353516, 0.035186431884765626, 0.03521760177612305, 0.03550233459472656, 0.03550479888916016, 0.03527791976928711, 0.035146400451660155, 0.03538655853271484, 0.03516704177856445, 0.03516166305541992, 0.035146175384521486, 0.03539174270629883, 0.03536870574951172, 0.035448833465576174, 0.035299327850341795, 0.03491337585449219, 0.03480400085449219, 0.03452156829833984, 0.034523296356201175, 0.034680831909179685, 0.03452928161621094, 0.03448361587524414, 0.03441020965576172, 0.034753406524658206, 0.03491430282592774, 0.034929729461669924, 0.035017120361328126, 
0.035776512145996094, 0.03480985641479492, 0.03462105560302734, 0.03455539321899414, 0.03436019134521484, 0.03441254425048828, 0.03456515121459961, 0.03483907318115234, 0.035146366119384764, 0.035123008728027344, 0.03516944122314453, 0.034976608276367185, 0.03505152130126953, 0.03469635009765625, 0.03453763198852539, 0.03428752136230469, 0.03432912063598633, 0.0343732795715332, 0.034548576354980466, 0.03457408142089844, 0.037708927154541015, 0.03682751846313476, 0.035051742553710935, 0.03571852874755859, 0.03504848098754883, 0.03453939056396484, 0.03479046249389649, 0.034760990142822266, 0.03565615844726563, 0.0349285774230957, 0.03517452621459961, 0.03476259231567383, 0.0348059196472168, 0.03476287841796875, 0.034950271606445316, 0.03492534255981445, 0.03514303970336914, 0.034953025817871096, 0.03516953659057617, 0.03498998260498047, 0.03521712112426758, 0.035057472229003905, 0.03545427322387695, 0.035087265014648435, 0.03520483016967774, 0.03515955352783203, 0.03519158554077149, 0.03520415878295898, 0.035232223510742185, 0.03513167953491211, 0.035421630859375, 0.0351833610534668, 0.03523379135131836, 0.03519513702392578, 0.03524787139892578, 0.035116737365722656, 0.035283424377441405, 0.035469215393066404, 0.035289024353027346, 0.03516416168212891, 0.03518463897705078, 0.03483238220214844, 0.0349698257446289, 0.03507791900634766, 0.03493500900268555, 0.03491993713378906, 0.03471206283569336, 0.034531326293945314, 0.034358558654785154, 0.03451363372802734, 0.03439820861816406, 0.0346577262878418, 0.03477107238769531, 0.03590524673461914, 0.035267295837402346, 0.035835105895996096, 0.03495401763916016, 0.03464992141723633, 0.03465628814697266, 0.03493030548095703, 0.03496768188476562, 0.03466857528686523, 0.03485148620605469, 0.03511167907714844, 0.03524512100219727, 0.03495052719116211, 0.03502364730834961, 0.0353889274597168, 0.03613756942749023, 0.03517433547973633, 0.035022335052490236, 0.03487091064453125, 0.03471830368041992, 0.03496550369262695, 0.03498950576782227, 0.0347606086730957, 0.03466239929199219, 0.03478623962402344, 0.03478396987915039, 0.03657155227661133, 0.035160030364990234, 0.035197566986083985, 0.035160064697265625, 0.03500783920288086, 0.034925086975097656, 0.03505062484741211, 0.03504844665527344, 0.034896926879882814, 0.03511395263671875, 0.03505503845214844, 0.034826465606689457, 0.035127647399902345, 0.035125247955322264, 0.03541196823120117, 0.03553887939453125, 0.035397441864013675, 0.0354447021484375, 0.03546345520019531, 0.03610736083984375, 0.0353084487915039, 0.035248031616210936, 0.03523183822631836, 0.03541775894165039, 0.0354185905456543, 0.03542617416381836, 0.03540582275390625]",tokens/s,28.404110805720883,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1036.218368,1613.692928,0.0,1218.445312,1206.173696,s,1,9.4240576171875,9.4240576171875,0.0,9.4240576171875,9.4240576171875,9.4240576171875,9.4240576171875,[9.4240576171875],,kWh,6.484195211666777e-05,7.142521866360762e-06,2.3296685304000087e-05,9.528115928702862e-05,,MB,1293.1072,1909.39136,0.0,1501.560832,1463.359488,s,10,1.9863009643554688,0.19863009643554688,0.0006513418414676444,0.19857725524902345,0.19956176300048828,0.1995944953918457,0.19962068130493166,"[0.19862535095214845, 0.19818499755859376, 0.19772694396972657, 0.19815501403808594, 0.198687744140625, 0.19785023498535156, 0.19852915954589845, 0.19962722778320313, 0.1995544891357422, 0.19935980224609376]",tokens/s,1288.8278493237754,kWh,6.058258744047592e-06,6.68115788599339e-07,3.998846736489836e-06,1.0725221269136766e-05,tokens/kWh,23868971.425016064,MB,1315.4304,1909.39136,0.0,1501.560832,1463.362048,s,10,22.126185302734378,2.2126185302734376,0.014006153154463854,2.218113891601562,2.2226205810546875,2.2277609741210935,2.2318732885742185,"[2.221420654296875, 2.203199462890625, 2.187265625, 2.189372314453125, 2.221478271484375, 2.220020751953125, 2.2196083984375, 2.216619384765625, 2.2329013671875, 2.214299072265625]",tokens/s,28.473050884290657,kWh,6.384394182636952e-05,7.0419013335482064e-06,3.5243406879511035e-05,0.00010612925003942876,tokens/kWh,593615.8031512939,,s,630,22.122512981414793,0.03511509997049967,0.0005503760512095682,0.03513702392578125,0.03562043075561524,0.035890344810485836,0.03708655132293702,"[0.035834239959716796, 0.035659454345703126, 0.035242206573486326, 0.035146846771240234, 0.03531635284423828, 0.03540768051147461, 0.03556204986572266, 0.035209217071533204, 0.035073280334472656, 0.03533484649658203, 0.034916416168212894, 0.03506995010375977, 0.035235679626464844, 0.03523139190673828, 0.03513967895507813, 0.03523215866088867, 0.03520060729980469, 0.03481020736694336, 0.03487500762939453, 0.034996673583984374, 0.03522150421142578, 0.03492409515380859, 0.03467308807373047, 0.03489129638671875, 0.035133377075195316, 0.03483292770385742, 0.03468902587890625, 0.034856704711914065, 0.035118495941162106, 0.03517436981201172, 0.03491315078735351, 0.03473612976074219, 0.03485696029663086, 0.0348485107421875, 0.0351541748046875, 0.0352685432434082, 0.0347413444519043, 0.03473712158203125, 0.035237342834472656, 0.03513753509521484, 0.03538383865356445, 0.03544009780883789, 0.03530806350708008, 0.03572531127929687, 0.036751361846923826, 0.03548345565795898, 0.035423614501953124, 0.035881248474121094, 0.03550467300415039, 0.03538739013671875, 0.03532329559326172, 0.03539206314086914, 0.03564547348022461, 0.03535257720947266, 0.03560784149169922, 0.035512577056884764, 0.03548617553710937, 0.035487743377685545, 0.03549388885498047, 0.03547891235351563, 0.03547366333007813, 0.03540620803833008, 0.035372127532958986, 0.03523632049560547, 0.03520959854125977, 0.03506105422973633, 0.035186561584472656, 0.03567494583129883, 0.03514572906494141, 0.03512934494018555, 0.03498364639282227, 0.03507952117919922, 0.03519174575805664, 0.035020801544189455, 0.03487948989868164, 0.03496755218505859, 0.03484262466430664, 0.03511040115356445, 0.03519744110107422, 0.03541401672363281, 0.03523379135131836, 0.03517440032958984, 0.035170143127441406, 0.03481411361694336, 0.034744129180908204, 0.03488172912597656, 0.034576385498046876, 0.034557823181152345, 0.03424063873291015, 0.034154495239257815, 0.03441788864135742, 0.03438467025756836, 0.037029888153076174, 
0.035211231231689455, 0.03552608108520508, 0.03515043258666992, 0.034770942687988284, 0.03414220809936523, 0.033941505432128906, 0.03404569625854492, 0.03401753616333008, 0.03417292785644531, 0.034430526733398435, 0.034678848266601565, 0.03433871841430664, 0.03454729461669922, 0.03484572982788086, 0.03494083023071289, 0.03496134567260742, 0.035099967956542966, 0.0349376335144043, 0.03477078247070312, 0.0350208625793457, 0.03505871963500977, 0.03474428939819336, 0.03481292724609375, 0.03486649703979492, 0.03511711883544922, 0.03524179077148438, 0.03532185745239258, 0.035574337005615235, 0.035531009674072266, 0.03546316909790039, 0.035846145629882815, 0.035606529235839846, 0.035343486785888674, 0.0354645767211914, 0.03505420684814453, 0.035176448822021485, 0.03544268798828125, 0.03518873596191406, 0.03529520034790039, 0.03529321670532227, 0.03527679824829102, 0.03489712142944336, 0.03487619018554688, 0.034926273345947265, 0.03464172744750976, 0.03451894378662109, 0.03409366226196289, 0.03397359848022461, 0.034136447906494144, 0.03403744125366211, 0.03415884780883789, 0.034567806243896486, 0.03496623992919922, 0.03539731216430664, 0.03526278305053711, 0.03448982238769531, 0.03434921646118164, 0.03457267379760742, 0.03461119842529297, 0.03432598495483399, 0.0340968017578125, 0.0341635856628418, 0.03441254425048828, 0.0341319694519043, 0.03420687866210938, 0.034417503356933596, 0.034432926177978516, 0.03453785705566406, 0.03518230438232422, 0.03462963104248047, 0.034336769104003906, 0.03411465454101562, 0.03420662307739258, 0.03413174438476563, 0.03431212615966797, 0.03434883117675781, 0.0346690559387207, 0.03433865737915039, 0.0342935676574707, 0.034386238098144534, 0.034772544860839846, 0.0350700798034668, 0.03532015991210938, 0.035237823486328125, 0.03465017700195312, 0.03433865737915039, 0.03420284652709961, 0.0343623046875, 0.03435238265991211, 0.034241279602050784, 0.03429935836791992, 0.034777599334716795, 0.035188766479492185, 0.036036609649658206, 0.038499744415283206, 0.03520719909667969, 0.03517129516601562, 0.03504537582397461, 0.03523993682861328, 0.035055583953857425, 0.03514780807495117, 0.03512115097045899, 0.035160064697265625, 0.03531161499023437, 0.03544220733642578, 0.03553327941894531, 0.035563518524169925, 0.034909759521484375, 0.035103168487548825, 0.03527065658569336, 0.035200191497802735, 0.035218238830566406, 0.03514108657836914, 0.035113502502441406, 0.03550207901000976, 0.03462144088745117, 0.03440639877319336, 0.034659488677978516, 0.03475129699707031, 0.034773025512695316, 0.03483852767944336, 0.03457583999633789, 0.03449494552612305, 0.03449043273925781, 0.03453952026367187, 0.03450435256958008, 0.03427363204956055, 0.03486537551879883, 0.033971134185791015, 0.03411145782470703, 0.03395651245117187, 0.0341662712097168, 0.03451337432861328, 0.03471811294555664, 0.03532783889770508, 0.03506380844116211, 0.03426303863525391, 0.03401520156860351, 0.03400668716430664, 0.03406476974487305, 0.03417683029174805, 0.034801856994628906, 0.03445916748046875, 0.034283103942871096, 0.0340447998046875, 0.03425238418579102, 0.03481209564208984, 0.03537468719482422, 0.035100318908691405, 0.03515203094482422, 0.03454035186767578, 0.034122913360595704, 0.034229057312011715, 0.0340992317199707, 0.03402547073364258, 0.03421120071411133, 0.035737823486328125, 0.035210784912109376, 0.035257217407226565, 0.03504127883911133, 0.03499008178710938, 0.03491132736206055, 0.0348675537109375, 0.03491897583007812, 0.03445897674560547, 0.03441651153564453, 0.03712080001831055, 
0.035399681091308595, 0.03523583984375, 0.036782081604003904, 0.037109695434570315, 0.03582287979125977, 0.03554563140869141, 0.035506431579589846, 0.035389438629150394, 0.035415294647216794, 0.035413791656494144, 0.03547548675537109, 0.03545622253417969, 0.035366622924804685, 0.035364479064941404, 0.03538275146484375, 0.03532217788696289, 0.03549193572998047, 0.035594753265380856, 0.03574745559692383, 0.03625203323364258, 0.03552774429321289, 0.03558060836791992, 0.03555763244628906, 0.03495305633544922, 0.03484457778930664, 0.0346769905090332, 0.034590721130371094, 0.03502284622192383, 0.034991584777832034, 0.03473174285888672, 0.03564764785766601, 0.035060192108154295, 0.03518483352661133, 0.03528195190429687, 0.03533513641357422, 0.03527801513671875, 0.03480384063720703, 0.034746623992919924, 0.03530387115478516, 0.03487321472167969, 0.03476831817626953, 0.034695552825927733, 0.03476095962524414, 0.03472339248657227, 0.034872127532958985, 0.03513516616821289, 0.03524198532104492, 0.03591696166992187, 0.03546335983276367, 0.03511324691772461, 0.03474879837036133, 0.03508838272094727, 0.0347770881652832, 0.0350511360168457, 0.03505599975585937, 0.03571875381469727, 0.035061504364013674, 0.03500838470458984, 0.03453212738037109, 0.03480575942993164, 0.0349306869506836, 0.03484000015258789, 0.03533881759643555, 0.03509833526611328, 0.03489616012573242, 0.0349268798828125, 0.03509219360351563, 0.035198272705078124, 0.03533075332641602, 0.03537705612182617, 0.03622739028930664, 0.03563875198364258, 0.03515203094482422, 0.03519705581665039, 0.03530704116821289, 0.03549542236328125, 0.035326366424560544, 0.035379806518554685, 0.03522294235229492, 0.03531235122680664, 0.03549580764770508, 0.03590332794189453, 0.035435871124267576, 0.035369407653808596, 0.035387775421142575, 0.03563888168334961, 0.03552297592163086, 0.035501888275146484, 0.03539081573486328, 0.03534499359130859, 0.0353771858215332, 0.03547558212280273, 0.03528099060058594, 0.035393024444580076, 0.03529369735717774, 0.037386302947998044, 0.035485313415527346, 0.03571744155883789, 0.0357367057800293, 0.03525875091552735, 0.035153854370117185, 0.03526099014282227, 0.034840576171875, 0.0347147216796875, 0.03497804641723633, 0.0349697265625, 0.03470956802368164, 0.034603519439697264, 0.03449238586425781, 0.034506752014160154, 0.03481804656982422, 0.03508224105834961, 0.03503440093994141, 0.034878177642822264, 0.034979808807373045, 0.03486313629150391, 0.03493427276611328, 0.035059326171875, 0.035415935516357425, 0.03494051361083984, 0.03489440155029297, 0.035221790313720705, 0.03553091049194336, 0.035016254425048826, 0.0348205451965332, 0.03479904174804688, 0.034681407928466794, 0.03489382553100586, 0.03592182540893555, 0.0352789421081543, 0.03539948654174805, 0.03545721435546875, 0.035363998413085934, 0.0354920654296875, 0.03530307388305664, 0.0352655029296875, 0.03509360122680664, 0.035120033264160154, 0.03504451370239258, 0.03522022247314453, 0.03515135955810547, 0.035243648529052735, 0.03523273468017578, 0.03518668746948242, 0.035266559600830076, 0.034987648010253905, 0.03502726364135742, 0.03532396697998047, 0.03542809677124024, 0.035696384429931644, 0.03579248046875, 0.03556444931030273, 0.03528704071044922, 0.03539936065673828, 0.03528940963745117, 0.03530956649780274, 0.035356670379638674, 0.0354977912902832, 0.03547926330566406, 0.03548003387451172, 0.035350528717041016, 0.03551027297973633, 0.03543212890625, 0.035630943298339844, 0.035633121490478516, 0.035452735900878905, 0.035364639282226565, 0.03544566345214844, 
0.03556265640258789, 0.03561510467529297, 0.0349920654296875, 0.03508892822265625, 0.034659713745117185, 0.034736766815185546, 0.03424415969848633, 0.03446623992919922, 0.03449856185913086, 0.03477459335327148, 0.0355615348815918, 0.03523827362060547, 0.03491779327392578, 0.03475545501708984, 0.03476070404052734, 0.03522355270385742, 0.0367646713256836, 0.03509891128540039, 0.03554377746582031, 0.034874366760253905, 0.0345241584777832, 0.03429580688476563, 0.034342910766601564, 0.0344567985534668, 0.0344699821472168, 0.03467129516601562, 0.03467193603515625, 0.034624191284179685, 0.03455385589599609, 0.034624671936035155, 0.034640609741210936, 0.03495686340332031, 0.03533062362670898, 0.03586969757080078, 0.03521638488769531, 0.03489177703857422, 0.034862529754638674, 0.03449033737182617, 0.034226463317871096, 0.03459427261352539, 0.034336990356445315, 0.03495337677001953, 0.03497622299194336, 0.03616358566284179, 0.035243358612060544, 0.0354699821472168, 0.0353546257019043, 0.03530752182006836, 0.03498393630981445, 0.03502489471435547, 0.03458819198608398, 0.03580096054077148, 0.0356192626953125, 0.035313121795654295, 0.036536510467529294, 0.035461631774902344, 0.03524784088134766, 0.035247936248779296, 0.03529331207275391, 0.03537516784667969, 0.03537539291381836, 0.0355491828918457, 0.035776512145996094, 0.03547750473022461, 0.03540316772460937, 0.0353364143371582, 0.03523417663574219, 0.035383296966552735, 0.03549967956542969, 0.03543689727783203, 0.03537209701538086, 0.03613792037963867, 0.03610201644897461, 0.0354837760925293, 0.035532768249511716, 0.03546268844604492, 0.035348480224609374, 0.03739852905273437, 0.03537919998168945, 0.035896320343017575, 0.03534038543701172, 0.03503811264038086, 0.035034175872802734, 0.03514054489135742, 0.035964641571044925, 0.03623555374145508, 0.03521852874755859, 0.035772865295410156, 0.034969470977783204, 0.035022815704345706, 0.035076736450195316, 0.03474771118164063, 0.035490497589111325, 0.034887680053710936, 0.03568435287475586, 0.03553843307495117, 0.03615212631225586, 0.035347999572753905, 0.03517987060546875, 0.03491904067993164, 0.035001953125, 0.03480806350708008, 0.03579939270019531, 0.034927806854248046, 0.03495139312744141, 0.034834144592285156, 0.03512368011474609, 0.034861473083496096, 0.03504467010498047, 0.034966209411621096, 0.035074047088623043, 0.03502284622192383, 0.03560857772827149, 0.035279006958007814, 0.03615727996826172, 0.03494297790527344, 0.03499980926513672, 0.034943359375, 0.03521484756469727, 0.035172065734863284, 0.03604777526855469, 0.03783712005615234, 0.03567728042602539, 0.035101280212402344, 0.03517747116088867, 0.03513651275634765, 0.035272705078125, 0.03517011260986328, 0.03841574478149414, 0.035221790313720705, 0.036342304229736326, 0.03538307189941406, 0.035786975860595704, 0.03555942535400391, 0.03530947113037109, 0.035375198364257815, 0.035438591003417966, 0.03531980895996094, 0.03537100982666016, 0.03551855850219727, 0.035337791442871094, 0.03531955337524414, 0.035340545654296875, 0.03538985443115234, 0.035019935607910155, 0.03544460678100586, 0.034786270141601565, 0.03489980697631836, 0.03477065658569336, 0.03491680145263672, 0.034796897888183596, 0.034934944152832034, 0.034926975250244144, 0.03606560134887695, 0.034899776458740234, 0.0346943359375, 0.03472409439086914, 0.03470937728881836, 0.03452384185791016, 0.03446783828735352, 0.03472150421142578, 0.0349268798828125, 0.035143680572509765, 0.03602022552490235, 0.035323070526123046, 0.03536729431152344, 0.03506016159057617, 0.03491561508178711, 
0.034767742156982424, 0.03481379318237305, 0.03486492919921875, 0.034930912017822266, 0.034842079162597656, 0.03534646224975586, 0.035000831604003906, 0.03500425720214844, 0.03494879913330078, 0.03503766250610352, 0.03518409729003906, 0.034957855224609376, 0.034961406707763674, 0.03590550231933594, 0.03496758270263672, 0.03478694534301758, 0.03472592163085937, 0.03471193695068359, 0.03536816024780273, 0.034773696899414064, 0.03620678329467773, 0.03587392044067383, 0.03559436798095703, 0.03541862487792969, 0.035883041381835935, 0.03528409576416015, 0.0352402572631836, 0.03512960052490234, 0.03550249481201172, 0.035054977416992185, 0.03525081634521485, 0.03535257720947266, 0.03579289627075195, 0.03538460922241211]",tokens/s,28.477777390356394,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 
616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 20979 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1183.674368,8106.47552,0.0,7711.227904,7603.953664,s,1,18.6817109375,18.6817109375,0.0,18.6817109375,18.6817109375,18.6817109375,18.6817109375,[18.6817109375],,kWh,0.00033669053976249566,3.7132231749903233e-05,0.00012545315591799833,0.0004992759274303972,,MB,1253.863424,9870.180352,0.0,9462.349824,8756.635648,s,10,16.76980871582031,1.6769808715820311,0.007806340836157971,1.6786487426757812,1.6843089477539064,1.6844797302246093,1.6846163562011718,"[1.657221923828125, 1.6715146484375, 1.6737459716796874, 1.6762406005859376, 1.681699951171875, 1.6785615234375, 1.68427099609375, 1.6787359619140625, 1.6846505126953124, 1.6831666259765625]",tokens/s,152.65528923922346,kWh,4.876559852499895e-05,5.377804635790192e-06,3.2354359216800466e-05,8.64977623775896e-05,tokens/kWh,2959614.1329353754,MB,1275.916288,9870.180352,0.0,9462.349824,8756.638208,s,10,85.37864160156249,8.53786416015625,0.02907519521229494,8.53781201171875,8.56888759765625,8.574248486328125,8.578537197265625,"[8.4894189453125, 8.5044462890625, 8.509966796875, 8.5240771484375, 8.54167578125, 8.5339482421875, 8.56262109375, 8.5676962890625, 8.565181640625, 
8.579609375]",tokens/s,7.378894629643188,kWh,0.0002507171967387508,2.7656671500138203e-05,0.0001667083000331998,0.00044508216827208894,tokens/kWh,141546.89738431995,,s,630,85.37453198242181,0.13551513013082836,0.0017829660825156956,0.13535097503662108,0.1369326644897461,0.13733006439208986,0.14550091888427735,"[0.14503750610351562, 0.1334913330078125, 0.13315423583984376, 0.13340736389160157, 0.13329714965820313, 0.13323365783691407, 0.13481744384765626, 0.1373098907470703, 0.13452137756347657, 0.13359321594238283, 0.13402316284179688, 0.1335377960205078, 0.13362995910644532, 0.1343098907470703, 0.13658262634277343, 0.13452546691894532, 0.1343544921875, 0.13419155883789063, 0.13361497497558594, 0.13364083862304688, 0.13375709533691407, 0.136114013671875, 0.1344893798828125, 0.13402799987792968, 0.13494595336914061, 0.1335386505126953, 0.13431298828125, 0.13384332275390626, 0.13475202941894532, 0.1352056884765625, 0.13469255065917968, 0.13409837341308595, 0.13504396057128906, 0.13362789916992188, 0.1340968933105469, 0.13449830627441406, 0.13580224609375, 0.1355679931640625, 0.13494886779785156, 0.13395968627929689, 0.13430400085449218, 0.13443193054199218, 0.13516473388671876, 0.1347574005126953, 0.13545053100585938, 0.1344514617919922, 0.13368173217773438, 0.13453926086425783, 0.1356366424560547, 0.13461538696289063, 0.13555917358398437, 0.13463282775878907, 0.13620903015136718, 0.13417266845703124, 0.13549977111816405, 0.1350963134765625, 0.134944580078125, 0.13505964660644532, 0.13539564514160157, 0.13575538635253906, 0.1352458953857422, 0.13436671447753906, 0.13441424560546875, 0.14488575744628907, 0.13253984069824218, 0.13325164794921876, 0.1340702667236328, 0.13340467834472655, 0.13323622131347657, 0.13425100708007812, 0.13889564514160158, 0.134712646484375, 0.1334024963378906, 0.1339449005126953, 0.1336658172607422, 0.1336524200439453, 0.13396995544433593, 0.1373159942626953, 0.1350575714111328, 0.13430764770507814, 0.13389596557617187, 0.13401350402832032, 0.13368896484375, 0.1335422668457031, 0.13611007690429688, 0.13640908813476563, 0.13427285766601563, 0.13401884460449218, 0.13508872985839843, 0.13504620361328126, 0.13383456420898437, 0.13468150329589842, 0.13643775939941405, 0.1355018310546875, 0.13456793212890625, 0.13526425170898437, 0.13365440368652343, 0.1347624969482422, 0.13535606384277343, 0.13669424438476563, 0.13499122619628906, 0.1353877716064453, 0.1350200958251953, 0.1350528564453125, 0.13455859375, 0.13430374145507812, 0.13581517028808593, 0.13485874938964842, 0.1353502655029297, 0.1351127014160156, 0.13484831237792969, 0.13406553649902345, 0.13567674255371093, 0.13539251708984376, 0.1359797821044922, 0.1345966033935547, 0.13577830505371094, 0.13502204895019532, 0.1343877716064453, 0.13564915466308594, 0.13477743530273437, 0.1358561248779297, 0.13440614318847657, 0.13477389526367187, 0.13546524047851563, 0.1355078125, 0.145870849609375, 0.13322239685058593, 0.13324070739746094, 0.13331263732910156, 0.13322650146484374, 0.13332275390625, 0.13514956665039063, 0.13808396911621093, 0.13474234008789063, 0.1348811492919922, 0.1334417266845703, 0.13365843200683594, 0.13427558898925782, 0.13500384521484374, 0.13699468994140626, 0.13470118713378906, 0.13458822631835937, 0.13401231384277343, 0.1332825927734375, 0.13429756164550782, 0.1338880310058594, 0.13574758911132812, 0.1359418487548828, 0.13554495239257813, 0.13464796447753907, 0.133998046875, 0.1344027862548828, 0.133899169921875, 0.13530767822265624, 0.1354874267578125, 0.13577069091796876, 0.13495706176757813, 
0.13450650024414063, 0.13514051818847655, 0.13405270385742188, 0.13530316162109374, 0.13647021484375, 0.1350863952636719, 0.13499801635742187, 0.13406617736816406, 0.13445712280273436, 0.1345364532470703, 0.13512188720703125, 0.1357864990234375, 0.13500355529785157, 0.1350469512939453, 0.13492921447753906, 0.13460479736328124, 0.1352948760986328, 0.13447177124023438, 0.13629849243164063, 0.1360341796875, 0.13469503784179687, 0.13681990051269532, 0.13467936706542968, 0.13575733947753907, 0.1345970916748047, 0.13604249572753907, 0.1353502655029297, 0.13544972229003907, 0.1349866180419922, 0.13528268432617188, 0.1358233642578125, 0.1459324493408203, 0.13329661560058595, 0.13322650146484374, 0.13314387512207032, 0.13316365051269533, 0.1335665283203125, 0.13628005981445312, 0.13920637512207032, 0.13460304260253905, 0.1342270965576172, 0.13344650268554686, 0.1332791748046875, 0.13422239685058593, 0.13487872314453125, 0.13769520568847657, 0.13548597717285157, 0.13462937927246094, 0.13324208068847657, 0.1344438018798828, 0.1339842529296875, 0.1348239288330078, 0.13693084716796874, 0.1371345672607422, 0.13525814819335938, 0.13387977600097656, 0.13432797241210936, 0.13362985229492189, 0.13380038452148438, 0.1358473663330078, 0.13662879943847656, 0.13572096252441407, 0.1353236541748047, 0.1346007080078125, 0.1339330596923828, 0.13425654602050782, 0.13563818359375, 0.1359014434814453, 0.13708912658691405, 0.13549411010742188, 0.13508221435546874, 0.13429318237304688, 0.1345410919189453, 0.1359302978515625, 0.13624876403808595, 0.13499040222167968, 0.135888671875, 0.13586044311523438, 0.13530490112304688, 0.13391065979003905, 0.13595440673828124, 0.13593212890625, 0.13600358581542968, 0.13556687927246094, 0.13495578002929687, 0.13535308837890625, 0.13442726135253907, 0.13619580078125, 0.13503961181640625, 0.13604454040527345, 0.13646553039550782, 0.13585087585449218, 0.1356216278076172, 0.13602706909179688, 0.1452974395751953, 0.13383775329589845, 0.13357466125488282, 0.13369139099121094, 0.13388121032714845, 0.13359907531738283, 0.13654095458984375, 0.1398190155029297, 0.13443685913085937, 0.13391871643066405, 0.13370994567871095, 0.13338201904296876, 0.1333634490966797, 0.13611958312988282, 0.13869052124023437, 0.13641612243652343, 0.1351619873046875, 0.13416435241699218, 0.1345108184814453, 0.13352540588378906, 0.1342319030761719, 0.1381643829345703, 0.13608982849121093, 0.13523741149902344, 0.13490322875976563, 0.13481837463378907, 0.13437132263183593, 0.13480140686035155, 0.13680230712890626, 0.1365166778564453, 0.1360987548828125, 0.13479437255859375, 0.13604135131835937, 0.1338040313720703, 0.13411705017089845, 0.13706681823730468, 0.13585594177246094, 0.13595242309570313, 0.13627769470214843, 0.13578073120117187, 0.13446102905273438, 0.13472982788085938, 0.1360993347167969, 0.13635401916503906, 0.13623773193359376, 0.13641731262207032, 0.1369490203857422, 0.13575241088867188, 0.13432217407226563, 0.13574925231933593, 0.13638217163085936, 0.13664527893066405, 0.13620223999023437, 0.1355091552734375, 0.1345211486816406, 0.1363113250732422, 0.1346068420410156, 0.13503427124023437, 0.1351399383544922, 0.13730816650390626, 0.1362960968017578, 0.13546678161621092, 0.1354040985107422, 0.14780621337890626, 0.13320838928222656, 0.13316432189941407, 0.13317161560058594, 0.133212158203125, 0.13333241271972657, 0.13649772644042968, 0.13985980224609376, 0.13513743591308594, 0.13435699462890624, 0.13371186828613282, 0.1334640655517578, 0.1334878387451172, 0.13588493347167968, 0.1373231658935547, 
0.13557501220703125, 0.13497708129882813, 0.13426502990722655, 0.13324688720703126, 0.13393577575683593, 0.1358031005859375, 0.13588070678710937, 0.1371279296875, 0.13555302429199217, 0.13468812561035157, 0.13397782897949218, 0.13536349487304689, 0.13538304138183593, 0.13604591369628907, 0.13689248657226563, 0.13678236389160156, 0.13455162048339844, 0.13492544555664063, 0.1339646759033203, 0.13538713073730468, 0.13589442443847657, 0.1361634521484375, 0.13719920349121092, 0.13508082580566405, 0.13513113403320312, 0.13438473510742188, 0.13481053161621093, 0.13586534118652344, 0.13508819580078124, 0.13612742614746093, 0.13577830505371094, 0.13442658996582033, 0.13456591796875, 0.13475401306152343, 0.13650767517089843, 0.13672242736816406, 0.13512908935546875, 0.1371893768310547, 0.1351334686279297, 0.13545443725585937, 0.13485865783691406, 0.13498774719238282, 0.13648696899414062, 0.13594834899902344, 0.1353191680908203, 0.13643405151367188, 0.13543177795410155, 0.13473423767089843, 0.14608828735351562, 0.1337523193359375, 0.1340894775390625, 0.13474319458007813, 0.13424234008789063, 0.1340155487060547, 0.1370951690673828, 0.13830450439453126, 0.13548646545410156, 0.13473178100585936, 0.13463763427734374, 0.13404736328125, 0.13431634521484376, 0.1371249542236328, 0.13918505859375, 0.1361297607421875, 0.13467884826660156, 0.135004638671875, 0.13536869812011718, 0.13458009338378907, 0.13550563049316405, 0.13770515441894532, 0.13660643005371093, 0.13503231811523436, 0.135531005859375, 0.13522262573242189, 0.13402793884277345, 0.13648895263671876, 0.13669187927246093, 0.1369124755859375, 0.135180419921875, 0.1355076141357422, 0.1353955841064453, 0.13501817321777343, 0.1361864013671875, 0.13557942199707032, 0.13668783569335938, 0.1360747528076172, 0.13549618530273438, 0.13612031555175783, 0.13466447448730468, 0.1353516845703125, 0.1362660827636719, 0.13597894287109374, 0.13594834899902344, 0.13562617492675783, 0.13677215576171875, 0.13488931274414062, 0.1359687042236328, 0.13596627807617187, 0.13632374572753905, 0.13536627197265624, 0.13524755859375, 0.1369647979736328, 0.1362821044921875, 0.1362530517578125, 0.13645558166503907, 0.13577279663085937, 0.1361862030029297, 0.1370091552734375, 0.13520889282226561, 0.13632313537597657, 0.1368046112060547, 0.1455840301513672, 0.1336012725830078, 0.13409280395507814, 0.13309336853027343, 0.13383648681640625, 0.13548985290527343, 0.13782591247558593, 0.13838128662109375, 0.13501072692871094, 0.13514483642578126, 0.13361602783203125, 0.13509039306640624, 0.1341881866455078, 0.1371300506591797, 0.1392893829345703, 0.13580650329589844, 0.1352073974609375, 0.13523269653320313, 0.13443267822265625, 0.13363189697265626, 0.13652479553222657, 0.13731021118164063, 0.1376922607421875, 0.13528883361816407, 0.13543721008300783, 0.1345840301513672, 0.13496726989746094, 0.13634959411621095, 0.13623741149902344, 0.13667129516601562, 0.13594009399414062, 0.134619140625, 0.13478501892089845, 0.13531706237792968, 0.13591127014160156, 0.13635382080078126, 0.13691542053222655, 0.13547938537597656, 0.13529405212402343, 0.1360086669921875, 0.13586119079589845, 0.13474620056152345, 0.13733570861816408, 0.13639488220214843, 0.13710322570800781, 0.13610598754882813, 0.135546875, 0.13673193359375, 0.13557411193847657, 0.1362248992919922, 0.13619200134277343, 0.13749261474609376, 0.1364253387451172, 0.13604249572753907, 0.1358028869628906, 0.13529866027832033, 0.13682730102539062, 0.13697433471679688, 0.13724172973632812, 0.1361580810546875, 0.1359862060546875, 0.13632406616210938, 
0.1354764404296875, 0.145797119140625, 0.13538099670410156, 0.13376307678222657, 0.13361561584472656, 0.13489884948730468, 0.13364720153808593, 0.13726934814453126, 0.13888050842285157, 0.13574700927734376, 0.13483721923828124, 0.13424339294433593, 0.13479417419433593, 0.13563285827636717, 0.13633251953125, 0.13727008056640624, 0.1366585235595703, 0.1357619171142578, 0.13385093688964844, 0.13528643798828124, 0.13527340698242188, 0.13575814819335938, 0.13584317016601563, 0.13656857299804687, 0.13531607055664063, 0.1358356475830078, 0.13492617797851564, 0.1351273651123047, 0.13570236206054687, 0.1363066864013672, 0.13705421447753907, 0.13637632751464843, 0.13619319152832032, 0.13608636474609376, 0.13488517761230467, 0.13519891357421876, 0.1350672607421875, 0.136255615234375, 0.13580227661132813, 0.13620903015136718, 0.13567202758789063, 0.13520828247070313, 0.13586051940917968, 0.1360388488769531, 0.136406982421875, 0.13674085998535157, 0.13538453674316406, 0.13651766967773438, 0.13479974365234376, 0.1364026184082031, 0.13526060485839844, 0.13714019775390626, 0.13637420654296875, 0.1359228515625, 0.1363527069091797, 0.1353564147949219, 0.13555302429199217, 0.13667123413085938, 0.13603021240234375, 0.13699026489257812, 0.13567840576171875, 0.1362124786376953, 0.13600563049316405, 0.1367403564453125, 0.14699113464355468, 0.13387472534179687, 0.13391357421875, 0.13472767639160158, 0.1345425262451172, 0.13478175354003907, 0.13681869506835936, 0.13851852416992189, 0.13541171264648438, 0.13432421875, 0.13475978088378907, 0.13447251892089843, 0.13559178161621094, 0.1364357452392578, 0.13861474609375, 0.13665267944335938, 0.135243896484375, 0.13519155883789064, 0.13405206298828126, 0.13541030883789062, 0.13636778259277343, 0.13727798461914062, 0.13643568420410157, 0.13672857666015625, 0.13542588806152345, 0.13488758850097657, 0.13487103271484374, 0.13561155700683594, 0.1368603515625, 0.13672486877441406, 0.1361784973144531, 0.13545916748046874, 0.13529356384277344, 0.13487309265136718, 0.13665823364257812, 0.13656544494628906, 0.13702554321289062, 0.13704937744140624, 0.13605142211914062, 0.13554278564453126, 0.13460887145996095, 0.13723651123046876, 0.1362879638671875, 0.1377421417236328, 0.1365768280029297, 0.13636265563964844, 0.13587455749511718, 0.13572402954101562, 0.13536358642578125, 0.1364910125732422, 0.13646847534179687, 0.13640106201171875, 0.1365215606689453, 0.13566278076171875, 0.13716966247558593, 0.13668768310546875, 0.13660563659667968, 0.13590739440917968, 0.1374720001220703, 0.13620428466796874, 0.13658067321777342, 0.1365852508544922, 0.13644023132324218]",tokens/s,7.379249822765804,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,1054.191616,3785.228288,0.0,3399.483392,3384.12032,s,1,12.161453125,12.161453125,0.0,12.161453125,12.161453125,12.161453125,12.161453125,[12.161453125],,kWh,0.00014610010388747317,1.6107199817231327e-05,4.783142715399091e-05,0.0002100387308586954,,MB,1446.629376,4227.72736,0.0,3812.622336,3664.402944,s,10,6.187497619628907,0.6187497619628907,0.0025141291697302723,0.6176084594726563,0.6229623840332031,0.6230375579833984,0.6230976971435547,"[0.6183540649414062, 0.6168628540039063, 0.6167472534179688, 0.616707763671875, 0.6165150146484375, 0.6231127319335937, 0.6163646240234375, 0.619190185546875, 0.6206974487304687, 0.6229456787109375]",tokens/s,413.73753290486684,kWh,1.8456895592188024e-05,2.0346756635753968e-06,1.2258464667875592e-05,3.275003592363901e-05,tokens/kWh,7816785.318858808,MB,1479.397376,4227.72736,0.0,3812.622336,3664.405504,s,10,45.193626953125,4.5193626953125,0.006956637912706627,4.517989501953124,4.529841015625,4.53167880859375,4.53314904296875,"[4.51815234375, 4.51326318359375, 4.51503271484375, 4.51782666015625, 4.52277392578125, 4.51835546875, 4.51574365234375, 4.50952978515625, 4.5335166015625, 4.5294326171875]",tokens/s,13.940018592741811,kWh,0.00013098096358447642,1.4448730752151676e-05,7.79907464203233e-05,0.0002234204407569514,tokens/kWh,281979.5708331573,,s,630,45.18917457580562,0.07172884853302487,0.0010552417404854225,0.07143799972534179,0.07228681640625,0.0736268222808838,0.07721973121643066,"[0.07689440155029297, 0.07173824310302734, 0.07149459075927735, 0.0712640609741211, 0.07132418823242187, 0.07128352355957031, 0.07134259033203125, 0.07130076599121093, 0.07107859039306641, 0.07107695770263672, 0.07133241271972657, 0.07124412536621094, 0.07136051177978515, 0.07136592102050782, 0.0711789779663086, 0.07189504241943359, 0.07418470764160157, 0.07203411102294922, 0.07270761871337891, 0.07127884674072266, 0.07129049682617188, 0.07160111999511719, 0.07146425628662109, 0.0713752670288086, 0.0714937286376953, 0.07124355316162109, 0.07139759826660157, 0.07116118621826172, 0.07138380432128906, 0.07115766143798828, 0.07164313507080078, 0.07138435363769531, 0.07144096374511719, 0.07111811065673829, 0.07127129364013672, 0.07161167907714844, 0.07152230072021484, 0.07126457977294921, 0.07107561492919921, 0.07610022735595703, 0.07182720184326172, 0.07120921325683593, 0.07130931091308594, 0.07121305847167969, 0.07153052520751953, 0.07209776306152343, 0.07150563049316407, 0.07161430358886718, 0.07162319946289063, 0.07142390441894532, 0.07149890899658203, 0.07199830627441406, 0.07129078674316407, 0.07160431671142578, 0.07194175720214843, 0.07133808135986328, 0.07175606536865234, 0.0715857925415039, 0.0719092788696289, 0.07181935882568359, 0.07258214569091796, 0.07190630340576172, 0.07176601409912109, 0.07717536163330078, 0.07207936096191406, 0.07132681274414063, 0.07106610870361328, 0.07080387115478516, 0.07474995422363281, 0.07166537475585938, 0.07141110229492187, 0.07100447845458985, 0.07105747222900391, 0.0715125732421875, 0.07068281555175782, 0.07105721282958985, 0.07104528045654297, 0.07108553314208985, 0.07104966735839843, 0.07145651245117188, 0.07126649475097656, 0.07098368072509766, 0.07157324981689453, 0.07130694580078124, 0.07101042938232421, 0.07125446319580078, 0.07127449798583985, 0.07202611541748047, 0.07323033905029297, 0.07141094207763672, 0.07109913635253906, 0.07107379150390625, 0.07309926605224609, 0.07114342498779297, 0.07175740814208985, 0.07112131500244141, 0.0717633285522461, 
0.07111504364013672, 0.07130915069580078, 0.07158175659179687, 0.07133055877685547, 0.07127827453613281, 0.07139328002929687, 0.07162210845947266, 0.0719993896484375, 0.07156995391845702, 0.071955810546875, 0.07130182647705079, 0.07149747467041015, 0.07139974212646484, 0.07334012603759765, 0.07160502624511719, 0.07126802825927735, 0.07147756958007813, 0.07153049468994141, 0.07165440368652344, 0.07150643157958984, 0.07165984344482422, 0.07149747467041015, 0.0714285430908203, 0.07202374267578125, 0.07147737884521485, 0.07145491027832031, 0.07144652557373046, 0.071970947265625, 0.07152012634277344, 0.0773038101196289, 0.07134553527832031, 0.07155315399169922, 0.07189965057373048, 0.07134534454345703, 0.07106166076660156, 0.07110313415527343, 0.07114899444580078, 0.07120857238769532, 0.07127900695800782, 0.0711193618774414, 0.07103011322021484, 0.07359334564208984, 0.07239289855957032, 0.07130726623535157, 0.07108198547363281, 0.07171209716796875, 0.07233993530273437, 0.07148921966552735, 0.07120464324951171, 0.07136927795410156, 0.07119478607177734, 0.07131900787353515, 0.071349853515625, 0.07123174285888671, 0.07145731353759766, 0.07135167694091797, 0.07191363525390625, 0.07133452606201172, 0.07119590759277344, 0.0754840316772461, 0.07208946990966797, 0.07111475372314453, 0.07128678131103515, 0.07099187469482422, 0.07694131469726563, 0.07139238739013672, 0.07140060424804688, 0.0713864974975586, 0.07119725036621094, 0.07128348541259766, 0.07178956604003907, 0.07109043121337891, 0.07164697265625, 0.07096486663818359, 0.07125030517578125, 0.07113113403320312, 0.071372802734375, 0.07134413146972657, 0.07105661010742187, 0.07201606750488282, 0.07135014343261718, 0.07134076690673828, 0.07130726623535157, 0.07127040100097656, 0.07148953247070312, 0.07110620880126953, 0.07113747406005859, 0.07125417327880859, 0.07142822265625, 0.07152588653564453, 0.07160034942626953, 0.07131948852539062, 0.07827446746826172, 0.07679395294189453, 0.07145452880859375, 0.07157891082763672, 0.07155190277099609, 0.07134617614746094, 0.07173292541503906, 0.07170899200439453, 0.07151119995117187, 0.07122211456298828, 0.07119667053222656, 0.07110771179199218, 0.07104716491699219, 0.07134623718261719, 0.07118726348876953, 0.07172003173828125, 0.07217411041259765, 0.07144409942626953, 0.07173193359375, 0.07129090881347656, 0.07138636779785157, 0.07494937896728515, 0.07139942169189453, 0.07124559783935547, 0.07121913909912109, 0.0717537612915039, 0.07107097625732423, 0.07112115478515625, 0.07109273529052734, 0.07125631713867188, 0.071110595703125, 0.07144454193115235, 0.07156886291503907, 0.07149152374267578, 0.07113072204589843, 0.07119564819335937, 0.07107373046875, 0.07129430389404297, 0.07100185394287109, 0.07147618865966797, 0.07186815643310547, 0.07138329315185547, 0.07170662689208984, 0.07361126708984375, 0.07148073577880859, 0.07123983764648438, 0.07136914825439453, 0.07134355163574219, 0.07150035095214843, 0.07156735992431641, 0.07133734130859375, 0.07147993469238281, 0.0716779556274414, 0.07331788635253907, 0.07138687896728516, 0.0710233612060547, 0.0712580795288086, 0.0717168960571289, 0.07153663635253907, 0.07104086303710938, 0.07113948822021485, 0.0714260482788086, 0.07127027130126953, 0.07725312042236328, 0.07148544311523437, 0.07132736206054688, 0.07148377227783204, 0.07144992065429688, 0.07146566772460937, 0.07114281463623047, 0.07121094512939453, 0.07483209228515625, 0.07171878051757813, 0.07197990417480468, 0.0714668197631836, 0.07121497344970704, 0.07125401306152344, 0.071761474609375, 0.07175827026367188, 
0.07138508605957031, 0.07164272308349609, 0.0716082534790039, 0.07151152038574218, 0.07136358642578125, 0.07187395477294922, 0.07191613006591797, 0.07154483032226562, 0.0717619171142578, 0.07154589080810547, 0.07143523406982422, 0.07164518737792969, 0.07179264068603515, 0.07141785430908203, 0.07157321929931641, 0.07168029022216797, 0.07388979339599609, 0.0718704605102539, 0.07119615936279297, 0.07172761535644531, 0.07135417938232422, 0.07195053100585938, 0.07199964904785157, 0.07205875396728516, 0.07193186950683594, 0.0721082534790039, 0.0715296630859375, 0.07203286743164063, 0.07138098907470704, 0.07133507537841798, 0.07136547088623046, 0.07179779052734375, 0.07157049560546876, 0.07159539031982422, 0.07182099151611328, 0.07179939270019531, 0.07147955322265626, 0.07168614196777344, 0.0718130874633789, 0.07217155456542969, 0.07165721893310546, 0.07158732604980468, 0.071272705078125, 0.07148716735839844, 0.0713707504272461, 0.0715374755859375, 0.07143955230712891, 0.07763302612304687, 0.07178240203857422, 0.07142861175537109, 0.07145622253417969, 0.07135469055175782, 0.07136892700195313, 0.0720447998046875, 0.07191455841064454, 0.07130182647705079, 0.07131712341308594, 0.07129718780517579, 0.07132710266113282, 0.071994140625, 0.07126624298095703, 0.07127884674072266, 0.07135833740234375, 0.07112499237060547, 0.07454710388183594, 0.07214403533935547, 0.07144921875, 0.07260537719726562, 0.07152082824707032, 0.07217577362060547, 0.07158953857421875, 0.07457974243164063, 0.07195875549316406, 0.07135462188720704, 0.0711720962524414, 0.071446044921875, 0.07150758361816406, 0.07135343933105469, 0.07131314849853515, 0.07195648193359375, 0.07153443145751953, 0.07143644714355468, 0.0722841567993164, 0.07181887817382812, 0.0712034912109375, 0.07125373077392579, 0.07222067260742188, 0.07172710418701173, 0.0712069091796875, 0.07126761627197266, 0.07121993255615235, 0.07138508605957031, 0.07148060607910156, 0.07113597106933593, 0.07145401763916015, 0.07137133026123046, 0.07202828979492187, 0.07201990509033203, 0.07137820434570312, 0.07141385650634766, 0.07141024017333984, 0.0716628189086914, 0.07127747344970703, 0.07143628692626953, 0.07114057922363282, 0.07126006317138672, 0.07141670227050781, 0.07098553466796875, 0.07120883178710938, 0.07134579467773437, 0.07768492889404296, 0.07141532897949218, 0.07092022705078126, 0.07138563537597656, 0.07404729461669922, 0.07151430511474609, 0.07142809295654297, 0.0708682861328125, 0.07135475158691407, 0.07106310272216797, 0.07132796478271484, 0.07110409545898437, 0.07100511932373046, 0.07094477081298828, 0.07322380828857422, 0.07121075439453126, 0.07099449920654297, 0.07158895874023438, 0.07135731506347656, 0.0719152603149414, 0.07086665344238281, 0.07110921478271484, 0.07100418853759766, 0.07098777770996094, 0.07151942443847656, 0.07171360015869141, 0.07120896148681641, 0.07108214569091797, 0.07117606353759766, 0.07485027313232422, 0.07100211334228515, 0.07120857238769532, 0.07082415771484375, 0.07213314819335938, 0.0715383071899414, 0.07112089538574219, 0.07169010925292969, 0.07198925018310547, 0.07199686431884765, 0.07162131500244141, 0.07162060546875, 0.07180697631835938, 0.07152230072021484, 0.07227801513671875, 0.07147119903564453, 0.07164636993408204, 0.07168281555175782, 0.07231075286865235, 0.07214870452880859, 0.07180233764648437, 0.07182566070556641, 0.07131196594238282, 0.07185215759277344, 0.07127542114257812, 0.07120790100097656, 0.07213056182861328, 0.07123353576660156, 0.07121510314941407, 0.07109942626953125, 0.0716456298828125, 0.07109481811523438, 
0.07199263763427734, 0.07210626983642578, 0.07723785400390625, 0.07126838684082032, 0.07111238098144532, 0.07170035552978515, 0.07115238189697265, 0.07124588775634766, 0.07168137359619141, 0.07104783630371093, 0.07210790252685546, 0.07097328186035157, 0.07126044464111328, 0.07093762969970703, 0.07105152130126953, 0.0734788818359375, 0.07159577941894531, 0.0712357406616211, 0.07125001525878906, 0.07127177429199219, 0.07236675262451171, 0.07249305725097656, 0.07119388580322265, 0.07111753845214844, 0.07173490905761719, 0.07105305480957032, 0.07149378967285157, 0.07199689483642578, 0.07115878295898438, 0.07175965118408204, 0.0711759033203125, 0.07117459106445312, 0.07111481475830078, 0.07153868865966796, 0.07275724792480469, 0.07170662689208984, 0.0714134750366211, 0.07322799682617187, 0.07128121948242187, 0.07095091247558594, 0.07116384124755859, 0.0709521255493164, 0.0710478744506836, 0.07153887939453125, 0.07099187469482422, 0.07117414093017578, 0.07101837158203125, 0.0710513916015625, 0.0728453140258789, 0.07129702758789062, 0.07194729614257812, 0.0712754898071289, 0.07134617614746094, 0.0714949722290039, 0.07138969421386719, 0.07113113403320312, 0.07133817291259766, 0.0717166748046875, 0.07110880279541015, 0.07136051177978515, 0.07123260498046875, 0.07120988464355468, 0.07179216003417968, 0.07207574462890624, 0.07125606536865234, 0.07797904205322266, 0.07296288299560547, 0.072474365234375, 0.07415110778808594, 0.07121929931640625, 0.071325439453125, 0.0720291519165039, 0.07135667419433593, 0.07155852508544921, 0.07122515106201172, 0.07166944122314453, 0.07156211090087891, 0.07141974639892579, 0.07119500732421875, 0.07131871795654297, 0.07194598388671875, 0.07183241271972657, 0.07167935943603515, 0.0729769287109375, 0.07454105377197266, 0.07175161743164063, 0.07242070770263671, 0.0718279037475586, 0.07112899017333985, 0.07150588989257813, 0.07477216339111328, 0.07165798187255859, 0.07161209869384766, 0.07121778869628906, 0.07126236724853516, 0.07338143920898438, 0.07160185241699218, 0.07143440246582031, 0.07146131134033203, 0.07188070678710938, 0.07282278442382813, 0.07140882873535156, 0.07150012969970704, 0.07124547576904297, 0.07149241638183594, 0.0710359649658203, 0.07136553955078125, 0.07105948638916015, 0.07141171264648437, 0.0714629135131836, 0.07235385894775391, 0.0715552978515625, 0.07142396545410157, 0.07154195404052735, 0.07136723327636718, 0.07162879943847657, 0.07148457336425781, 0.07124668884277344, 0.07154198455810547, 0.0712425308227539, 0.07132364654541015, 0.07111270141601563, 0.071372802734375, 0.07200911712646485, 0.07204035186767578, 0.07166012573242188, 0.07322633361816407, 0.07478067016601563, 0.07714476776123047, 0.07211984252929687, 0.0714060516357422, 0.0713641586303711, 0.0713070068359375, 0.0715107192993164, 0.07217491149902344, 0.07120486450195312, 0.07128339385986328, 0.0736395492553711, 0.07153907012939453, 0.07521894073486328, 0.0737600326538086, 0.07144905853271484, 0.072347900390625, 0.07309926605224609, 0.07158067321777344, 0.07115376281738281, 0.07150479888916016, 0.07117945861816406, 0.07252051544189453, 0.07127817535400391, 0.07121715545654297, 0.07164150238037109, 0.07124787139892579, 0.0712069091796875, 0.07111081695556641, 0.07113712310791015, 0.07159766387939454, 0.07183590698242187, 0.07193558502197266, 0.07282752227783203, 0.07179580688476563, 0.07139775848388671, 0.07106748962402344, 0.0713317413330078, 0.07142678070068359, 0.07172300720214844, 0.07114739227294922, 0.07144051361083985, 0.07127449798583985, 0.07119667053222656, 
0.07135027313232421, 0.07189299011230468, 0.07137689971923829, 0.0710489273071289, 0.07168348693847656, 0.07120166778564453, 0.07122943878173828, 0.07151821136474609, 0.0714728012084961, 0.07485884857177734, 0.07485440063476563, 0.07129817962646484, 0.07152934265136719, 0.07114137268066406, 0.0722656021118164, 0.0720426254272461, 0.07196057891845703, 0.07221043395996093, 0.07152448272705078, 0.07119776153564453, 0.07197113800048828]",tokens/s,13.941392068207916,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,1037.221888,6230.50752,0.0,5861.53984,5858.796544,s,1,14.133232421875,14.133232421875,0.0,14.133232421875,14.133232421875,14.133232421875,14.133232421875,[14.133232421875],,kWh,0.00020913278738332036,2.3061573469734615e-05,6.795505436398008e-05,0.00030014941521703506,,MB,1475.702784,6754.79552,0.0,6339.690496,6318.422528,s,10,9.733820678710938,0.9733820678710938,0.00579981673283569,0.9735043029785155,0.9791325134277343,0.9810867340087891,0.9826501104736328,"[0.9625725708007813, 0.96815673828125, 0.9676315307617187, 0.972113037109375, 0.9724314575195312, 0.9745771484375, 0.9785181274414062, 0.9760808715820313, 0.9830409545898438, 0.9786982421875]",tokens/s,263.00053026444533,kWh,2.8459431357953578e-05,3.138614690853327e-06,1.8863752464725906e-05,5.0461798513532806e-05,tokens/kWh,5073144.587411923,MB,1508.184064,6756.892672,0.0,6341.787648,6318.425088,s,10,53.561607910156255,5.3561607910156255,0.005410950005979772,5.357543212890626,5.360969042968749,5.36099013671875,5.36100701171875,"[5.34248486328125, 5.35098876953125, 5.35708642578125, 5.35778564453125, 5.354880859375, 5.36096435546875, 5.36076904296875, 5.3583359375, 5.35730078125, 5.36101123046875]",tokens/s,11.762156226839869,kWh,0.00015686635578037686,1.7303487839481614e-05,0.0001035547343588755,0.00027772457797873396,tokens/kWh,226843.44489245766,,s,630,53.55870824432375,0.08501382261003768,0.0014577066917174687,0.08476433563232422,0.08593532180786133,0.0861896842956543,0.09451225646972657,"[0.09599795532226563, 0.08606924438476563, 0.08550911712646485, 0.08450355529785156, 0.0840860824584961, 0.08352130889892578, 0.0846406707763672, 0.0851028823852539, 0.08465789031982422, 0.08453068542480469, 0.08405398559570312, 0.08475494384765625, 0.08520297241210938, 0.08448818969726563, 0.0843325424194336, 0.08364559936523437, 0.08454147338867188, 0.08509884643554687, 0.08495350646972656, 0.08427286529541016, 0.08379167938232422, 0.08469145965576172, 0.08505980682373047, 0.08497747039794921, 0.08428950500488282, 0.0838120346069336, 0.08354825592041015, 0.0850250244140625, 0.08479456329345703, 0.08403846740722656, 0.08369356536865234, 0.08448323059082032, 0.0850051498413086, 0.08523980712890625, 0.08438784027099609, 0.08462540435791016, 0.08435302734375, 0.08529443359375, 0.08486563110351562, 0.08424249267578125, 0.08475852966308593, 0.08429977416992188, 0.08501964569091797, 0.08470563507080078, 
0.08512579345703125, 0.08400486755371094, 0.08385065460205078, 0.08569712066650391, 0.08485478210449218, 0.08438909149169922, 0.08372659301757812, 0.08482844543457031, 0.08615334320068359, 0.08485404968261719, 0.08471965026855469, 0.0842791976928711, 0.08487004852294922, 0.08544572448730468, 0.08450710296630859, 0.08409951782226563, 0.083736572265625, 0.08483430480957031, 0.08525974273681641, 0.09517670440673828, 0.08519065856933594, 0.08488317108154297, 0.08407689666748047, 0.0835296630859375, 0.08332643127441407, 0.08450656127929687, 0.0850289306640625, 0.08436367797851563, 0.08579666900634765, 0.08458681488037109, 0.08520909118652344, 0.08469206237792969, 0.08459766387939453, 0.08390758514404296, 0.08447897338867187, 0.0858062744140625, 0.08452505493164063, 0.0841193618774414, 0.08381308746337891, 0.08448233795166016, 0.08643110656738281, 0.08502476501464844, 0.08393791961669922, 0.08441814422607422, 0.08436163330078125, 0.085951904296875, 0.08443170928955078, 0.0841233901977539, 0.0844775390625, 0.08439030456542969, 0.08526399993896484, 0.08454796600341796, 0.08467993927001953, 0.08438579559326172, 0.08510076904296875, 0.08493830108642578, 0.08467680358886719, 0.08563996887207032, 0.08484044647216797, 0.08577228546142578, 0.08455548858642578, 0.08410329437255859, 0.08358467102050782, 0.08470374298095704, 0.08604876708984376, 0.08457215881347656, 0.08413120269775391, 0.08502950286865234, 0.08492845153808594, 0.08593177795410156, 0.08449056243896484, 0.08459468841552735, 0.08457215881347656, 0.08505958557128906, 0.08532588958740234, 0.08475027465820313, 0.0850917739868164, 0.08499052429199219, 0.08630818939208984, 0.08472172546386719, 0.08509449768066406, 0.08462592315673828, 0.0943410873413086, 0.08492156982421875, 0.0842515869140625, 0.08468070220947266, 0.08543612670898437, 0.0844188461303711, 0.08409661102294921, 0.08338082885742187, 0.08449209594726563, 0.08580886077880859, 0.08455197143554688, 0.08384921264648437, 0.08438569641113282, 0.08575190734863282, 0.0852152328491211, 0.0852643814086914, 0.08426290893554687, 0.08374476623535156, 0.08504227447509766, 0.08594230651855468, 0.08486182403564453, 0.08421171569824219, 0.0838635482788086, 0.08504319763183593, 0.08599142456054687, 0.08461721801757813, 0.08451615905761718, 0.08408771514892578, 0.08596662139892577, 0.08553266906738281, 0.08499148559570313, 0.08409881591796875, 0.08510447692871094, 0.08639183807373046, 0.08488333129882812, 0.08427523040771484, 0.08471753692626953, 0.08444844818115234, 0.08614995574951172, 0.08458035278320312, 0.08410022735595703, 0.0851767349243164, 0.08575539398193359, 0.08535958099365235, 0.08457389068603516, 0.08425878143310547, 0.08502003479003906, 0.08618697357177735, 0.08474819183349609, 0.08426505279541016, 0.0849444808959961, 0.0848957748413086, 0.0852995834350586, 0.08629283142089844, 0.08466390228271484, 0.08489615631103516, 0.08612767791748047, 0.08481037139892578, 0.08426870727539063, 0.08362540435791016, 0.08517926025390625, 0.08536608123779296, 0.08482476806640625, 0.09428787231445312, 0.08480319976806641, 0.08403132629394532, 0.08435711669921875, 0.08488195037841798, 0.08529920196533203, 0.08443289947509766, 0.0851968002319336, 0.08480982208251953, 0.0859948501586914, 0.08450863647460938, 0.08407619476318359, 0.08445804595947265, 0.08532403564453125, 0.08504742431640624, 0.0843285140991211, 0.08430995178222657, 0.08473804473876953, 0.08445692443847656, 0.08561235046386718, 0.08447392272949218, 0.08380278778076172, 0.08441036987304687, 0.08517769622802734, 0.08611641693115234, 
0.08476127624511719, 0.08446537780761719, 0.08496556854248047, 0.08529824066162109, 0.0852103042602539, 0.08433424377441406, 0.08475657653808594, 0.08438508605957032, 0.08601261138916015, 0.08476467132568359, 0.08478227233886719, 0.08417158508300782, 0.08516422271728516, 0.08528646087646484, 0.08445158386230468, 0.08384512329101562, 0.08504319763183593, 0.08536598205566406, 0.08529388427734375, 0.08432841491699218, 0.08403298950195312, 0.08518096160888672, 0.0860549087524414, 0.08465760040283203, 0.08425939178466797, 0.08380416107177735, 0.08515750122070312, 0.08661446380615234, 0.0850777587890625, 0.08453753662109376, 0.08510259246826171, 0.08591875457763672, 0.086432861328125, 0.08469286346435546, 0.0850145263671875, 0.0851230697631836, 0.08548499298095703, 0.08469152069091797, 0.09438502502441407, 0.08460832214355468, 0.08401734161376953, 0.0846404800415039, 0.08598233795166016, 0.0842738265991211, 0.08385708618164063, 0.08490975952148437, 0.08469158172607422, 0.08525414276123047, 0.08416802978515625, 0.08444790649414062, 0.08416770935058594, 0.08490617370605469, 0.08564320373535156, 0.0844276123046875, 0.08411341094970703, 0.08468685150146485, 0.08436665344238281, 0.0856495361328125, 0.0844560317993164, 0.08405203247070313, 0.08694364929199219, 0.08533551788330078, 0.0844632339477539, 0.0840478744506836, 0.08351017761230468, 0.08481571197509766, 0.08611856079101562, 0.0848179168701172, 0.08455913543701171, 0.08519676971435547, 0.08519757080078125, 0.08598320007324219, 0.08481590270996094, 0.08388153839111329, 0.08510713958740235, 0.08596275329589843, 0.08487452697753907, 0.08412454223632812, 0.08386656188964844, 0.08514979553222657, 0.08616585540771485, 0.08466185760498048, 0.08410582733154297, 0.08455149078369141, 0.08496790313720703, 0.08512006378173828, 0.08442771148681641, 0.08396800231933593, 0.08703180694580079, 0.08586854553222656, 0.08475849914550782, 0.08413801574707032, 0.08384102630615234, 0.08591155242919922, 0.08605197143554688, 0.08449542236328125, 0.08472147369384765, 0.08426815795898437, 0.08531785583496093, 0.08510736083984374, 0.08464895629882813, 0.09504367828369141, 0.08468070220947266, 0.0847357406616211, 0.08544649505615234, 0.08486713409423828, 0.0841891860961914, 0.08459827423095703, 0.08422486114501954, 0.08518041229248047, 0.08481177520751954, 0.08394892883300781, 0.08374524688720703, 0.08445148468017578, 0.08506988525390625, 0.08518460845947265, 0.08436310577392578, 0.08435097503662109, 0.08497561645507813, 0.0845428466796875, 0.08577101135253906, 0.08445734405517578, 0.08484454345703125, 0.08459468841552735, 0.08657011413574218, 0.08485363006591796, 0.08586454772949219, 0.08528047943115234, 0.08609788513183594, 0.08454956817626953, 0.08410761260986328, 0.08359110260009765, 0.08466432189941406, 0.08649123382568359, 0.08461302185058593, 0.08427283477783203, 0.08371785736083984, 0.08525885009765626, 0.08596012878417969, 0.0846006088256836, 0.08407119750976562, 0.08505865478515626, 0.08520534515380859, 0.08540013122558594, 0.08433190155029296, 0.084553955078125, 0.0844415054321289, 0.08519270324707032, 0.08573715209960937, 0.0846841278076172, 0.0850370864868164, 0.0854721908569336, 0.08604444885253906, 0.08471574401855468, 0.08421949005126952, 0.08470979309082032, 0.08574771118164062, 0.08625971221923828, 0.08484659576416016, 0.08506572723388672, 0.0852328338623047, 0.0863927001953125, 0.08474531555175781, 0.08493449401855468, 0.09593561553955078, 0.08593510437011719, 0.08531100463867188, 0.08457228851318359, 0.08422025299072265, 0.08498585510253906, 
0.08550605010986329, 0.0841891860961914, 0.08439318084716797, 0.08407734680175781, 0.08567212677001954, 0.08527439880371093, 0.08421129608154297, 0.08388448333740234, 0.0850577621459961, 0.08619190216064453, 0.0845475845336914, 0.08395980834960938, 0.08493641662597656, 0.086038818359375, 0.08584153747558594, 0.08479148864746094, 0.08445257568359375, 0.08470217895507813, 0.08618598175048828, 0.08475631713867188, 0.08404144287109375, 0.08396790313720703, 0.08506333160400391, 0.08552313232421875, 0.08483039855957031, 0.08404930877685547, 0.08439385223388672, 0.08486275482177734, 0.08537110137939453, 0.08449097442626953, 0.08391065979003906, 0.0848384017944336, 0.08550297546386719, 0.08517241668701171, 0.0846078109741211, 0.084, 0.08522579193115234, 0.08593833923339844, 0.08499024200439453, 0.08503929901123047, 0.08531539154052735, 0.08540160369873047, 0.0850823974609375, 0.08450182342529297, 0.08442524719238281, 0.08480140686035156, 0.08542345428466797, 0.0853828125, 0.08481279754638672, 0.08418303680419922, 0.08541184234619141, 0.08546527862548828, 0.08476399993896484, 0.08435350036621093, 0.08453446197509766, 0.08522940826416016, 0.08593302154541016, 0.09456422424316406, 0.084840576171875, 0.08523948669433594, 0.08544230651855468, 0.08527094268798828, 0.08450678253173828, 0.08454064178466797, 0.08475536346435547, 0.08492428588867187, 0.08621260833740234, 0.0842742691040039, 0.08398477172851562, 0.08531929779052734, 0.08556784057617188, 0.08443452453613282, 0.08452175903320312, 0.08477458953857422, 0.08506623840332031, 0.0856943359375, 0.08440435028076172, 0.0838287353515625, 0.08444108581542968, 0.08476995086669922, 0.08525475311279297, 0.08427340698242188, 0.08382637023925782, 0.0847300796508789, 0.08498162841796875, 0.08559347534179687, 0.08442169952392578, 0.08399440002441406, 0.08460237121582032, 0.08489615631103516, 0.08543446350097657, 0.08421580505371094, 0.08525833892822265, 0.08465193939208984, 0.08624918365478515, 0.08456361389160157, 0.083976318359375, 0.08453782653808593, 0.08504537963867187, 0.08590255737304688, 0.0847092514038086, 0.08397702026367188, 0.08570470428466796, 0.08555084991455078, 0.08503250885009765, 0.08453135681152343, 0.08404854583740234, 0.08476249694824219, 0.0851599349975586, 0.08519475555419923, 0.0846192626953125, 0.08464080047607422, 0.08468905639648437, 0.08639266967773437, 0.08508719635009766, 0.0841702423095703, 0.08499046325683594, 0.08523088073730468, 0.08614358520507813, 0.08560857391357422, 0.09606098937988282, 0.08538912200927734, 0.08443305969238281, 0.08400383758544921, 0.08481292724609375, 0.08463382720947266, 0.0865445785522461, 0.08435282897949219, 0.08382720184326171, 0.08491212463378907, 0.08493651580810548, 0.0849205093383789, 0.08398847961425782, 0.08365869140625, 0.08439817810058593, 0.08500016021728515, 0.08551385498046875, 0.08455411529541015, 0.08398643493652344, 0.08487907409667969, 0.08449814605712891, 0.08613935852050782, 0.0848016357421875, 0.08410521697998047, 0.0844114532470703, 0.08474720001220704, 0.08627340698242188, 0.08468748474121093, 0.08394342041015625, 0.08458995056152344, 0.08503734588623046, 0.08517871856689453, 0.08436121368408203, 0.08405197143554688, 0.08506572723388672, 0.0861386260986328, 0.08525644683837891, 0.08537702178955078, 0.08536815643310547, 0.08561545562744141, 0.08497132873535156, 0.08465513610839843, 0.08414749145507812, 0.08435279846191407, 0.0851434555053711, 0.08511897277832031, 0.08446771240234376, 0.08403558349609375, 0.08508035278320313, 0.08593727874755859, 0.08568460845947265, 
0.08456214141845703, 0.08482947540283203, 0.08494153594970703, 0.08585939025878907, 0.0847390365600586, 0.08434102630615234, 0.08382022094726563, 0.0848897247314453, 0.08675849914550782, 0.08476547241210937, 0.0844185562133789, 0.08504934692382812, 0.09650080108642578, 0.0850789794921875, 0.0844062728881836, 0.08663174438476562, 0.0843554916381836, 0.08517250823974609, 0.08513929748535157, 0.08452950286865234, 0.08375868988037109, 0.08467424011230469, 0.08535504150390626, 0.08502067565917969, 0.08413788604736328, 0.08535004425048828, 0.08539385223388672, 0.08563878631591797, 0.08437388610839844, 0.08437983703613282, 0.08450761413574219, 0.08460374450683594, 0.08620396423339843, 0.08452345275878906, 0.08445471954345703, 0.08407299041748047, 0.08458243560791015, 0.08542825317382813, 0.08446575927734375, 0.08446566772460938, 0.08471347045898438, 0.08449836730957032, 0.08606111907958984, 0.08506310272216797, 0.08449449920654296, 0.08496736145019532, 0.08516409301757813, 0.08682771301269532, 0.08463270568847656, 0.08461167907714844, 0.08425167846679688, 0.08513839721679688, 0.08512716674804688, 0.08428543853759765, 0.08465126037597656, 0.08438451385498047, 0.08587468719482422, 0.08510873413085937, 0.08452864074707031, 0.08475494384765625, 0.084674560546875, 0.08571903991699219, 0.08550399780273438, 0.08448226928710938, 0.08481974029541016, 0.08475583648681641, 0.08573107147216796, 0.08447090911865235, 0.08427037048339844, 0.08512889862060546, 0.0853770523071289, 0.08579920196533203, 0.08459622192382812, 0.0841819839477539, 0.08485887908935547]",tokens/s,11.762793029400008,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,2549.338112,11826.823168,0.0,11431.575552,10953.091072,s,1,22.083626953125,22.083626953125,0.0,22.083626953125,22.083626953125,22.083626953125,22.083626953125,[22.083626953125],,kWh,0.00042438384648334394,4.6805431210738926e-05,0.00015918096067799037,0.0006303702383720733,,MB,1918.636032,12722.307072,0.0,12314.476544,11624.259584,s,10,19.116585571289065,1.9116585571289062,0.006993612625285195,1.9133917846679687,1.9185032348632811,1.919634002685547,1.9205386169433594,"[1.897379150390625, 1.902699462890625, 1.90814013671875, 1.909576904296875, 1.911366943359375, 1.9154166259765626, 1.91584521484375, 1.9171444091796874, 1.918251953125, 1.9207647705078126]",tokens/s,133.91512780633948,kWh,5.5732573974582165e-05,6.145645191757081e-06,3.701130738680281e-05,9.888952655314205e-05,tokens/kWh,2588747.351949639,MB,1922.7648,12724.404224,0.0,12316.573696,11624.262144,s,10,94.21018066406249,9.42101806640625,0.025053647449437427,9.42879052734375,9.44482841796875,9.445260400390625,9.445605986328124,"[9.3704345703125, 9.3852255859375, 9.40620703125, 9.4164833984375, 9.4247998046875, 9.43278125, 9.4416455078125, 9.4421787109375, 9.444732421875, 
9.4456923828125]",tokens/s,6.687175372760116,kWh,0.00027596498239625583,3.0441381840470628e-05,0.00018339111893499736,0.0004897974831717238,tokens/kWh,128624.58906901344,,s,630,94.20603550720212,0.14953338969397165,0.0020462793337056094,0.14950914764404297,0.15111215820312499,0.15163519210815432,0.16009703063964842,"[0.16060806274414063, 0.14670208740234375, 0.14601875305175782, 0.14674493408203124, 0.14774313354492188, 0.1459621124267578, 0.15375244140625, 0.14825053405761718, 0.1471837158203125, 0.14752310180664063, 0.14631753540039064, 0.14619606018066406, 0.14904960632324218, 0.1505774383544922, 0.14821340942382813, 0.14878767395019532, 0.14591180419921876, 0.14713845825195312, 0.14888560485839844, 0.14901248168945314, 0.14934962463378906, 0.14868553161621093, 0.14756253051757812, 0.14836898803710938, 0.1469444122314453, 0.1486684112548828, 0.150724609375, 0.14895423889160156, 0.1495040588378906, 0.14795245361328124, 0.14724908447265625, 0.14920211791992188, 0.14875736999511718, 0.14877212524414063, 0.14951712036132814, 0.1475782470703125, 0.14903053283691406, 0.1475729217529297, 0.1484738311767578, 0.14982585144042967, 0.1487548828125, 0.1505536346435547, 0.14810585021972655, 0.1473161926269531, 0.14818342590332031, 0.14907373046875, 0.15020697021484375, 0.1489409942626953, 0.14891416931152343, 0.1492064666748047, 0.14695001220703124, 0.1493162841796875, 0.14908146667480468, 0.14946981811523438, 0.14889369201660158, 0.1488497314453125, 0.1505494384765625, 0.1476259765625, 0.1487626190185547, 0.1492900848388672, 0.14863043212890625, 0.15067074584960938, 0.14942678833007814, 0.15974668884277343, 0.1473144989013672, 0.145940673828125, 0.1466429443359375, 0.1480494384765625, 0.1463537902832031, 0.15428489685058594, 0.14886912536621094, 0.14752153015136718, 0.14823834228515625, 0.14589132690429688, 0.14756863403320314, 0.15134300231933595, 0.15032534790039062, 0.1501114501953125, 0.14701962280273437, 0.14706375122070312, 0.1485312042236328, 0.14752153015136718, 0.15014451599121093, 0.1499632568359375, 0.14844435119628907, 0.14915632629394532, 0.14640777587890624, 0.14795365905761718, 0.14988902282714844, 0.14902444458007813, 0.15089491271972658, 0.14842271423339845, 0.14776722717285157, 0.1468408966064453, 0.14810499572753907, 0.1500142059326172, 0.15020831298828125, 0.14902735900878905, 0.1482570495605469, 0.1474391326904297, 0.14824089050292968, 0.14802943420410156, 0.15064901733398436, 0.15023085021972657, 0.14874214172363281, 0.15001603698730467, 0.1470023651123047, 0.148179931640625, 0.15065203857421874, 0.1501766357421875, 0.150614013671875, 0.14891375732421874, 0.14783247375488281, 0.14728863525390626, 0.14922566223144532, 0.15096832275390626, 0.14965965270996093, 0.15020442199707032, 0.14783897399902343, 0.14780621337890626, 0.15004019165039062, 0.14899862670898437, 0.15063031005859376, 0.1492618865966797, 0.1489514923095703, 0.14838099670410157, 0.15995619201660155, 0.14741285705566406, 0.1466479949951172, 0.14689010620117188, 0.14781817626953125, 0.14709648132324218, 0.15391941833496095, 0.14886431884765625, 0.14756886291503907, 0.14874467468261718, 0.1461207733154297, 0.14679859924316407, 0.1520025634765625, 0.150476806640625, 0.15000973510742188, 0.1485323486328125, 0.147704833984375, 0.1464930877685547, 0.14855203247070312, 0.14992720031738282, 0.14944650268554688, 0.14984077453613281, 0.14753555297851562, 0.14751571655273438, 0.14724649047851562, 0.14990806579589844, 0.14971200561523437, 0.15064505004882814, 0.14916192626953126, 0.15003712463378907, 0.14657699584960937, 
0.1478697204589844, 0.15068966674804687, 0.1502799072265625, 0.15042448425292967, 0.14940147399902343, 0.14886495971679686, 0.1467342987060547, 0.14909674072265625, 0.15073139953613282, 0.15011581420898437, 0.15108316040039063, 0.1487293701171875, 0.14904998779296874, 0.1473204803466797, 0.14937753295898437, 0.1514352264404297, 0.15026524353027343, 0.1499900207519531, 0.14873330688476563, 0.1485727996826172, 0.14837350463867188, 0.15057817077636718, 0.15104109191894532, 0.14954275512695311, 0.15053219604492188, 0.14835232543945312, 0.14807533264160155, 0.14945794677734375, 0.15097030639648437, 0.15024630737304687, 0.15054847717285155, 0.15010610961914062, 0.16076780700683593, 0.14740089416503907, 0.14671401977539061, 0.14782861328125, 0.14761964416503906, 0.14693263244628907, 0.15456646728515624, 0.14924986267089843, 0.14748463439941406, 0.1476848907470703, 0.14725619506835938, 0.1476259765625, 0.150761474609375, 0.15083059692382814, 0.15019865417480469, 0.14743916320800782, 0.14850502014160155, 0.14685606384277344, 0.1481031036376953, 0.15020448303222655, 0.15069325256347657, 0.14898255920410156, 0.14895872497558593, 0.14736367797851563, 0.14943238830566405, 0.14852549743652343, 0.14992790222167968, 0.15109738159179686, 0.14875238037109376, 0.15001190185546875, 0.14777958679199218, 0.14798439025878907, 0.14986239624023437, 0.150108154296875, 0.15065087890625, 0.15007334899902344, 0.1506570281982422, 0.146808837890625, 0.14803558349609375, 0.15090074157714845, 0.14996255493164062, 0.15091116333007812, 0.14929408264160157, 0.15004978942871094, 0.1475747833251953, 0.14901043701171876, 0.15021868896484375, 0.15079200744628907, 0.15038284301757812, 0.15061750793457032, 0.1501292419433594, 0.14777548217773437, 0.1487298583984375, 0.15075155639648438, 0.15004383850097655, 0.15093618774414064, 0.1493943328857422, 0.15111062622070312, 0.14844927978515626, 0.14958706665039062, 0.1490252227783203, 0.15041990661621094, 0.15023049926757812, 0.16084786987304686, 0.14708531188964843, 0.14729420471191407, 0.1472041015625, 0.14789836120605468, 0.14821334838867187, 0.15471075439453125, 0.14891180419921876, 0.14951423645019532, 0.1473597412109375, 0.1472368621826172, 0.14933811950683593, 0.1500037078857422, 0.15201895141601562, 0.14942562866210937, 0.14781494140625, 0.14778770446777345, 0.1471426544189453, 0.14931974792480468, 0.15128684997558595, 0.15012550354003906, 0.1489644775390625, 0.14848924255371093, 0.15055241394042967, 0.1466798095703125, 0.14954920959472656, 0.1494034881591797, 0.14997914123535155, 0.14966700744628905, 0.14913548278808594, 0.1509383087158203, 0.1468538818359375, 0.14921929931640626, 0.15028755187988282, 0.14996156311035155, 0.1506570281982422, 0.149494873046875, 0.14963600158691406, 0.14705459594726564, 0.1498091583251953, 0.1506078643798828, 0.15050460815429687, 0.15080703735351564, 0.14874774169921876, 0.15000665283203124, 0.14741299438476563, 0.1508922576904297, 0.15007096862792968, 0.15040982055664062, 0.1504500732421875, 0.1491005401611328, 0.15146783447265624, 0.1486801300048828, 0.14930415344238282, 0.14956480407714845, 0.149826171875, 0.15015936279296874, 0.1498927001953125, 0.1508356170654297, 0.14828282165527343, 0.15065965270996093, 0.14974566650390625, 0.1500958709716797, 0.16015455627441405, 0.14722047424316406, 0.1471056365966797, 0.14801705932617187, 0.14729434204101563, 0.14731805419921876, 0.15568162536621094, 0.14911628723144532, 0.149712646484375, 0.14771865844726562, 0.14823785400390624, 0.14828834533691407, 0.14956544494628907, 0.15234197998046875, 
0.14890652465820312, 0.15017575073242187, 0.14777507019042968, 0.14834722900390626, 0.14940780639648438, 0.15077491760253905, 0.15159794616699218, 0.14888531494140625, 0.15043545532226563, 0.14744342041015626, 0.14846243286132813, 0.14993318176269532, 0.15106343078613282, 0.14993408203125, 0.1500010528564453, 0.15119017028808593, 0.1474722900390625, 0.14792819213867187, 0.15009065246582032, 0.15064012145996095, 0.15029231262207032, 0.14970742797851563, 0.15063449096679687, 0.14765875244140625, 0.14886912536621094, 0.1502019500732422, 0.14997123718261718, 0.15062754821777344, 0.14943125915527344, 0.15080441284179688, 0.14766079711914062, 0.14990121459960937, 0.1492706298828125, 0.1506385955810547, 0.1490403594970703, 0.15020492553710937, 0.15011561584472657, 0.1482425994873047, 0.15192256164550783, 0.14913020324707033, 0.15151046752929687, 0.1490946807861328, 0.1501510772705078, 0.14844912719726563, 0.15026435852050782, 0.14982144165039063, 0.150687744140625, 0.15027200317382813, 0.1495224304199219, 0.16215461730957031, 0.14909593200683594, 0.14641552734375, 0.14799728393554687, 0.148748291015625, 0.14641273498535157, 0.15466966247558595, 0.15010838317871095, 0.1508659210205078, 0.14722377014160157, 0.14810723876953125, 0.14874911499023438, 0.14871888732910157, 0.15131024169921875, 0.15025234985351563, 0.14909645080566405, 0.1474453430175781, 0.14836585998535157, 0.15063027954101563, 0.1488504638671875, 0.15085591125488282, 0.14958796691894533, 0.14915330505371094, 0.1477145233154297, 0.14880960083007813, 0.1509470977783203, 0.14991241455078125, 0.15116697692871095, 0.1496303405761719, 0.14852117919921876, 0.14806851196289061, 0.14984422302246095, 0.15040921020507814, 0.1501880340576172, 0.15113731384277343, 0.14910768127441407, 0.1507368927001953, 0.14773881530761718, 0.14952566528320313, 0.15034538269042969, 0.14996719360351562, 0.1510386199951172, 0.14994178771972655, 0.15044451904296874, 0.14890188598632811, 0.1501407012939453, 0.14945263671875, 0.15112594604492188, 0.1498853759765625, 0.14992166137695312, 0.14828556823730468, 0.15008111572265626, 0.15005941772460937, 0.15062956237792968, 0.15191737365722657, 0.14974156188964843, 0.15141888427734376, 0.1486929931640625, 0.14989859008789064, 0.14984463500976564, 0.15096421813964844, 0.1495653076171875, 0.15069970703125, 0.16067295837402343, 0.14796627807617188, 0.14791661071777343, 0.14806002807617188, 0.1470492858886719, 0.1481134033203125, 0.15501519775390624, 0.14963095092773437, 0.14951423645019532, 0.14811651611328125, 0.14753590393066407, 0.14831663513183593, 0.15114828491210938, 0.1519048614501953, 0.14974310302734375, 0.150442138671875, 0.14739039611816407, 0.14907830810546874, 0.14803286743164062, 0.15137033081054688, 0.150249755859375, 0.14963101196289064, 0.15034739685058593, 0.14729049682617187, 0.14910415649414063, 0.1489058837890625, 0.1513846435546875, 0.1504950714111328, 0.15006121826171875, 0.15045018005371094, 0.14760089111328126, 0.14965402221679688, 0.1491005401611328, 0.15047488403320314, 0.14957554626464845, 0.15042684936523437, 0.14875631713867188, 0.14877996826171874, 0.14998733520507812, 0.1498787841796875, 0.1510726776123047, 0.15014874267578124, 0.15019013977050782, 0.14839849853515624, 0.1491394500732422, 0.14930943298339844, 0.15014230346679688, 0.15135174560546874, 0.15020828247070311, 0.15068351745605468, 0.14807212829589844, 0.15022988891601563, 0.14904226684570313, 0.15133139038085938, 0.14960220336914062, 0.15011856079101563, 0.14935888671875, 0.15014912414550782, 0.15050880432128907, 
0.15042636108398438, 0.1516477508544922, 0.14942665100097657, 0.15204296875, 0.15941836547851562, 0.14711958312988282, 0.14898348999023436, 0.14644291687011718, 0.14857395935058593, 0.150712158203125, 0.15332415771484376, 0.1503272705078125, 0.1482845458984375, 0.14819622802734375, 0.14686968994140626, 0.1490458221435547, 0.15335789489746093, 0.1511367950439453, 0.14981111145019532, 0.1480067901611328, 0.148172607421875, 0.14755247497558593, 0.1504085693359375, 0.15230435180664062, 0.15046861267089845, 0.14987826538085938, 0.14792950439453126, 0.14946517944335938, 0.14776316833496095, 0.15155807495117188, 0.15067916870117187, 0.15004083251953124, 0.14994435119628907, 0.14808493041992188, 0.15060992431640624, 0.14790640258789062, 0.1508717498779297, 0.1494342041015625, 0.15003692626953125, 0.14953082275390625, 0.14907379150390626, 0.1506793212890625, 0.14891030883789064, 0.15103581237792968, 0.14979647827148437, 0.15000198364257813, 0.14881794738769533, 0.14942233276367187, 0.14994432067871094, 0.14954086303710937, 0.1516195831298828, 0.14893875122070313, 0.1517690887451172, 0.1495572509765625, 0.15134515380859376, 0.14895225524902345, 0.1500618896484375, 0.1498787841796875, 0.15085494995117188, 0.1491871337890625, 0.15019334411621094, 0.14959100341796874, 0.1497266845703125, 0.15141737365722657, 0.15013186645507812, 0.1515262451171875, 0.1500913848876953, 0.1605960998535156, 0.14707347106933594, 0.14708358764648438, 0.1492139892578125, 0.14697305297851562, 0.14864979553222657, 0.15545330810546876, 0.1491721954345703, 0.14867709350585936, 0.1474534454345703, 0.1493731231689453, 0.14692965698242189, 0.1516198425292969, 0.15215664672851562, 0.14939459228515625, 0.14927548217773437, 0.14761984252929689, 0.14804374694824218, 0.1492532501220703, 0.15129078674316407, 0.15103794860839845, 0.1492500457763672, 0.15040080261230468, 0.14732514953613282, 0.14898367309570312, 0.15053651428222656, 0.1512807312011719, 0.15014749145507814, 0.14991725158691407, 0.14970489501953124, 0.14796624755859375, 0.15015965270996093, 0.15045529174804687, 0.15064166259765624, 0.15012454223632812, 0.1496268768310547, 0.14853529357910156, 0.14912821960449218, 0.15132931518554688, 0.15042604064941406, 0.15148646545410155, 0.1499279327392578, 0.14963711547851563, 0.14816029357910157, 0.14991792297363282, 0.1504965057373047, 0.1503445129394531, 0.1514126739501953, 0.14908210754394532, 0.1507060546875, 0.1488605194091797, 0.1499202880859375, 0.15023513793945312, 0.15087359619140625, 0.14966534423828126, 0.14958688354492186, 0.15028355407714844, 0.14994505310058595, 0.15148252868652343, 0.14961442565917968, 0.15116192626953126, 0.14994309997558594, 0.15027827453613282]",tokens/s,6.68746961495727,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 110.12 MiB is free. Process 167342 has 14.63 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 51.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 436.12 MiB is free. Process 174096 has 14.31 GiB memory in use. Of the allocated memory 14.14 GiB is allocated by PyTorch, and 61.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,1043.406848,5037.228032,0.0,4634.70592,4621.451776,s,1,14.5569150390625,14.5569150390625,0.0,14.5569150390625,14.5569150390625,14.5569150390625,14.5569150390625,[14.5569150390625],,kWh,0.00020359975043334846,2.24512509358292e-05,6.792283211599914e-05,0.0002939738334851768,,MB,1344.303104,5911.740416,0.0,5496.635392,5337.002496,s,10,9.430091003417969,0.9430091003417967,0.004016338338506861,0.9434315490722656,0.9475170654296875,0.9486765625,0.9496041601562499,"[0.9350957641601563, 0.9436987915039062, 0.9439705200195313, 0.9393595581054688, 0.943164306640625, 0.94009912109375, 0.9416194458007813, 0.9498360595703125, 0.9472593994140625, 0.945988037109375]",tokens/s,271.4713992762233,kWh,2.7483738558712364e-05,3.0309794922599e-06,1.8280494422364728e-05,4.8795212473336996e-05,tokens/kWh,5246416.339305526,MB,1401.081856,5913.837568,0.0,5496.635392,5337.005056,s,10,50.873770507812495,5.0873770507812495,0.0128487359401144,5.08795947265625,5.09986904296875,5.105783642578125,5.110515322265625,"[5.08163134765625, 5.09122900390625, 5.06926220703125, 5.071123046875, 5.07480419921875, 5.095923828125, 5.08468994140625, 5.09485400390625, 5.1116982421875, 
5.0985546875]",tokens/s,12.383591656593515,kWh,0.0001488463528554562,1.641833582762712e-05,9.600945812063467e-05,0.0002612741468037179,tokens/kWh,241126.03857177158,,s,630,50.871336517333994,0.08074815320211744,0.0012559569604315007,0.08051854705810546,0.08123368911743165,0.08146673469543457,0.08965459014892578,"[0.09022844696044922, 0.08232227325439453, 0.08083660888671874, 0.08053097534179687, 0.0812077407836914, 0.08055197143554688, 0.08018329620361328, 0.0799477767944336, 0.08024285125732422, 0.08000809478759766, 0.08000198364257813, 0.080216064453125, 0.08036351776123046, 0.08029933166503907, 0.08015737915039063, 0.08152857971191406, 0.08069760131835937, 0.08047821044921875, 0.08031378936767578, 0.08035334777832032, 0.08033071899414063, 0.08127875518798829, 0.08084352111816406, 0.08048844909667968, 0.08046562957763671, 0.08022188568115235, 0.08030473327636718, 0.08030413055419922, 0.08024473571777344, 0.0802606430053711, 0.08023401641845704, 0.08011856079101562, 0.0803165740966797, 0.08030729675292969, 0.08030223846435547, 0.08045849609375, 0.08044089508056641, 0.08047660827636718, 0.08038297271728516, 0.08039936065673828, 0.08046514892578124, 0.08037872314453125, 0.08056748962402344, 0.08055391693115234, 0.08057315063476563, 0.08064768218994141, 0.08050540924072265, 0.08045104217529297, 0.08051152038574219, 0.08067254638671875, 0.08063410949707031, 0.08062732696533204, 0.08076262664794921, 0.08055033874511719, 0.08053366088867188, 0.08055398559570312, 0.08045516967773438, 0.08048486328125, 0.08052867126464844, 0.0804751968383789, 0.08050841522216796, 0.08059439849853516, 0.08067465972900391, 0.09018617248535156, 0.08179727935791016, 0.08105574035644532, 0.08054573059082032, 0.0804588165283203, 0.08032918548583984, 0.08019993591308594, 0.08018265533447265, 0.08053852844238281, 0.08011804962158203, 0.08013212585449218, 0.08003334045410156, 0.080648193359375, 0.0804901123046875, 0.0805565414428711, 0.08037789154052734, 0.08038742065429688, 0.08080857849121094, 0.08047737884521484, 0.08039711761474609, 0.08044544219970703, 0.08030003356933593, 0.08020172882080077, 0.08028272247314452, 0.08021846771240235, 0.08028377532958984, 0.08038361358642578, 0.0805261459350586, 0.08032876586914063, 0.0809450912475586, 0.08087363433837891, 0.08214307403564453, 0.08042700958251953, 0.08046562957763671, 0.08060342407226563, 0.0817456283569336, 0.08101696014404297, 0.08074050903320312, 0.08064733123779297, 0.08058866882324218, 0.0805693130493164, 0.08068710327148437, 0.08089424133300781, 0.08064982604980468, 0.08051465606689454, 0.08040067291259766, 0.08074409484863282, 0.08082463836669922, 0.08120304107666015, 0.08057695770263672, 0.08046367645263672, 0.08351558685302735, 0.08134041595458984, 0.08083865356445312, 0.08048639678955079, 0.08036579132080078, 0.08037478637695313, 0.0804441909790039, 0.08049247741699218, 0.08045171356201172, 0.08056416320800781, 0.08066681671142578, 0.08099001312255859, 0.08953311920166016, 0.08120137786865235, 0.08082128143310546, 0.080321533203125, 0.08016041564941406, 0.0801119384765625, 0.08021987152099609, 0.07999862670898437, 0.08003062438964843, 0.0802240982055664, 0.08016780853271484, 0.08023737335205078, 0.08005990600585937, 0.0800693130493164, 0.08032041931152344, 0.08048758697509766, 0.08041388702392578, 0.08037350463867188, 0.08030111694335937, 0.08037677001953125, 0.0803051528930664, 0.08025395202636719, 0.08033484649658203, 0.08034406280517578, 0.08022029113769531, 0.08024269104003906, 0.08023129272460937, 0.08045308685302735, 0.08051136016845703, 
0.08024285125732422, 0.08012518310546875, 0.08007737731933594, 0.0804335708618164, 0.08015980529785156, 0.08085574340820313, 0.08039222717285156, 0.08061542510986328, 0.08023638153076172, 0.08038006591796874, 0.0802344970703125, 0.08038947296142578, 0.0804985580444336, 0.08063046264648438, 0.0803960952758789, 0.08053113555908203, 0.08029039764404297, 0.08032428741455078, 0.08015904235839844, 0.08013823699951172, 0.08018745422363281, 0.0802581787109375, 0.08025727844238281, 0.08043958282470703, 0.08024502563476563, 0.08035123443603516, 0.08013622283935547, 0.08016687774658203, 0.08026435089111328, 0.08034185791015624, 0.08026924896240234, 0.08022022247314453, 0.08010377502441406, 0.08033859252929687, 0.0895567398071289, 0.08123212432861328, 0.08069097900390625, 0.08018466949462891, 0.0806143341064453, 0.08128505706787109, 0.08010956573486328, 0.07977574157714844, 0.07983219146728515, 0.08063065338134766, 0.07992115020751953, 0.07974864196777344, 0.07966515350341796, 0.07998694610595704, 0.08002365112304688, 0.07986502075195312, 0.07980707550048828, 0.0797003173828125, 0.07987535858154297, 0.07989462280273438, 0.07996018981933593, 0.0800588150024414, 0.08042201232910157, 0.08015353393554688, 0.08005760192871093, 0.08003151702880859, 0.08127919769287109, 0.080853759765625, 0.08044953918457032, 0.08042240142822266, 0.08043981170654296, 0.08052326202392578, 0.08047984313964844, 0.08029634857177734, 0.08052326202392578, 0.0803737564086914, 0.08034697723388672, 0.08033500671386719, 0.08039218902587891, 0.08094105529785156, 0.08022835540771485, 0.08011177825927734, 0.080174560546875, 0.08032498931884766, 0.08032624053955079, 0.08021177673339844, 0.080399169921875, 0.08054688262939454, 0.0804065933227539, 0.08044528198242187, 0.08055270385742187, 0.08048646545410157, 0.08083251190185547, 0.0805351333618164, 0.08052982330322266, 0.0804805450439453, 0.08047382354736328, 0.08049868774414062, 0.08050019073486328, 0.08057001495361328, 0.08057683563232422, 0.08054637145996094, 0.08039833831787109, 0.08946431732177734, 0.0810582046508789, 0.0806995849609375, 0.08015248107910156, 0.08009318542480469, 0.08024269104003906, 0.08025638580322265, 0.08008972930908204, 0.08006050872802735, 0.08045161437988281, 0.08009667205810547, 0.08019107055664063, 0.08011804962158203, 0.08008688354492187, 0.08029261016845703, 0.08027855682373047, 0.08020476531982422, 0.08001126098632813, 0.08032608032226562, 0.08008966064453125, 0.08016316986083985, 0.0799764175415039, 0.07994528198242187, 0.07992345428466797, 0.08021004486083984, 0.08018099212646485, 0.08035919952392578, 0.0804838104248047, 0.08006630706787109, 0.08011641693115235, 0.08006912231445312, 0.07999091339111328, 0.08035295867919921, 0.08009315490722656, 0.080200927734375, 0.08012995147705078, 0.08018326568603516, 0.08016316986083985, 0.08019004821777344, 0.0804491195678711, 0.0803309097290039, 0.08025142669677734, 0.08028166198730469, 0.08009693145751953, 0.08006886291503906, 0.08003533172607422, 0.08035763549804688, 0.08035456085205078, 0.08053622436523437, 0.08032617950439454, 0.08042345428466798, 0.08041474914550781, 0.08107008361816406, 0.08045478057861329, 0.08317632293701172, 0.08141414642333984, 0.08135884857177735, 0.08120259094238282, 0.08059964752197266, 0.08106598663330078, 0.0810533447265625, 0.08074082946777343, 0.08144384002685547, 0.090378173828125, 0.08283757019042969, 0.08203059387207032, 0.08073958587646485, 0.080408447265625, 0.08045359802246094, 0.08052035522460937, 0.0804615707397461, 0.08041203308105468, 0.08048214721679688, 
0.08055683135986329, 0.08042082977294922, 0.08121887969970704, 0.08052809906005859, 0.0805292510986328, 0.0803034896850586, 0.08064694213867188, 0.08049254608154296, 0.08037785339355469, 0.08048255920410156, 0.08060288238525391, 0.08052941131591797, 0.08046793365478516, 0.08043318176269532, 0.08033507537841797, 0.08034457397460938, 0.08048668670654296, 0.08040038299560547, 0.08040067291259766, 0.08081356811523438, 0.0803435516357422, 0.08037923431396485, 0.08034137725830078, 0.08038604736328125, 0.08088371276855469, 0.08138137817382812, 0.08087312316894531, 0.08049494171142578, 0.080650146484375, 0.08063190460205077, 0.08073757171630859, 0.08102515411376954, 0.08142256164550782, 0.08096310424804687, 0.08061424255371094, 0.08068422698974609, 0.08041104125976563, 0.08041923522949218, 0.08067657470703125, 0.08053379058837891, 0.08066252899169922, 0.08090009307861327, 0.0807710723876953, 0.08082434844970703, 0.08076873779296875, 0.08072013092041015, 0.08081126403808593, 0.0811773452758789, 0.08148502349853516, 0.08098281860351562, 0.08086732482910156, 0.0816978530883789, 0.08106060791015625, 0.0896945571899414, 0.08142342376708984, 0.0807514877319336, 0.08054083251953124, 0.08027228546142579, 0.08027983856201172, 0.08027954864501953, 0.08024755096435547, 0.08020381164550781, 0.08001808166503906, 0.08018358612060547, 0.08026316833496094, 0.08022342681884766, 0.08033567810058594, 0.08031590270996093, 0.08035951995849609, 0.08041433715820312, 0.08018428802490235, 0.08023238372802734, 0.08023455810546876, 0.08016671752929687, 0.08036147308349609, 0.08050460815429687, 0.08042313385009765, 0.0803594207763672, 0.08042908477783203, 0.08053068542480468, 0.08039875030517578, 0.08101856231689453, 0.08116083526611328, 0.08102861022949219, 0.08049427032470703, 0.08049542236328125, 0.0815588150024414, 0.08049088287353516, 0.08027760314941407, 0.0802265625, 0.08049203491210938, 0.0808852767944336, 0.08058159637451172, 0.08076227569580079, 0.08063139343261719, 0.0804505615234375, 0.08026035308837891, 0.0802455062866211, 0.08033484649658203, 0.0811769256591797, 0.08074575805664062, 0.08055232238769532, 0.0806904296875, 0.08089622497558593, 0.08176399993896484, 0.08082316589355469, 0.08109670257568359, 0.08083251190185547, 0.08066252899169922, 0.0807701416015625, 0.08053043365478516, 0.08062761688232421, 0.0805580825805664, 0.08054598236083985, 0.08053126525878906, 0.08062684631347657, 0.09090048217773437, 0.08158182525634766, 0.08073651123046875, 0.08075878143310547, 0.08015872192382813, 0.08003926086425782, 0.08080633544921875, 0.08017292785644531, 0.0803147201538086, 0.0804823989868164, 0.0804494400024414, 0.08045069122314454, 0.08045452880859374, 0.08038329315185547, 0.08044409942626952, 0.080572509765625, 0.08088326263427735, 0.08036937713623046, 0.08049282836914062, 0.08039820861816406, 0.08052169799804687, 0.08041062164306641, 0.08034015655517578, 0.0806487045288086, 0.081004638671875, 0.08083849334716797, 0.08065184020996094, 0.08063369750976562, 0.08087615966796875, 0.08084310150146484, 0.08062566375732422, 0.08072803497314453, 0.081328125, 0.08127279663085937, 0.08091651153564453, 0.08076255798339843, 0.08070178985595704, 0.08091648101806641, 0.08089600372314452, 0.0806924819946289, 0.08068924713134766, 0.08057923126220704, 0.08056960296630859, 0.08063897705078125, 0.08069491577148437, 0.08060070037841797, 0.08072243499755859, 0.08055398559570312, 0.08089190673828126, 0.08144438171386718, 0.08105622100830077, 0.08100582122802734, 0.0812938232421875, 0.08092684936523438, 0.080595458984375, 
0.08051673889160156, 0.08055193328857421, 0.08052531433105468, 0.08054956817626953, 0.08066079711914062, 0.08066047668457031, 0.08152019500732421, 0.08091910552978515, 0.09030242919921876, 0.08140595245361328, 0.08128921508789062, 0.08094310760498047, 0.081328125, 0.08047235107421875, 0.08038780975341797, 0.08071778869628907, 0.08046546936035157, 0.08031427001953124, 0.08027603149414063, 0.08050482940673828, 0.08063999938964844, 0.08068300628662109, 0.08058060455322266, 0.0806924819946289, 0.08088038635253907, 0.08060108947753906, 0.08068505859375, 0.08054771423339843, 0.08068828582763672, 0.08092301177978516, 0.08082697296142578, 0.08099174499511719, 0.08137779235839844, 0.08247296142578125, 0.08115977478027343, 0.08071119689941406, 0.08100543975830078, 0.08141747283935546, 0.08105856323242187, 0.08057453155517579, 0.08073516845703126, 0.08058573150634765, 0.08105577850341797, 0.08132809448242187, 0.08081574249267579, 0.0816316146850586, 0.08119910430908203, 0.08091033935546875, 0.0810758056640625, 0.08165590667724609, 0.08105551910400391, 0.0808156509399414, 0.08074121856689453, 0.08082038116455079, 0.08124777221679688, 0.08138502502441407, 0.08095123291015625, 0.08128406524658204, 0.08149795532226563, 0.08098729705810546, 0.0809013442993164, 0.08098365020751953, 0.0815617904663086, 0.08097138977050782, 0.08112985229492188, 0.08142550659179687, 0.08098032379150391, 0.08111353302001953, 0.08134636688232422, 0.08106835174560546, 0.08128102111816406, 0.08985260772705078, 0.0812743377685547, 0.08074089813232421, 0.08034019470214844, 0.08032937622070313, 0.08044898986816407, 0.08056678771972656, 0.08041069030761719, 0.08044454193115234, 0.08057545471191406, 0.08056832122802735, 0.08053533172607422, 0.08044156646728516, 0.0804853744506836, 0.080681884765625, 0.08044963073730468, 0.08044748687744141, 0.08051097869873047, 0.08046150207519531, 0.08042550659179687, 0.08037709045410156, 0.08048233795166015, 0.08057878112792968, 0.08049600219726563, 0.08058358764648438, 0.08118271636962891, 0.08057379150390626, 0.08061814117431641, 0.08063795471191407, 0.08059613037109375, 0.0804435806274414, 0.08058294677734375, 0.080512451171875, 0.08052774047851563, 0.08067046356201171, 0.08074527740478515, 0.08081990051269532, 0.08094969940185547, 0.08123104095458984, 0.0810340805053711, 0.0808194580078125, 0.08081283569335937, 0.08083993530273438, 0.08081590270996093, 0.08115894317626954, 0.08141619110107422, 0.08089164733886718, 0.08081871795654297, 0.08080355072021485, 0.08099635314941406, 0.08089190673828126, 0.0813017578125, 0.08140473937988281, 0.08192479705810547, 0.08108831787109375, 0.08078505706787109, 0.08127686309814453, 0.0809603500366211, 0.08097586822509766, 0.0810304946899414, 0.08157225799560547, 0.08118287658691406, 0.08090742492675781]",tokens/s,12.38418416204443,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 84181 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 89369 has 14.71 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 28.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,1198.444544,10582.09792,0.0,10179.575808,10067.3536,s,1,22.452044921875,22.452044921875,0.0,22.452044921875,22.452044921875,22.452044921875,22.452044921875,[22.452044921875],,kWh,0.0004358892054416667,4.807473573073978e-05,0.00014503817158600246,0.000629002112758409,,MB,1408.946176,12521.96352,0.0,12106.858496,11264.997888,s,10,22.025080078125,2.2025080078125,0.012151026773434104,2.2043836669921877,2.2159740234375,2.216520703125,2.216958046875,"[2.176212158203125, 2.1932548828125, 2.192836669921875, 2.19731787109375, 2.204373779296875, 2.2043935546875, 2.210161865234375, 2.2170673828125, 2.2158525390625, 2.213609375]",tokens/s,116.2311324598795,kWh,6.432398427874924e-05,7.094660104198045e-06,4.286047873280005e-05,0.00011427912311574734,tokens/kWh,2240129.194382346,MB,1433.993216,12526.157824,0.0,12108.955648,11265.000448,s,10,108.8452109375,10.88452109375,0.02712600890432365,10.89675048828125,10.912278515625001,10.91612021484375,10.919193574218749,"[10.8512158203125, 10.8384873046875, 10.850037109375, 10.89648828125, 10.87683203125, 10.9199619140625, 10.90212109375, 10.9016298828125, 10.8970126953125, 10.9114248046875]",tokens/s,5.788036006120217,kWh,0.0003188599458741688,3.517261090855641e-05,0.00021167561378480083,0.0005657081705675261,tokens/kWh,111364.84017333097,,s,630,108.84104246520995,0.17276355946858724,0.0019008705304642802,0.1726689453125,0.17437670898437502,0.17481485061645508,0.18287327545166018,"[0.18123143005371095, 0.16884124755859375, 0.1698864288330078, 0.1694535675048828, 0.16980400085449218, 0.17391766357421876, 0.17219821166992189, 0.17003506469726562, 0.16972137451171876, 0.17039984130859376, 0.17010252380371094, 0.17304460144042969, 0.17199090576171874, 0.17009458923339843, 0.1698870086669922, 0.17112351989746094, 
0.17137481689453124, 0.17271737670898438, 0.17186444091796876, 0.17133506774902343, 0.16930400085449218, 0.1692410888671875, 0.17280184936523438, 0.17293142700195313, 0.17225100708007812, 0.17232920837402343, 0.17161322021484374, 0.17191542053222655, 0.17265061950683594, 0.17347254943847656, 0.17347552490234375, 0.17267030334472655, 0.17283917236328125, 0.17308511352539063, 0.17524365234375, 0.1728419189453125, 0.17325059509277344, 0.17280242919921876, 0.171177734375, 0.17142640686035157, 0.17394688415527343, 0.172600830078125, 0.17188832092285156, 0.17245472717285157, 0.17216511535644533, 0.1722341766357422, 0.17210595703125, 0.17246421813964843, 0.17310508728027343, 0.17247673034667968, 0.17183949279785157, 0.17445401000976563, 0.17218832397460937, 0.17191535949707032, 0.1733632049560547, 0.17340937805175782, 0.17244866943359374, 0.1732052459716797, 0.173459716796875, 0.17327513122558594, 0.17274819946289063, 0.1745432586669922, 0.17215731811523438, 0.18242326354980468, 0.16967298889160157, 0.1696485137939453, 0.16967868041992187, 0.1691374053955078, 0.17443746948242186, 0.17374365234375, 0.16972840881347656, 0.17142098999023436, 0.170411865234375, 0.17159036254882812, 0.17380982971191405, 0.1727519073486328, 0.17074374389648436, 0.16964697265625, 0.17044085693359376, 0.1725028839111328, 0.17299148559570313, 0.17232371520996093, 0.17219766235351563, 0.1706724853515625, 0.17059840393066406, 0.17255833435058593, 0.17263597106933593, 0.1717220458984375, 0.17154905700683593, 0.17078057861328125, 0.17217391967773438, 0.17323426818847656, 0.17258486938476564, 0.1723408660888672, 0.17135446166992188, 0.17094044494628907, 0.17189718627929687, 0.17279971313476564, 0.17159776306152344, 0.17197900390625, 0.17137747192382813, 0.17130812072753906, 0.1721180419921875, 0.17249261474609376, 0.17197666931152344, 0.1719207000732422, 0.17092169189453124, 0.1718691864013672, 0.17255558776855467, 0.17226617431640626, 0.17223785400390626, 0.17203724670410156, 0.1714683837890625, 0.1731394500732422, 0.17237068176269532, 0.17178419494628908, 0.17258610534667967, 0.17182768249511718, 0.17225935363769532, 0.1725068817138672, 0.17211199951171874, 0.17232333374023437, 0.17258918762207032, 0.17306771850585936, 0.17303103637695313, 0.17321994018554687, 0.1830570831298828, 0.1692051239013672, 0.16866543579101562, 0.1706273956298828, 0.1694146270751953, 0.17474745178222656, 0.17273365783691405, 0.17148588562011718, 0.17113731384277345, 0.17039535522460938, 0.17113661193847657, 0.17392230224609376, 0.17260809326171875, 0.17085244750976564, 0.17160018920898437, 0.17135789489746095, 0.1725560302734375, 0.17246438598632813, 0.17185702514648438, 0.1710101776123047, 0.1706483154296875, 0.16993913269042968, 0.1722360382080078, 0.17167129516601562, 0.16999494934082032, 0.17109619140625, 0.17114070129394532, 0.17120297241210938, 0.17269132995605468, 0.1722574005126953, 0.17241702270507814, 0.17235968017578124, 0.17108522033691406, 0.1728287353515625, 0.17208192443847656, 0.17285916137695312, 0.17331199645996093, 0.17191526794433593, 0.1718970947265625, 0.17297113037109374, 0.1732010498046875, 0.17220851135253906, 0.17206947326660157, 0.17180467224121093, 0.17271192932128906, 0.1730908203125, 0.17206019592285157, 0.1729192657470703, 0.17183538818359376, 0.17184259033203125, 0.17311155700683595, 0.1729666290283203, 0.1727651824951172, 0.17352703857421875, 0.17219564819335936, 0.17253190612792968, 0.17262339782714844, 0.1728409881591797, 0.1727229461669922, 0.1740100555419922, 0.17296115112304689, 0.1731402587890625, 
0.17305349731445313, 0.1853082580566406, 0.17081394958496093, 0.1701604461669922, 0.17026792907714844, 0.1698310089111328, 0.17575343322753906, 0.17244979858398438, 0.1716071319580078, 0.17046211242675782, 0.16919349670410155, 0.17160124206542968, 0.17533116149902345, 0.1725570831298828, 0.17214463806152344, 0.170446044921875, 0.17196112060546875, 0.17337522888183593, 0.17317298889160157, 0.17373933410644532, 0.17158624267578124, 0.1710672607421875, 0.17147421264648438, 0.17332940673828126, 0.17405039978027342, 0.17357632446289062, 0.17208604431152344, 0.17202128601074218, 0.17228367614746093, 0.17370780944824218, 0.17391001892089844, 0.17267916870117186, 0.17141123962402344, 0.17189222717285157, 0.17369906616210937, 0.17362757873535156, 0.17408883666992186, 0.1724144287109375, 0.17272047424316406, 0.17252362060546875, 0.1740597381591797, 0.173993408203125, 0.17379568481445312, 0.17256375122070314, 0.17261366271972656, 0.17380630493164062, 0.17351644897460938, 0.1744878692626953, 0.17404937744140625, 0.1721383056640625, 0.17273199462890626, 0.17382406616210938, 0.17305751037597655, 0.1742834930419922, 0.1718105010986328, 0.1720530548095703, 0.1734185028076172, 0.17295155334472656, 0.17447663879394532, 0.17287799072265625, 0.1732861785888672, 0.17324617004394532, 0.17430528259277345, 0.17438082885742187, 0.1816338806152344, 0.17068850708007813, 0.17040380859375, 0.168974365234375, 0.16962258911132813, 0.17552275085449218, 0.17239840698242187, 0.17102464294433595, 0.16939328002929688, 0.17024287414550782, 0.17165721130371095, 0.17437625122070313, 0.1731826934814453, 0.17163885498046874, 0.16991317749023438, 0.17148956298828125, 0.17299139404296876, 0.17458425903320313, 0.17228147888183593, 0.1720811767578125, 0.1713766326904297, 0.17118841552734376, 0.17448953247070312, 0.17361683654785157, 0.17207107543945313, 0.17141993713378906, 0.17140296936035157, 0.17253631591796875, 0.17350440979003906, 0.1725314636230469, 0.17216770935058595, 0.1728738555908203, 0.1724805145263672, 0.17308876037597656, 0.1738236083984375, 0.1726297607421875, 0.17188832092285156, 0.17124659729003905, 0.17338975524902345, 0.17315411376953124, 0.1729438018798828, 0.17337628173828126, 0.17332733154296875, 0.17302236938476562, 0.17478732299804686, 0.17393894958496095, 0.17353919982910157, 0.17146611022949218, 0.1722058868408203, 0.17481602478027344, 0.17557196044921874, 0.1734051818847656, 0.17318911743164062, 0.171863525390625, 0.1715328369140625, 0.17295677185058594, 0.17288841247558595, 0.17217088317871093, 0.17266578674316407, 0.1724559326171875, 0.17270364379882813, 0.1736846466064453, 0.17292921447753906, 0.18456454467773437, 0.16991439819335938, 0.17091941833496094, 0.17065621948242188, 0.17115213012695313, 0.17795989990234376, 0.17275830078125, 0.1715494384765625, 0.17105340576171876, 0.17086842346191405, 0.1720343322753906, 0.17609695434570313, 0.17358233642578125, 0.17158575439453125, 0.17124957275390626, 0.17145440673828125, 0.17333418273925782, 0.17373843383789062, 0.17346278381347657, 0.1719727020263672, 0.1712004089355469, 0.17220077514648438, 0.17323114013671875, 0.17415245056152343, 0.17446092224121093, 0.17134547424316407, 0.171078369140625, 0.1731715850830078, 0.1748017578125, 0.17316249084472657, 0.17172476196289063, 0.17259933471679687, 0.175177734375, 0.1736212463378906, 0.17459814453125, 0.17373114013671875, 0.17222930908203124, 0.17304495239257814, 0.17266758728027343, 0.17354351806640625, 0.1747538604736328, 0.1733031005859375, 0.17366464233398438, 0.173523193359375, 0.1747250213623047, 
0.17485340881347655, 0.17385072326660156, 0.17280809020996094, 0.17280029296875, 0.17376045227050782, 0.172775390625, 0.17453097534179687, 0.17343446350097655, 0.17403692626953124, 0.17207366943359376, 0.17320755004882812, 0.17422726440429687, 0.17457171630859375, 0.1736681213378906, 0.17449168395996092, 0.17336058044433594, 0.17424050903320312, 0.1752241668701172, 0.18394454956054687, 0.17117893981933593, 0.1701965789794922, 0.17147740173339843, 0.17156256103515624, 0.17700703430175782, 0.17229580688476562, 0.172044677734375, 0.17031318664550782, 0.17063392639160158, 0.17210330200195312, 0.17562646484375, 0.17343283081054686, 0.17244979858398438, 0.17172402954101562, 0.17108822631835938, 0.1733470458984375, 0.17425836181640625, 0.17243341064453124, 0.17299871826171875, 0.17084614562988282, 0.17236941528320313, 0.17309336853027343, 0.17435647583007813, 0.17219110107421876, 0.17165171813964844, 0.17160911560058595, 0.17283193969726562, 0.17345747375488282, 0.17379913330078126, 0.17272146606445313, 0.17248941040039062, 0.17156626892089843, 0.17358447265625, 0.17360520935058593, 0.17359295654296875, 0.1722076416015625, 0.17221372985839845, 0.17318748474121093, 0.17318669128417968, 0.17482406616210938, 0.17306227111816405, 0.17286697387695313, 0.17195606994628906, 0.17295663452148438, 0.17385177612304686, 0.1730487060546875, 0.173295166015625, 0.1723410186767578, 0.17246412658691407, 0.17391081237792969, 0.1736143341064453, 0.1735350036621094, 0.17268234252929687, 0.17246797180175782, 0.173486083984375, 0.17351884460449218, 0.17297383117675783, 0.1731832275390625, 0.1744066619873047, 0.17315090942382813, 0.17470086669921875, 0.1747162628173828, 0.1832654724121094, 0.1705184326171875, 0.17141349792480468, 0.17093597412109374, 0.17148287963867187, 0.17794422912597657, 0.17450694274902342, 0.17143807983398437, 0.1714456024169922, 0.170990234375, 0.17288729858398438, 0.17481341552734375, 0.17339447021484375, 0.1720908203125, 0.17165132141113282, 0.17121542358398437, 0.1735615997314453, 0.17446269226074218, 0.17293061828613282, 0.17211056518554688, 0.17217741394042968, 0.17171229553222656, 0.17345472717285157, 0.17353610229492186, 0.17181007385253907, 0.1715063934326172, 0.1713880615234375, 0.17226634216308595, 0.17299971008300782, 0.17357513427734375, 0.17216677856445312, 0.17032435607910157, 0.17277714538574218, 0.17359698486328126, 0.17373423767089843, 0.17237571716308595, 0.17242489624023438, 0.17248605346679688, 0.17332083129882814, 0.17379971313476564, 0.1732515869140625, 0.17302630615234374, 0.1721750030517578, 0.1730194854736328, 0.17428889465332031, 0.17292288208007814, 0.17330390930175782, 0.17245919799804688, 0.1719385223388672, 0.17332345581054687, 0.1730806121826172, 0.17302793884277343, 0.17293125915527344, 0.1738936309814453, 0.1735925750732422, 0.17398988342285157, 0.17308502197265624, 0.17386691284179687, 0.17336090087890624, 0.17472671508789062, 0.17318547058105468, 0.17415577697753906, 0.17411891174316407, 0.18339814758300782, 0.171156005859375, 0.17198208618164063, 0.17031011962890624, 0.17186611938476562, 0.17667071533203124, 0.17309500122070312, 0.17112086486816405, 0.1704117126464844, 0.17079855346679687, 0.17282928466796876, 0.17576275634765626, 0.1725603485107422, 0.17109674072265624, 0.17124114990234374, 0.1716862030029297, 0.173764892578125, 0.17356361389160158, 0.1734261474609375, 0.17196479797363282, 0.17200682067871093, 0.17286607360839842, 0.17440403747558594, 0.17344284057617188, 0.17232691955566407, 0.17197261047363283, 0.17152204895019532, 0.17380787658691407, 
0.173586181640625, 0.17397555541992188, 0.17260365295410157, 0.1713026580810547, 0.1730723876953125, 0.17270182800292969, 0.17265037536621095, 0.17329971313476564, 0.17364157104492187, 0.17218576049804687, 0.17244309997558593, 0.1741923522949219, 0.1739350128173828, 0.17252700805664062, 0.17270271301269532, 0.17262310791015625, 0.17229696655273438, 0.174339111328125, 0.17264906311035155, 0.17233552551269532, 0.17112451171875, 0.17222496032714843, 0.17357183837890625, 0.17446826171875, 0.17273942565917969, 0.1720863037109375, 0.1717884521484375, 0.17247894287109375, 0.17373219299316406, 0.17492985534667968, 0.17321171569824217, 0.1731995849609375, 0.17289193725585938, 0.17460415649414063, 0.17341786193847655, 0.1835221405029297, 0.1702010955810547, 0.17080099487304687, 0.17115335083007813, 0.17137277221679686, 0.17748127746582032, 0.17407955932617186, 0.1716413116455078, 0.17181715393066407, 0.17105123901367186, 0.17231820678710938, 0.17534413146972655, 0.17350198364257813, 0.17169378662109375, 0.17116236877441407, 0.17148931884765625, 0.17276719665527343, 0.17443020629882813, 0.1743953857421875, 0.17137586975097657, 0.17075177001953126, 0.17271609497070312, 0.17337641906738283, 0.17424998474121095, 0.17324832153320313, 0.17139651489257812, 0.17221209716796876, 0.1727804412841797, 0.17430908203125, 0.17395321655273438, 0.17265040588378905, 0.17186834716796875, 0.1722326965332031, 0.17415350341796876, 0.17486006164550782, 0.17386131286621093, 0.17311308288574218, 0.1726650848388672, 0.17331724548339844, 0.1742872314453125, 0.17370777893066405, 0.17301324462890624, 0.17346450805664063, 0.17224739074707032, 0.17319679260253906, 0.17313912963867187, 0.1729954833984375, 0.17319786071777343, 0.1719134063720703, 0.17226908874511718, 0.17374855041503906, 0.17503421020507812, 0.17401084899902344, 0.17349221801757814, 0.17274674987792968, 0.1738079376220703, 0.17379884338378906, 0.1748074188232422, 0.1738648376464844, 0.17301609802246093, 0.17296701049804689, 0.17279945373535155, 0.17417042541503908]",tokens/s,5.788257680473556,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,887.35744,1044.250624,0.0,641.728512,581.889536,s,1,8.7263193359375,8.7263193359375,0.0,8.7263193359375,8.7263193359375,8.7263193359375,8.7263193359375,[8.7263193359375],,kWh,4.407297918335947e-05,4.854433396980365e-06,1.347778856000681e-05,6.240520114034664e-05,,MB,1304.858624,1165.88544,0.0,750.780416,716.975104,s,10,1.153962646484375,0.11539626464843751,0.00046645972954271564,0.11539945602416993,0.1158919548034668,0.11598940010070802,0.11606735633850099,"[0.11483174133300782, 0.11508988952636719, 0.11581670379638671, 0.11576486206054687, 0.11496800231933593, 0.11608684539794922, 0.11570902252197265, 0.11490831756591798, 0.11587030029296876, 
0.11491696166992188]",tokens/s,2218.442692056985,kWh,3.4991903885421987e-06,3.857358533403213e-07,2.3202994752852785e-06,6.205225717167798e-06,tokens/kWh,41255550.026445135,MB,1338.51136,1314.783232,0.0,899.678208,716.977664,s,10,30.320919677734384,3.0320919677734377,0.014583514950958752,3.0297841796875,3.0361183349609377,3.0549134155273436,3.0699494799804685,"[3.028439697265625, 3.031941650390625, 3.031885009765625, 3.031259033203125, 3.01861962890625, 3.02945361328125, 3.025375244140625, 3.03011474609375, 3.02012255859375, 3.07370849609375]",tokens/s,20.777733877994116,kWh,8.88030020589508e-05,9.795180323529446e-06,3.454859113091841e-05,0.00013314677351339868,tokens/kWh,473162.0476981386,,s,630,30.315046634674072,0.0481191216423398,0.0006226521015654161,0.04798129653930664,0.0488615608215332,0.049287743949890134,0.05041647171020509,"[0.04924415969848633, 0.04870467376708985, 0.04860566329956055, 0.04856460952758789, 0.04815647888183594, 0.04840288162231445, 0.04949798583984375, 0.04820774459838867, 0.04751116943359375, 0.04749081420898438, 0.04744787216186523, 0.04730716705322266, 0.04749558258056641, 0.047489025115966796, 0.047621505737304684, 0.04787468719482422, 0.04784076690673828, 0.04769126510620117, 0.04768793487548828, 0.0481635856628418, 0.04842617416381836, 0.04823052978515625, 0.048390304565429684, 0.0478438720703125, 0.04751750564575195, 0.0474505615234375, 0.047767105102539065, 0.048441184997558596, 0.04813033676147461, 0.04768569564819336, 0.047898624420166014, 0.04786329650878906, 0.0481080322265625, 0.04799283218383789, 0.04773875045776367, 0.04839641571044922, 0.047982593536376954, 0.04767359924316406, 0.0478061752319336, 0.047475807189941405, 0.04805318450927734, 0.04847203063964844, 0.04928422546386719, 0.04831075286865234, 0.048157024383544925, 0.048207969665527345, 0.048584705352783204, 0.048831905364990234, 0.04944540786743164, 0.04886495971679688, 0.04822975921630859, 0.04784854507446289, 0.04823030471801758, 0.048140289306640625, 0.047632030487060543, 0.047642974853515624, 0.0477836799621582, 0.04785110473632812, 0.04766787338256836, 0.04756060791015625, 0.047661151885986325, 0.047756641387939454, 0.04781913757324219, 0.04871123123168945, 0.048080799102783206, 0.04780495834350586, 0.04955161666870117, 0.04841411209106445, 0.04789078521728515, 0.04771430587768555, 0.04794777679443359, 0.0478023681640625, 0.0476866569519043, 0.04762726211547851, 0.047734527587890624, 0.04812384033203125, 0.04794713592529297, 0.04794182586669922, 0.04794240188598633, 0.04797644805908203, 0.0478289909362793, 0.04769792175292969, 0.04880713653564453, 0.04844240188598633, 0.04799081420898438, 0.0476607666015625, 0.047683582305908204, 0.04900009536743164, 0.048094913482666014, 0.047980190277099606, 0.04755148696899414, 0.04819747161865234, 0.04794611358642578, 0.04834630584716797, 0.04854009628295899, 0.04791462326049805, 0.04771295928955078, 0.0478185920715332, 0.047691070556640625, 0.04782735824584961, 0.05088639831542969, 0.04846031951904297, 0.04796207809448242, 0.04801948928833008, 0.04801955032348633, 0.048008350372314455, 0.04965830230712891, 0.047932960510253905, 0.048519966125488284, 0.04799103927612305, 0.04761996841430664, 0.047492095947265625, 0.047569950103759764, 0.04789184188842773, 0.04757497787475586, 0.04880550384521484, 0.04853833770751953, 0.04950239944458008, 0.04835145568847656, 0.04805814361572266, 0.04815427017211914, 0.04789059066772461, 0.04751174545288086, 0.04767670440673828, 0.04757987213134766, 0.04798204803466797, 0.04779811096191406, 0.047935806274414065, 
0.048231422424316404, 0.04765372848510742, 0.047668415069580077, 0.047940414428710935, 0.04764057540893555, 0.04765670394897461, 0.04777804946899414, 0.04777321624755859, 0.047833568572998045, 0.04751871871948242, 0.04753919982910156, 0.049668128967285154, 0.0478853759765625, 0.049070175170898435, 0.04895129776000977, 0.048007999420166016, 0.047820510864257815, 0.04878483200073242, 0.04835414505004883, 0.048748542785644534, 0.04849488067626953, 0.04782233428955078, 0.0476899528503418, 0.047726593017578124, 0.05049244689941406, 0.048069503784179686, 0.048429153442382813, 0.047570945739746094, 0.047701568603515626, 0.04761849594116211, 0.048250335693359375, 0.04831628799438477, 0.048263839721679684, 0.04824883270263672, 0.048043838500976564, 0.04799916839599609, 0.047871200561523435, 0.04816928100585938, 0.04856675338745117, 0.04796185684204102, 0.048045921325683597, 0.04846163177490234, 0.04806671905517578, 0.0476860466003418, 0.047659072875976566, 0.04817878341674805, 0.04796044921875, 0.04818694305419922, 0.04828531265258789, 0.04806943893432617, 0.04789788818359375, 0.047880126953125, 0.04765776062011719, 0.0484119987487793, 0.04755945587158203, 0.047703937530517576, 0.04755606460571289, 0.048759326934814454, 0.04789657592773437, 0.04885504150390625, 0.04886032104492188, 0.04884444808959961, 0.04801087951660156, 0.0480008316040039, 0.04848505783081054, 0.04865251159667969, 0.048691200256347655, 0.04839833450317383, 0.04848838424682617, 0.04945721435546875, 0.04816896057128906, 0.047900543212890626, 0.04827353668212891, 0.04814847946166992, 0.04772643280029297, 0.04772259140014649, 0.047941631317138675, 0.047814720153808596, 0.04766310501098633, 0.04871500778198242, 0.048543968200683595, 0.04800131225585937, 0.04780838394165039, 0.04779363250732422, 0.0480285758972168, 0.04792649459838867, 0.04771456146240234, 0.04774895858764648, 0.047786689758300784, 0.048097278594970705, 0.04785385513305664, 0.04781027221679687, 0.048089088439941405, 0.04804729461669922, 0.048255809783935545, 0.04794777679443359, 0.048295551300048825, 0.048005504608154295, 0.048105472564697264, 0.04799081420898438, 0.04875443267822266, 0.04840784072875977, 0.04811257553100586, 0.04798278427124023, 0.047793792724609374, 0.04828998565673828, 0.04816006469726562, 0.047819454193115236, 0.04793958282470703, 0.047724704742431644, 0.047816257476806644, 0.04782640075683594, 0.04781343841552734, 0.04841471862792969, 0.04796960067749023, 0.04788294219970703, 0.04798054504394531, 0.04800307083129883, 0.047851520538330077, 0.04811980819702148, 0.04788598251342773, 0.048285728454589845, 0.048537311553955076, 0.04838256072998047, 0.04755251312255859, 0.04759078216552735, 0.047693824768066405, 0.04744905471801758, 0.047519390106201174, 0.04745830535888672, 0.047683135986328125, 0.04760416030883789, 0.04750851058959961, 0.04745273590087891, 0.04783555221557617, 0.04762217712402344, 0.048330848693847656, 0.04887750244140625, 0.04845795059204101, 0.04909791946411133, 0.048667102813720706, 0.04880710220336914, 0.048425857543945315, 0.048324638366699216, 0.048637374877929684, 0.048614177703857425, 0.04810521697998047, 0.04792729568481445, 0.04760371017456055, 0.04751564788818359, 0.047505409240722656, 0.04755046463012695, 0.04777391815185547, 0.04746451187133789, 0.04755222320556641, 0.047437824249267575, 0.04745638275146485, 0.047529281616210936, 0.04745388793945313, 0.04773926544189453, 0.047775264739990234, 0.04738256072998047, 0.047752128601074216, 0.047967647552490236, 0.047626049041748046, 0.04768783950805664, 0.04760591888427734, 
0.047505889892578125, 0.04740095901489258, 0.047388671875, 0.04746956634521484, 0.04747776031494141, 0.04781014251708984, 0.05017436981201172, 0.04794963073730469, 0.047599807739257816, 0.04756387329101563, 0.04747766494750977, 0.047806049346923826, 0.05046031951904297, 0.04776937484741211, 0.0483645133972168, 0.04873337554931641, 0.048057151794433595, 0.04783443069458008, 0.048081214904785154, 0.04753635025024414, 0.04872403335571289, 0.05088275146484375, 0.04841036987304687, 0.0476607666015625, 0.047839519500732425, 0.04743926239013672, 0.047438079833984376, 0.04730915069580078, 0.04726169586181641, 0.047339519500732424, 0.04733747100830078, 0.048149665832519534, 0.04748953628540039, 0.04753238296508789, 0.04792127990722656, 0.048121726989746094, 0.04878518295288086, 0.048074977874755856, 0.04857040023803711, 0.048656158447265625, 0.0488656005859375, 0.04912521743774414, 0.048646430969238284, 0.04893468856811523, 0.0487567024230957, 0.048578399658203125, 0.04822428894042969, 0.04793967819213867, 0.04843727874755859, 0.04800921630859375, 0.04785718536376953, 0.04769740676879883, 0.04790476989746094, 0.048300926208496096, 0.047723617553710934, 0.04825347137451172, 0.0480252799987793, 0.04806943893432617, 0.04806447982788086, 0.048091136932373046, 0.04792115020751953, 0.04776345443725586, 0.047695873260498046, 0.04773897552490235, 0.04831785583496094, 0.048361824035644534, 0.04840259170532227, 0.04801852798461914, 0.047593952178955075, 0.047970367431640626, 0.04763071823120117, 0.04761929702758789, 0.04756150436401367, 0.04765491104125977, 0.047892223358154296, 0.047982078552246094, 0.047823616027832035, 0.04802560043334961, 0.048162815093994144, 0.048121856689453124, 0.048113662719726565, 0.04784332656860352, 0.04821129608154297, 0.04737046432495117, 0.048184928894042967, 0.04766534423828125, 0.0477154541015625, 0.04801811218261719, 0.04757727813720703, 0.04743993759155273, 0.04761372756958008, 0.04761411285400391, 0.04799628829956055, 0.0479422721862793, 0.04920284652709961, 0.04917203140258789, 0.04804278564453125, 0.04784560012817383, 0.048740127563476565, 0.04909465789794922, 0.04835855865478516, 0.0484299201965332, 0.05012275314331055, 0.04955356979370117, 0.048253921508789065, 0.04805923080444336, 0.04863593673706055, 0.04833599853515625, 0.04840480041503906, 0.04842079925537109, 0.04797020721435547, 0.04781129455566406, 0.047400894165039065, 0.0476693115234375, 0.04761923217773437, 0.04756937789916992, 0.04835123062133789, 0.048039936065673826, 0.04741772842407226, 0.04750131225585937, 0.047874305725097654, 0.04878720092773438, 0.04825094223022461, 0.04811974334716797, 0.04782620620727539, 0.047624095916748044, 0.04764723205566406, 0.04767366409301758, 0.0482973747253418, 0.04763299179077148, 0.04813584136962891, 0.04750473785400391, 0.047408126831054685, 0.04751516723632813, 0.04803427124023438, 0.0474521598815918, 0.047470592498779295, 0.047898624420166014, 0.04780364990234375, 0.047919776916503905, 0.04768982315063477, 0.048100383758544925, 0.048005599975585934, 0.047806976318359375, 0.04765491104125977, 0.047562751770019535, 0.047479137420654294, 0.04746345520019531, 0.04753414535522461, 0.0475513916015625, 0.04762623977661133, 0.04735171127319336, 0.047556705474853515, 0.04769164657592773, 0.047460159301757815, 0.04742915344238281, 0.04781740951538086, 0.04760601425170898, 0.04780835342407227, 0.047589054107666014, 0.047946048736572267, 0.048353279113769534, 0.04832422256469727, 0.047901344299316403, 0.047976158142089845, 0.048011489868164066, 0.048113441467285155, 
0.048244255065917965, 0.04975199890136719, 0.04904745483398437, 0.04921744155883789, 0.04926537704467773, 0.048500545501708986, 0.04856406402587891, 0.04948806381225586, 0.04822793579101563, 0.04818387222290039, 0.04819302368164063, 0.0486036491394043, 0.04816617584228516, 0.04786374282836914, 0.04782380676269531, 0.04810531234741211, 0.04828483200073242, 0.04789673614501953, 0.04856614303588867, 0.04936787033081055, 0.04817932891845703, 0.048000896453857425, 0.047830944061279294, 0.04806991958618164, 0.048147262573242186, 0.04800630569458008, 0.04824124908447266, 0.048046337127685544, 0.047698944091796876, 0.04782211303710938, 0.04758499145507813, 0.04755843353271484, 0.04759574508666992, 0.04760371017456055, 0.047398433685302735, 0.0475118408203125, 0.04760307312011719, 0.04812268829345703, 0.04775872039794922, 0.048466560363769534, 0.05030912017822266, 0.04803724670410156, 0.0473106575012207, 0.04879363250732422, 0.04762486267089844, 0.047725982666015625, 0.047776351928710936, 0.04750771331787109, 0.0473515510559082, 0.047523105621337894, 0.047676128387451173, 0.04759747314453125, 0.047672481536865235, 0.047356864929199216, 0.048680286407470706, 0.050823585510253906, 0.048736286163330075, 0.04836070251464844, 0.04843414306640625, 0.0478392333984375, 0.047957664489746095, 0.04777801513671875, 0.04820348739624023, 0.04771881484985351, 0.04772991943359375, 0.04764339065551758, 0.04764057540893555, 0.04784860610961914, 0.04772313690185547, 0.047285472869873044, 0.0476431999206543, 0.04772723388671875, 0.047640384674072264, 0.04755660629272461, 0.04748287963867188, 0.04726998519897461, 0.04757494354248047, 0.04751513671875, 0.04772313690185547, 0.04747622299194336, 0.04754265594482422, 0.04734799957275391, 0.048123615264892575, 0.04813619232177734, 0.0476030387878418, 0.04783580780029297, 0.048242496490478515, 0.04832275390625, 0.04817916870117187, 0.047890464782714845, 0.047642623901367184, 0.0478474235534668, 0.04809334564208984, 0.04790195083618164, 0.04815087890625, 0.04786819076538086, 0.04789478302001953, 0.04789744186401367, 0.04846681594848633, 0.04897792053222656, 0.048531135559082034, 0.048083263397216795, 0.04800102233886719, 0.04821158218383789, 0.04873868942260742, 0.048103584289550784, 0.04853366470336914, 0.04827888107299805, 0.048228702545166015, 0.0483164176940918, 0.048271358489990236, 0.04851244735717773, 0.048415264129638674, 0.0483326416015625, 0.04844972610473633, 0.04847183990478516, 0.04843132781982422, 0.049227134704589844, 0.04914432144165039, 0.048806015014648436, 0.0487092170715332, 0.04904816055297852, 0.048143840789794924, 0.04830243301391601, 0.04845091247558594, 0.04885129547119141, 0.0490022087097168, 0.04864470291137695, 0.04887142562866211, 0.05209702301025391, 0.04877107238769531, 0.048939231872558595, 0.049304737091064456, 0.049203201293945314, 0.049175167083740236, 0.0490332145690918, 0.05225471878051758, 0.04895123291015625, 0.048244991302490235, 0.049166145324707033, 0.049559169769287106, 0.04953535842895508, 0.049544448852539065, 0.049500926971435544, 0.04929062271118164, 0.049363327026367185, 0.05016166305541992, 0.049411937713623046, 0.04929753494262695, 0.04918636703491211, 0.049259231567382815, 0.049334270477294925, 0.0488611831665039, 0.048318462371826174, 0.04810355377197266, 0.048097152709960934, 0.04807884979248047, 0.04795391845703125, 0.04783705520629883, 0.04778166580200195, 0.048033760070800784, 0.048434814453125, 0.04808377456665039, 0.04788422393798828, 0.047636478424072266, 0.047685630798339845, 0.048051551818847654, 
0.04817782211303711]",tokens/s,20.781759223138113,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 302.12 MiB is free. Process 158712 has 14.44 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 132.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 153724 has 14.69 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 203.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,825.176064,8535.277568,0.0,8132.755456,7824.681472,s,1,19.355048828125,19.355048828125,0.0,19.355048828125,19.355048828125,19.355048828125,19.355048828125,[19.355048828125],,kWh,0.000357421612099976,3.941356905418048e-05,0.00011472342511199829,0.0005115586062661547,,MB,1362.427904,9539.813376,0.0,9124.708352,8500.632064,s,10,17.740904541015624,1.7740904541015623,0.008706226093104712,1.7761414184570312,1.7815432250976564,1.7821738952636719,1.7826784313964845,"[1.7506591796875, 1.770615234375, 1.7712784423828125, 1.774149658203125, 1.7746822509765625, 1.77799755859375, 1.7776005859375, 1.7797139892578124, 1.7828045654296876, 
1.781403076171875]",tokens/s,144.2992939892932,kWh,5.1742666187920645e-05,5.706854829461947e-06,3.4431388656198936e-05,9.188090967358153e-05,tokens/kWh,2786215.3401557747,MB,1399.656448,9544.00768,0.0,9126.805504,8500.634624,s,10,83.37062890625,8.337062890625,0.018678424742809827,8.340683105468749,8.35447490234375,8.354972802734375,8.355371123046876,"[8.294720703125, 8.3147724609375, 8.33323828125, 8.333140625, 8.334890625, 8.3464755859375, 8.3532841796875, 8.3543642578125, 8.350271484375, 8.355470703125]",tokens/s,7.556618059202035,kWh,0.00024398087841749578,2.6913000317783666e-05,0.00016191376841980242,0.00043280764715508186,tokens/kWh,145561.1988700054,,s,630,83.36781452941898,0.13232986433241103,0.0018008641998273776,0.132398681640625,0.1334996597290039,0.1338594841003418,0.14334174850463868,"[0.14481919860839843, 0.13183203125, 0.13035484313964843, 0.13028848266601561, 0.13021385192871093, 0.1302890167236328, 0.13022682189941406, 0.13251763916015624, 0.1320978240966797, 0.13074070739746094, 0.13034495544433594, 0.13034214782714842, 0.13036416625976563, 0.13039599609375, 0.13181149291992186, 0.13171452331542968, 0.13223939514160157, 0.13073373413085937, 0.13047042846679688, 0.13036976623535157, 0.13046588134765624, 0.13146316528320312, 0.1329747772216797, 0.13314186096191405, 0.1311072998046875, 0.1306929931640625, 0.13060957336425782, 0.13063343811035155, 0.1307667236328125, 0.13167015075683594, 0.13287628173828125, 0.13268992614746095, 0.1311270751953125, 0.13076502990722655, 0.13065420532226563, 0.13071286010742186, 0.13123606872558594, 0.132789794921875, 0.13278717041015625, 0.13275712585449218, 0.13237901306152344, 0.1311436767578125, 0.1308037109375, 0.13079756164550782, 0.13156338500976564, 0.1324701385498047, 0.13255552673339843, 0.1323489532470703, 0.1310866241455078, 0.13267628479003907, 0.13136224365234375, 0.13083705139160157, 0.1315392303466797, 0.13240031433105467, 0.1327222137451172, 0.13117543029785156, 0.13212672424316407, 0.13261415100097657, 0.1312027587890625, 0.13106434631347658, 0.13149366760253905, 0.1326878662109375, 0.13134025573730468, 0.14261453247070313, 0.13204074096679688, 0.13062696838378907, 0.13035891723632811, 0.13025173950195312, 0.13035282897949219, 0.13036166381835937, 0.13219020080566407, 0.13318553161621094, 0.13207347106933592, 0.13052281188964843, 0.13042105102539062, 0.1304239044189453, 0.13115676879882812, 0.130648193359375, 0.13253439331054687, 0.13253004455566406, 0.132495361328125, 0.13174732971191405, 0.130861572265625, 0.1305006103515625, 0.1311846466064453, 0.13174745178222655, 0.1330873260498047, 0.1328926696777344, 0.1320880889892578, 0.131557373046875, 0.13202348327636718, 0.1306337890625, 0.13061570739746095, 0.13254876708984376, 0.13292294311523437, 0.13267750549316407, 0.1319058837890625, 0.13119679260253905, 0.13080426025390626, 0.13111465454101562, 0.13256297302246095, 0.13212255859375, 0.13303176879882814, 0.13228419494628907, 0.13244288635253906, 0.1311928253173828, 0.13123379516601563, 0.13253330993652343, 0.13170375061035156, 0.13366886901855468, 0.13290701293945312, 0.13170072937011718, 0.1315752716064453, 0.1314343719482422, 0.13278880310058594, 0.13131085205078125, 0.13277679443359375, 0.13312205505371094, 0.132710205078125, 0.13129344177246094, 0.13107574462890625, 0.13160791015625, 0.1327277374267578, 0.1313278045654297, 0.1329189453125, 0.133555908203125, 0.1440358428955078, 0.13177778625488282, 0.130403076171875, 0.13034291076660157, 0.13029478454589843, 0.13028582763671875, 0.13075942993164064, 0.13374642944335938, 
0.13345928955078126, 0.13242051696777343, 0.1309450225830078, 0.13046170043945313, 0.13038584899902345, 0.130418212890625, 0.133093017578125, 0.1321355895996094, 0.1328450927734375, 0.13297232055664063, 0.13147433471679687, 0.13058047485351562, 0.13047398376464844, 0.13190757751464843, 0.1331793975830078, 0.13338021850585938, 0.13299848937988282, 0.13253237915039062, 0.13205337524414062, 0.13074771118164064, 0.13115461730957031, 0.1325322265625, 0.13195797729492187, 0.13312643432617188, 0.13273344421386718, 0.13229884338378906, 0.13083177185058595, 0.13130607604980468, 0.13257081604003906, 0.13241981506347655, 0.1325240936279297, 0.13199559020996093, 0.13318553161621094, 0.1319710693359375, 0.13196697998046875, 0.1323970489501953, 0.13190757751464843, 0.13334323120117186, 0.13215461730957032, 0.13297311401367187, 0.13138691711425782, 0.13157398986816407, 0.1334291229248047, 0.1321922607421875, 0.13272543334960937, 0.13210202026367188, 0.1327389373779297, 0.13337408447265625, 0.13213603210449218, 0.13184912109375, 0.13263258361816407, 0.13255795288085936, 0.13331546020507812, 0.1322079620361328, 0.13329679870605468, 0.14353855895996093, 0.13185565185546874, 0.13040211486816405, 0.1302349090576172, 0.13026345825195312, 0.13043096923828126, 0.1306378173828125, 0.13441261291503906, 0.1324397430419922, 0.13136802673339842, 0.13200271606445313, 0.13061734008789064, 0.13037930297851563, 0.13051747131347657, 0.1333387145996094, 0.13267808532714845, 0.13269804382324218, 0.13187893676757811, 0.13093843078613282, 0.13194216918945312, 0.13078099060058593, 0.13164627075195312, 0.13247286987304688, 0.13285580444335937, 0.13196444702148438, 0.13237837219238283, 0.13172808837890626, 0.13232333374023436, 0.13102694702148437, 0.13251171875, 0.1326790771484375, 0.13268553161621094, 0.13198019409179687, 0.1323225860595703, 0.13160450744628907, 0.1322236785888672, 0.13190931701660155, 0.1331838073730469, 0.13266738891601562, 0.13291427612304688, 0.13123043823242186, 0.13266249084472656, 0.13146826171875, 0.13243807983398437, 0.132837158203125, 0.13228662109375, 0.13244825744628907, 0.13255885314941407, 0.13247283935546875, 0.13221478271484374, 0.13260418701171875, 0.13266102600097657, 0.13224748229980468, 0.1326796875, 0.13247897338867187, 0.13312205505371094, 0.1325875244140625, 0.1324400634765625, 0.13224453735351563, 0.1327687072753906, 0.1313420867919922, 0.13324029541015625, 0.13341981506347655, 0.14375071716308593, 0.131745849609375, 0.13029986572265626, 0.13022694396972656, 0.13069424438476562, 0.1302864990234375, 0.1308734130859375, 0.13475013732910157, 0.13320124816894532, 0.13155091857910156, 0.130396484375, 0.13034153747558594, 0.1304015350341797, 0.131046142578125, 0.13290447998046875, 0.1334639434814453, 0.13318328857421874, 0.13195138549804689, 0.13054754638671875, 0.13071990966796876, 0.13089955139160156, 0.13243801879882813, 0.1333784637451172, 0.13364134216308593, 0.13259190368652343, 0.13123440551757812, 0.1306419219970703, 0.13058026123046876, 0.1315919647216797, 0.13288479614257812, 0.1339412841796875, 0.13383485412597657, 0.13249317932128907, 0.131004638671875, 0.13064553833007814, 0.1308673858642578, 0.13256924438476564, 0.132589599609375, 0.134217529296875, 0.13354742431640626, 0.13216761779785155, 0.13082095336914062, 0.13067263793945313, 0.13148159790039063, 0.13284352111816405, 0.1340436553955078, 0.1339330596923828, 0.13243951416015626, 0.13126710510253906, 0.13100236511230468, 0.13315583801269532, 0.1316812744140625, 0.13301951599121092, 0.13399977111816405, 0.13328688049316406, 
0.132421630859375, 0.13281893920898438, 0.13147544860839844, 0.13127433776855468, 0.13291529846191405, 0.133808349609375, 0.13252400207519532, 0.1335784912109375, 0.1441458282470703, 0.1318651885986328, 0.1305128936767578, 0.13071974182128906, 0.1303442840576172, 0.13047261047363282, 0.1316864013671875, 0.13605990600585938, 0.13276176452636718, 0.13115478515625, 0.13044876098632813, 0.13052359008789063, 0.1315569305419922, 0.1320597686767578, 0.1333719024658203, 0.13352326965332031, 0.13335494995117186, 0.1318943634033203, 0.130999267578125, 0.13095989990234375, 0.13105113220214842, 0.1337533721923828, 0.1331403503417969, 0.133378173828125, 0.13327769470214842, 0.13184819030761719, 0.13080166625976564, 0.13058216857910157, 0.13221270751953124, 0.13344309997558593, 0.13348077392578125, 0.13270252990722656, 0.13190969848632814, 0.13205279541015624, 0.13058493041992186, 0.13163880920410156, 0.13328022766113282, 0.13267575073242188, 0.13393898010253907, 0.13252204895019531, 0.13237657165527345, 0.13114169311523438, 0.13141191101074218, 0.13252198791503905, 0.13313221740722656, 0.13351651000976564, 0.1333830108642578, 0.13282322692871093, 0.13256617736816406, 0.1310847930908203, 0.13184159851074218, 0.13242556762695312, 0.133421630859375, 0.133714111328125, 0.13268988037109375, 0.13228402709960937, 0.13258384704589843, 0.13146258544921874, 0.13188359069824218, 0.13309327697753906, 0.1342710723876953, 0.1337012176513672, 0.1320800323486328, 0.14409318542480468, 0.13179017639160157, 0.130900634765625, 0.1304226531982422, 0.13033279418945312, 0.130844482421875, 0.13195896911621094, 0.1355404510498047, 0.1340429382324219, 0.1324349060058594, 0.1311129608154297, 0.1324073028564453, 0.13090815734863281, 0.13045555114746094, 0.13303366088867188, 0.13303021240234375, 0.13357437133789063, 0.13246287536621093, 0.13126559448242187, 0.13123004150390624, 0.13226194763183594, 0.131733154296875, 0.13336213684082032, 0.13290684509277345, 0.1335465545654297, 0.13193116760253906, 0.13067362976074218, 0.13252323913574218, 0.13232412719726563, 0.13258956909179687, 0.1325875244140625, 0.1326366424560547, 0.13308522033691406, 0.1322388153076172, 0.13105372619628905, 0.13253251647949219, 0.1317928924560547, 0.13326960754394532, 0.1327960662841797, 0.13261785888671876, 0.1319881896972656, 0.1327860107421875, 0.13305830383300782, 0.13140419006347656, 0.13260386657714843, 0.13275135803222657, 0.13258685302734374, 0.13331484985351563, 0.13295225524902343, 0.13344111633300781, 0.13205702209472656, 0.1326537628173828, 0.1323520050048828, 0.1325280303955078, 0.13367884826660156, 0.1328438720703125, 0.13287423706054688, 0.13226803588867186, 0.13336781311035156, 0.13225369262695313, 0.13277503967285156, 0.13271267700195313, 0.1334805145263672, 0.14332470703125, 0.13226646423339844, 0.13045298767089844, 0.13033625793457032, 0.13026611328125, 0.13026847839355468, 0.13154170227050782, 0.13490109252929688, 0.13394192504882813, 0.13215948486328125, 0.1310323181152344, 0.13043283081054688, 0.13052598571777344, 0.1311868438720703, 0.13342320251464843, 0.13351925659179686, 0.13354803466796875, 0.1322677459716797, 0.13192630004882813, 0.1306516876220703, 0.13124176025390624, 0.13301011657714842, 0.13379107666015624, 0.1337960662841797, 0.1325015106201172, 0.13270060729980468, 0.13125369262695313, 0.13086965942382814, 0.1322960662841797, 0.13368960571289062, 0.1327208709716797, 0.13265951538085938, 0.13352864074707033, 0.13258848571777343, 0.13104537963867188, 0.13188096618652342, 0.13348658752441406, 0.13327894592285155, 
0.1328770294189453, 0.13345184326171874, 0.13323043823242187, 0.13160873413085938, 0.13168447875976563, 0.13323455810546875, 0.13259365844726562, 0.13265715026855468, 0.13347021484375, 0.1324419860839844, 0.13294534301757813, 0.13284556579589843, 0.13263296508789063, 0.13251820373535156, 0.1324541778564453, 0.13365382385253907, 0.13313526916503907, 0.1322904968261719, 0.1328661193847656, 0.13228851318359375, 0.13303094482421876, 0.13260694885253907, 0.13266943359375, 0.13404261779785157, 0.13257420349121093, 0.14285157775878907, 0.13190354919433595, 0.13045826721191406, 0.13031184387207032, 0.13021586608886718, 0.1303233642578125, 0.13216128540039063, 0.13490115356445312, 0.1339132843017578, 0.13231849670410156, 0.13092086791992188, 0.13066671752929687, 0.1303900146484375, 0.13136441040039062, 0.13384544372558593, 0.1337689208984375, 0.1333445739746094, 0.13176316833496093, 0.13090797424316405, 0.13046173095703126, 0.13111036682128907, 0.13374461364746093, 0.13283120727539063, 0.1333358154296875, 0.1328302459716797, 0.13141091918945313, 0.1305575714111328, 0.13117478942871094, 0.13311727905273438, 0.13298112487792968, 0.13306466674804687, 0.13256285095214843, 0.1326739501953125, 0.13302784729003905, 0.1312788543701172, 0.13151437377929687, 0.13221417236328126, 0.1331881561279297, 0.13291098022460937, 0.1324893798828125, 0.1328476104736328, 0.1330155487060547, 0.13228440856933593, 0.132071044921875, 0.1329256591796875, 0.1329701385498047, 0.13301737976074218, 0.13273980712890626, 0.13288406372070313, 0.1326508483886719, 0.13270201110839844, 0.13262448120117187, 0.13331248474121093, 0.13233126831054687, 0.13349778747558594, 0.13302784729003905, 0.1327941131591797, 0.13302195739746095, 0.13347430419921874, 0.1321448974609375, 0.133242431640625, 0.13285574340820314, 0.13273500061035157, 0.14334870910644532, 0.13180038452148438, 0.13031698608398437, 0.130322265625, 0.13017868041992187, 0.13030387878417968, 0.13165779113769532, 0.13593020629882813, 0.133791748046875, 0.13197747802734375, 0.1307269744873047, 0.1302947235107422, 0.13038796997070312, 0.13117666625976562, 0.13422569274902343, 0.1345797119140625, 0.13273458862304688, 0.13209481811523438, 0.1319239959716797, 0.13053543090820313, 0.1308847961425781, 0.13306553649902345, 0.13397811889648437, 0.13354803466796875, 0.13165977478027344, 0.13258956909179687, 0.13165951538085938, 0.13089190673828124, 0.1321985321044922, 0.13368634033203125, 0.13308615112304686, 0.13277162170410156, 0.13204092407226561, 0.13232102966308593, 0.13114393615722655, 0.13254197692871095, 0.13303855895996095, 0.1338709716796875, 0.13308787536621094, 0.1326796875, 0.13291651916503905, 0.13284835815429688, 0.1322434539794922, 0.13258656311035155, 0.1330410919189453, 0.1337507781982422, 0.13278207397460937, 0.13288447570800782, 0.13254579162597657, 0.1328053436279297, 0.1331682891845703, 0.13324159240722655, 0.13300953674316407, 0.13324662780761717, 0.13241993713378905, 0.1325875244140625, 0.13271449279785155, 0.13342105102539062, 0.13261993408203124, 0.1326266632080078, 0.13245404052734375, 0.13247731018066405, 0.13370991516113281]",tokens/s,7.556873159697436,, 
4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,2196.226048,7355.695104,0.0,6960.447488,6722.822144,s,1,15.48496875,15.48496875,0.0,15.48496875,15.48496875,15.48496875,15.48496875,[15.48496875],,kWh,0.00024491887688329823,2.700919066494004e-05,9.056562800799361e-05,0.0003624936955562319,,MB,1687.216128,7921.926144,0.0,7514.095616,7161.534464,s,10,10.654595092773437,1.0654595092773438,0.006423383328901411,1.06682958984375,1.0714741821289062,1.073470672607422,1.0750678649902345,"[1.05067626953125, 1.0610797119140625, 1.0617020263671875, 1.0637078857421876, 1.0675457763671874, 1.06951171875, 1.0677606201171874, 1.0661134033203126, 1.0754671630859376, 1.071030517578125]",tokens/s,240.2719181450959,kWh,3.107582351999781e-05,3.425319836929779e-06,2.056862756600053e-05,5.506977092292811e-05,tokens/kWh,4648648.3547984995,MB,1691.369472,7924.023296,0.0,7516.192768,7161.537024,s,10,51.312615722656254,5.131261572265625,0.02000570967956672,5.1339006347656255,5.1546525390625,5.1548863281249995,5.155073359375,"[5.08705126953125, 5.113611328125, 5.11806201171875, 5.12836181640625, 5.12734521484375, 5.139439453125, 5.1427421875, 5.14628173828125, 5.1546005859375, 5.1551201171875]",tokens/s,12.277682420345485,kWh,0.00015117442855958185,1.66774975152408e-05,0.00010051808041439957,0.0002683700064892223,tokens/kWh,234750.52530704497,,s,630,51.30943235778806,0.08144354342506047,0.0018556501328159932,0.08116019439697265,0.08238155364990235,0.08316942100524902,0.09381185348510744,"[0.09235491180419922, 0.07987139129638672, 0.07895305633544922, 0.0793702392578125, 0.07965609741210937, 0.07962505340576172, 0.08048127746582032, 0.08027760314941407, 0.08023337554931641, 0.08075183868408203, 0.08094796752929688, 0.08150627136230469, 0.08381804656982422, 0.08080230712890625, 0.08005366516113281, 0.07967327880859375, 0.08007746887207032, 0.0799683837890625, 0.08066035461425781, 0.07986790466308594, 0.07976134490966796, 0.0806052474975586, 0.08042192077636719, 0.0819163818359375, 0.08121395111083984, 0.080932861328125, 0.08070867156982423, 0.08046031951904296, 0.07979161834716797, 0.08015267181396485, 0.07988835144042969, 0.07966191864013672, 0.08017305755615234, 0.07999897766113281, 0.08042700958251953, 0.08049785614013671, 0.0828070068359375, 0.0808446044921875, 0.08102521514892579, 0.08054844665527344, 0.08053350067138672, 0.08042291259765624, 0.08017305755615234, 0.08003584289550782, 0.08043462371826172, 0.0800118408203125, 0.07991085052490235, 0.08071379089355468, 0.08134182739257813, 0.08108048248291015, 0.08065071868896484, 0.08116838073730469, 0.08102285003662109, 0.08108863830566407, 0.08072566223144531, 0.08020003509521484, 0.08060066986083984, 0.08054134368896484, 0.08051789093017578, 0.08116429138183594, 0.08105574035644532, 0.08100863647460937, 0.08156364440917968, 0.09391452789306641, 0.08084342193603515, 0.08176016235351563, 0.08120652770996094, 0.08080470275878907, 0.08087551879882812, 
0.0814571533203125, 0.08086729431152344, 0.08091785430908204, 0.08091923522949218, 0.08113152313232422, 0.08108771514892578, 0.0823897933959961, 0.08096768188476562, 0.08032611083984376, 0.0803476791381836, 0.08047206115722656, 0.08028160095214844, 0.0808686752319336, 0.08029840087890625, 0.0799315185546875, 0.08054595184326171, 0.08061488342285156, 0.08148226928710937, 0.0811171875, 0.08043929290771484, 0.08063926696777343, 0.08138758087158203, 0.08060380554199219, 0.08026316833496094, 0.08073983764648437, 0.08050265502929688, 0.07991753387451171, 0.08049833679199218, 0.08124591827392579, 0.08107087707519531, 0.08148377227783203, 0.08137856292724609, 0.08154390716552734, 0.08059241485595703, 0.08011971282958984, 0.08076758575439454, 0.08063906860351562, 0.08079043579101562, 0.08068505859375, 0.0810040283203125, 0.080961181640625, 0.08051388549804687, 0.08084230041503906, 0.0816409912109375, 0.0813636474609375, 0.08124444580078125, 0.08128915405273437, 0.08102662658691406, 0.08159241485595703, 0.0823193588256836, 0.08141859436035156, 0.08181145477294922, 0.0808938217163086, 0.08155919647216797, 0.08099273681640624, 0.0807383041381836, 0.08136089324951172, 0.09320713806152343, 0.07985715484619141, 0.07958758544921875, 0.08017091369628906, 0.07982902526855469, 0.08019999694824219, 0.07990262603759765, 0.07976048278808594, 0.079801025390625, 0.07980063629150391, 0.07969586944580079, 0.08378777313232422, 0.0831827163696289, 0.08123481750488282, 0.08051302337646485, 0.08053350067138672, 0.080648193359375, 0.08057609558105469, 0.07981712341308594, 0.07982041931152344, 0.07990924835205078, 0.0805212173461914, 0.08060723114013672, 0.08210431671142578, 0.08249459075927734, 0.08157679748535156, 0.08113549041748047, 0.08078966522216798, 0.08125234985351562, 0.08121753692626953, 0.08198963165283203, 0.08212480163574219, 0.08148786926269531, 0.08151039886474609, 0.08091958618164062, 0.08192098999023438, 0.08170496368408203, 0.08100249481201172, 0.08116806030273438, 0.08077123260498047, 0.0807507553100586, 0.08076652526855468, 0.0812674560546875, 0.08136057281494141, 0.08089750671386718, 0.0807449951171875, 0.08140595245361328, 0.08097792053222656, 0.08132559967041016, 0.08142076873779297, 0.08104959869384766, 0.08076271820068359, 0.08041693115234375, 0.08193228912353516, 0.08146134185791015, 0.08172124481201172, 0.08086457824707032, 0.08126943969726562, 0.08235622406005859, 0.08257917022705077, 0.0814302749633789, 0.08189801788330078, 0.08091840362548829, 0.09483315277099609, 0.0802607650756836, 0.07969305419921875, 0.08019744110107421, 0.08028050994873047, 0.0796275863647461, 0.08017526245117187, 0.07992988586425781, 0.07973887634277343, 0.07993110656738281, 0.08008022308349609, 0.08390956878662109, 0.08330035400390624, 0.08172886657714844, 0.08063657379150391, 0.08060928344726563, 0.0810265884399414, 0.08053193664550781, 0.0805992660522461, 0.08063362884521484, 0.0797655029296875, 0.08036966705322265, 0.08128431701660156, 0.08217884826660156, 0.08286406707763672, 0.08349616241455078, 0.08154198455810546, 0.08159369659423828, 0.08076156616210937, 0.08077101135253906, 0.08083865356445312, 0.08099839782714843, 0.08073625946044923, 0.08087347412109375, 0.08111698913574218, 0.08177446746826172, 0.08227059173583984, 0.08204847717285156, 0.08115452575683593, 0.081491455078125, 0.08113613128662109, 0.08050466918945312, 0.08137315368652344, 0.0806316146850586, 0.08080217742919922, 0.08015382385253907, 0.0813985595703125, 0.0813096923828125, 0.08242160034179688, 0.08296259307861328, 0.08308499145507812, 
0.08283372497558594, 0.08118886566162109, 0.08058675384521484, 0.0805212173461914, 0.08064985656738281, 0.08073171234130859, 0.0810047378540039, 0.0818799057006836, 0.08119478607177734, 0.08205516815185547, 0.08150527954101562, 0.08242415618896484, 0.09356047821044922, 0.08068348693847656, 0.08021810913085937, 0.08008700561523438, 0.08062905883789062, 0.08074291229248047, 0.08055420684814453, 0.08056422424316406, 0.08060675048828125, 0.0809816665649414, 0.08050112152099609, 0.08344316864013672, 0.08213565063476562, 0.08126627349853516, 0.08087648010253906, 0.08070441436767578, 0.08064409637451173, 0.08031737518310547, 0.08027545928955078, 0.07996956634521485, 0.07961264038085937, 0.08069529724121094, 0.08142198181152344, 0.08220838165283204, 0.08178880310058594, 0.08203510284423828, 0.08119750213623046, 0.08110867309570312, 0.08079347229003907, 0.08096720123291015, 0.0802557144165039, 0.08094534301757812, 0.08078150177001953, 0.08099606323242188, 0.08107427215576171, 0.08175411224365234, 0.0832573471069336, 0.083174560546875, 0.08139657592773437, 0.08059699249267578, 0.08060873413085938, 0.0810624008178711, 0.08054991912841797, 0.08052652740478515, 0.08082514953613282, 0.08093260955810547, 0.08120140838623047, 0.08225587463378906, 0.08193574523925781, 0.08202713775634765, 0.08122163391113281, 0.08108998107910156, 0.08088816070556641, 0.08138313293457031, 0.0811045150756836, 0.08125689697265626, 0.08086367797851562, 0.08067839813232422, 0.08236083221435547, 0.08255487823486328, 0.08380210876464844, 0.08173494720458985, 0.0813372802734375, 0.09488384246826172, 0.080729248046875, 0.08134127807617188, 0.0817086410522461, 0.08167215728759766, 0.08159056091308593, 0.08081005096435546, 0.08169245147705079, 0.08082611083984376, 0.08109257507324219, 0.08091887664794922, 0.08220902252197265, 0.08184355163574218, 0.08063862609863282, 0.08063385772705078, 0.08067696380615234, 0.08091149139404297, 0.08105244445800781, 0.08089395141601563, 0.08088905334472657, 0.0802291488647461, 0.08082431793212891, 0.08167424011230469, 0.08168447875976563, 0.08140921783447265, 0.0817242202758789, 0.08100249481201172, 0.08105359649658203, 0.08128521728515625, 0.08129312133789063, 0.08180339050292969, 0.08089615631103515, 0.08039823913574219, 0.08133554840087891, 0.08141696166992188, 0.0823193588256836, 0.0836456298828125, 0.08129209899902344, 0.08135475158691406, 0.08128038024902344, 0.08078924560546875, 0.0804727020263672, 0.08146524810791016, 0.0810351333618164, 0.08059133148193359, 0.08068505859375, 0.08194048309326171, 0.08227021026611328, 0.08240946960449219, 0.0814202880859375, 0.08122525024414062, 0.08168019104003907, 0.08144963073730468, 0.0816885757446289, 0.0818116455078125, 0.08280044555664062, 0.08144895935058594, 0.08128704071044922, 0.08167436981201172, 0.08166595458984376, 0.08180540466308593, 0.08148512268066406, 0.08108268737792969, 0.09476316833496094, 0.08143218994140625, 0.08160294342041016, 0.08108441925048829, 0.08126866912841797, 0.08076703643798828, 0.08090419006347656, 0.08093695831298828, 0.08082950592041016, 0.08093177795410156, 0.08079682922363281, 0.08326640319824219, 0.08206130981445313, 0.0817371826171875, 0.08109724426269531, 0.08065023803710937, 0.08085298919677734, 0.08078540802001953, 0.08027519989013672, 0.08078975677490234, 0.08028160095214844, 0.08065744018554688, 0.081544189453125, 0.08228639984130859, 0.08199411010742187, 0.08174784088134765, 0.08135465240478515, 0.08149951934814453, 0.08153766632080078, 0.08162713623046874, 0.08159212493896484, 0.08248038482666016, 
0.08195986938476563, 0.08099215698242188, 0.08169071960449219, 0.08118271636962891, 0.08172748565673828, 0.08150851440429688, 0.08103119659423828, 0.08163513946533203, 0.08150198364257813, 0.0809324493408203, 0.08168102264404296, 0.08111433410644531, 0.08079849243164063, 0.08115609741210937, 0.08150857543945313, 0.08147046661376953, 0.08173238372802734, 0.08209212493896484, 0.08126560211181641, 0.0822833251953125, 0.08210006713867188, 0.08304994964599609, 0.08178489685058593, 0.0813177947998047, 0.08090675354003907, 0.08125888061523437, 0.08122496032714843, 0.08129203033447266, 0.08176435089111328, 0.08141619110107422, 0.08160460662841797, 0.09513529968261719, 0.08085327911376954, 0.08183990478515625, 0.08083289337158203, 0.0809574432373047, 0.08075878143310547, 0.08076898956298828, 0.08075267028808594, 0.08087888336181641, 0.08091910552978515, 0.0804947509765625, 0.084170654296875, 0.0823806381225586, 0.08115213012695313, 0.08163750457763672, 0.08101840209960938, 0.080390625, 0.08103472137451172, 0.08077356719970703, 0.08077117156982422, 0.08047539520263672, 0.08079436492919922, 0.08169420623779297, 0.08242963409423829, 0.08302880096435547, 0.0814830093383789, 0.08168319702148437, 0.08316313934326172, 0.08121510314941406, 0.0814513931274414, 0.08138931274414063, 0.08004972839355469, 0.08071206665039063, 0.08049839782714843, 0.08138198089599609, 0.08210368347167969, 0.08237734222412109, 0.08182150268554687, 0.08185699462890625, 0.08162480163574219, 0.0814940185546875, 0.08182307434082031, 0.08066320037841797, 0.08103529357910157, 0.08088162994384765, 0.08131759643554687, 0.08164985656738281, 0.08205052947998047, 0.08426284790039062, 0.08152054595947265, 0.08142108917236328, 0.08176025390625, 0.08141974639892578, 0.08112796783447265, 0.08154447937011719, 0.08094588470458984, 0.08021318054199218, 0.08172736358642578, 0.08171965026855468, 0.08224009704589844, 0.0827883529663086, 0.0815308837890625, 0.08211251068115234, 0.09568163299560548, 0.0809701156616211, 0.08182425689697266, 0.08213916778564453, 0.08151174163818359, 0.08140665435791015, 0.08061666870117187, 0.08102992248535157, 0.08105680084228516, 0.08077206420898438, 0.08072576141357422, 0.084076416015625, 0.08255068969726563, 0.081218017578125, 0.08183602905273438, 0.08143901062011719, 0.08137677001953125, 0.08043746948242188, 0.08093081665039062, 0.08036502075195312, 0.08121548461914062, 0.08071222686767578, 0.08144687652587891, 0.08283926391601562, 0.08157843017578124, 0.0811805419921875, 0.08276604461669922, 0.08362166595458985, 0.08137522888183593, 0.08090160369873046, 0.08142899322509765, 0.08033849334716797, 0.08032713317871094, 0.08071984100341797, 0.0813197784423828, 0.08180307006835938, 0.08183433532714844, 0.08169884490966797, 0.08270880126953126, 0.08196249389648437, 0.08157350158691407, 0.0814044189453125, 0.081295166015625, 0.08123622131347656, 0.08066783905029297, 0.08128377532958984, 0.08312435150146484, 0.08316242980957031, 0.08104188537597656, 0.08162531280517578, 0.08177247619628907, 0.08223446655273438, 0.08147859191894531, 0.08162102508544922, 0.08112947082519531, 0.08078540802001953, 0.08136294555664063, 0.08099635314941406, 0.08213024139404297, 0.08247907257080078, 0.08202889251708985, 0.08332530975341797, 0.08276172637939454, 0.09585167694091797, 0.08175276947021484, 0.08160806274414062, 0.08131273651123047, 0.08087503814697265, 0.08171363067626954, 0.08068595123291016, 0.08080025482177734, 0.08066297912597656, 0.08019967651367188, 0.08018115234375, 0.08466031646728515, 0.08314262390136719, 0.08172752380371094, 
0.0814073257446289, 0.08085772705078124, 0.08089615631103515, 0.08079766082763672, 0.08054080200195313, 0.08081078338623048, 0.08069324493408203, 0.08089600372314452, 0.08136495971679687, 0.0836178207397461, 0.08219647979736328, 0.08444518280029296, 0.08162070465087891, 0.08036978912353515, 0.08168019104003907, 0.08072166442871094, 0.08052387237548828, 0.08052915191650391, 0.08041702270507813, 0.0810096664428711, 0.08175446319580078, 0.0819039077758789, 0.08273872375488281, 0.08255315399169921, 0.08181609344482423, 0.08103936004638672, 0.08179724884033203, 0.08125424194335938, 0.0820343017578125, 0.08160912322998047, 0.08236589050292968, 0.08203218841552734, 0.0819183349609375, 0.081168701171875, 0.0827026596069336, 0.08221305847167969, 0.08170579528808594, 0.08157695770263672, 0.08136080169677734, 0.08173782348632813, 0.08131890869140625, 0.08061849975585937, 0.08141366577148437, 0.08200444793701171, 0.0815302734375, 0.08163001251220703, 0.08381417846679687, 0.08320537567138672, 0.08141606140136719]",tokens/s,12.278444158316134,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1557.417984,3332.243456,0.0,2929.721344,2929.394176,s,1,11.1492587890625,11.1492587890625,0.0,11.1492587890625,11.1492587890625,11.1492587890625,11.1492587890625,[11.1492587890625],,kWh,0.0001170931202875181,1.2908974004674324e-05,3.693030732199176e-05,0.0001669324016141842,,MB,1426.997248,4208.852992,0.0,3793.747968,3508.531712,s,10,4.495707641601562,0.44957076416015623,0.00109256033151545,0.45009172058105473,0.4505439056396484,0.45062887115478517,0.45069684356689454,"[0.4495422973632813, 0.4479869079589844, 0.4482954711914062, 0.4476620178222656, 0.4507138366699219, 0.4503055419921875, 0.45003189086914064, 0.45015155029296877, 0.4505250244140625, 0.45049310302734374]",tokens/s,569.4320458720974,kWh,1.3408836084851247e-05,1.4780436732427903e-06,8.875916191636806e-06,2.3762795949730846e-05,tokens/kWh,10773143.048551895,MB,1484.156928,4210.950144,0.0,3793.747968,3508.534272,s,10,26.10001953125,2.610001953125,0.005692428504392662,2.609274169921875,2.617348828125,2.61854501953125,2.61950197265625,"[2.6111435546875, 2.612770263671875, 2.6170830078125, 2.6197412109375, 2.61417822265625, 2.603892822265625, 2.60740478515625, 2.6072626953125, 2.60098388671875, 2.60555908203125]",tokens/s,24.137912971509092,kWh,7.59468876247368e-05,8.37733014598073e-06,4.770057351396134e-05,0.00013202479128467885,tokens/kWh,477183.10619523015,,s,630,26.098160957336443,0.04142565231323242,0.0009248708164361108,0.041253553390502934,0.041583387374877934,0.04194574813842773,0.04783623935699464,"[0.04817532730102539, 0.04278681564331055, 0.04175030517578125, 0.041578495025634765, 0.04139404678344726, 0.04132271957397461, 0.04145276641845703, 0.04117712020874023, 0.041220767974853516, 0.04114140701293945, 0.041231201171875, 0.04120076751708984, 0.04129267120361328, 0.04125689697265625, 
0.04145542526245117, 0.041285343170166015, 0.0411341438293457, 0.041270751953125, 0.041164833068847655, 0.04126387023925781, 0.04112374496459961, 0.04123993682861328, 0.04112460708618164, 0.041239040374755856, 0.0411583366394043, 0.041276576995849606, 0.04113100814819336, 0.041298912048339846, 0.04115276718139648, 0.04128134536743164, 0.04120588684082031, 0.04125059127807617, 0.041251743316650394, 0.04129967880249023, 0.041279777526855466, 0.04120985412597656, 0.04144332885742188, 0.041365505218505856, 0.04121721649169922, 0.04129846572875977, 0.0411671371459961, 0.0413675537109375, 0.04118246459960938, 0.04133174514770508, 0.04131916809082031, 0.041159008026123045, 0.04137542343139648, 0.0413111686706543, 0.041234432220458986, 0.041590110778808594, 0.04132636642456055, 0.041200576782226564, 0.041428672790527345, 0.04139471817016602, 0.04120342254638672, 0.0414021110534668, 0.04136166381835937, 0.0412119026184082, 0.04140784072875976, 0.04139865493774414, 0.042016864776611325, 0.041740447998046874, 0.04142697525024414, 0.04806115341186523, 0.0426451530456543, 0.04166041564941406, 0.041575679779052736, 0.04152511978149414, 0.04114681625366211, 0.04126355361938477, 0.04102739334106445, 0.041420257568359375, 0.04103606414794922, 0.04122447967529297, 0.04114419174194336, 0.04134089660644531, 0.04112406539916992, 0.04144252777099609, 0.04138918304443359, 0.041237281799316405, 0.0414332160949707, 0.0414376335144043, 0.041269344329833986, 0.04123884963989258, 0.04136140823364258, 0.04116902542114258, 0.04136511993408203, 0.0413037109375, 0.041235393524169925, 0.04130575942993164, 0.041181182861328124, 0.041375137329101565, 0.041126495361328126, 0.04126310348510742, 0.04114419174194336, 0.04127769470214844, 0.041168769836425784, 0.04219007873535156, 0.04152191925048828, 0.041398273468017575, 0.04122367858886719, 0.04139478302001953, 0.04141865539550781, 0.041211040496826175, 0.04131107330322266, 0.041299968719482424, 0.04130604934692383, 0.041293216705322267, 0.041261856079101565, 0.04135459136962891, 0.04141123199462891, 0.041369537353515624, 0.04129574584960938, 0.041543582916259765, 0.04140867233276367, 0.04155596923828125, 0.041387935638427735, 0.041296993255615234, 0.04149318313598633, 0.04137580871582031, 0.041314430236816406, 0.04137750244140625, 0.04147241592407227, 0.04148633575439453, 0.04131955337524414, 0.041377727508544924, 0.04844073486328125, 0.04266409683227539, 0.041644031524658204, 0.04191891098022461, 0.04142489624023438, 0.04159619140625, 0.041220417022705076, 0.041236896514892575, 0.041215614318847654, 0.04119750213623047, 0.04139667129516601, 0.04113612747192383, 0.04122214508056641, 0.04112774276733398, 0.04127110290527344, 0.041180862426757815, 0.04124127960205078, 0.0410975341796875, 0.04125244903564453, 0.04117103958129883, 0.04147814559936523, 0.041500160217285156, 0.041193790435791015, 0.041328094482421876, 0.041261375427246096, 0.04120560073852539, 0.04133049774169922, 0.04117580795288086, 0.0414035530090332, 0.04134700775146485, 0.04118767929077148, 0.04131078338623047, 0.041181121826171875, 0.041299232482910155, 0.04152195358276367, 0.04122201538085937, 0.04328870391845703, 0.04140851211547852, 0.0414345588684082, 0.0415643196105957, 0.04150518417358398, 0.041248767852783204, 0.04136486434936523, 0.04136127853393555, 0.04124544143676758, 0.0413331184387207, 0.041627262115478514, 0.04135961532592773, 0.041291519165039064, 0.041387840270996096, 0.041377822875976564, 0.041573665618896485, 0.04122102355957031, 0.04171567916870117, 0.04134297561645508, 
0.041543678283691404, 0.04218198394775391, 0.0414194221496582, 0.04132815933227539, 0.04151299285888672, 0.04158303833007813, 0.04163174438476563, 0.04144876861572266, 0.04745564651489258, 0.042654144287109376, 0.04179987335205078, 0.04151500701904297, 0.041333919525146486, 0.041380062103271484, 0.041468544006347655, 0.04114227294921875, 0.041355262756347655, 0.04112179183959961, 0.04137363052368164, 0.04132175827026367, 0.04117174530029297, 0.0413221435546875, 0.04141091156005859, 0.041248767852783204, 0.04145356750488281, 0.04137984085083008, 0.041278465270996094, 0.041401344299316405, 0.04147347259521485, 0.041237056732177736, 0.04135456085205078, 0.04134560012817383, 0.04118745422363281, 0.04148188781738281, 0.041380191802978514, 0.04126700973510742, 0.041351360321044923, 0.04137919998168945, 0.04135590362548828, 0.04133212661743164, 0.04142959976196289, 0.04157174301147461, 0.04162774276733398, 0.04158937454223633, 0.041564033508300784, 0.041551551818847655, 0.04160287857055664, 0.041627166748046875, 0.04154467010498047, 0.04155392074584961, 0.041545726776123046, 0.04165577697753906, 0.04157904052734375, 0.04145091247558594, 0.04140297698974609, 0.04147808074951172, 0.041516735076904294, 0.041595008850097655, 0.04159104156494141, 0.041972000122070315, 0.04163759994506836, 0.0415120964050293, 0.041529953002929686, 0.041521537780761716, 0.04159884643554688, 0.04160636901855469, 0.04151363372802734, 0.041541473388671875, 0.04148867034912109, 0.04145286560058594, 0.04193759918212891, 0.047607872009277345, 0.042822719573974606, 0.041667518615722654, 0.041621505737304686, 0.04135068893432617, 0.04122780990600586, 0.042511295318603516, 0.041374752044677734, 0.04131119918823242, 0.041172927856445315, 0.04150601577758789, 0.0414257583618164, 0.041312255859375, 0.041267200469970705, 0.0412940788269043, 0.04112144088745117, 0.04398681640625, 0.04156156921386719, 0.04131711959838867, 0.04127129745483398, 0.041250816345214845, 0.04111516952514648, 0.04140195083618164, 0.04126182556152344, 0.041226367950439456, 0.04129564666748047, 0.041121761322021486, 0.04130028915405273, 0.041187263488769534, 0.04125478363037109, 0.04113625717163086, 0.04122623825073242, 0.04109721755981445, 0.041299137115478515, 0.04148038482666016, 0.041198337554931644, 0.04132236862182617, 0.04115235137939453, 0.04127289581298828, 0.04112278366088867, 0.04138150405883789, 0.0413306884765625, 0.0414466552734375, 0.04135164642333984, 0.041320735931396485, 0.041162593841552735, 0.04135692977905273, 0.04127376174926758, 0.04130009460449219, 0.04138163375854492, 0.04118758392333984, 0.041381889343261716, 0.04137984085083008, 0.04119875335693359, 0.04137046432495117, 0.04138412857055664, 0.041226367950439456, 0.04137945556640625, 0.04147820663452149, 0.0412723503112793, 0.04138467025756836, 0.04139443206787109, 0.041199615478515625, 0.04768780899047852, 0.04257583999633789, 0.04160982513427734, 0.04152931213378906, 0.04158652877807617, 0.04139971160888672, 0.04128847885131836, 0.04110335922241211, 0.0409536018371582, 0.04110156631469727, 0.041099262237548825, 0.0409169921875, 0.04114585494995117, 0.04108316802978516, 0.04092950439453125, 0.0409126091003418, 0.04116304016113281, 0.041172897338867184, 0.04117103958129883, 0.04117459106445313, 0.04099116897583008, 0.04123648071289063, 0.04102921676635742, 0.04117248153686524, 0.04119004821777344, 0.04093478393554688, 0.04115359878540039, 0.04111667251586914, 0.04125574493408203, 0.0411396484375, 0.041271713256835936, 0.041145790100097654, 0.0412781753540039, 0.04119100952148438, 
0.04126556777954102, 0.041046241760253906, 0.04118038558959961, 0.04119184112548828, 0.041191646575927734, 0.04118921661376953, 0.04127948760986328, 0.04119561767578125, 0.04145151901245117, 0.04110307312011719, 0.041312030792236325, 0.04103424072265625, 0.041250816345214845, 0.041062110900878905, 0.04130844879150391, 0.04108697509765625, 0.04126275253295898, 0.041193473815917966, 0.0412224006652832, 0.04114153671264648, 0.041249599456787106, 0.041331905364990235, 0.04151583862304688, 0.04138393783569336, 0.04125696182250976, 0.041414047241210936, 0.04137020874023437, 0.04127676773071289, 0.041228958129882816, 0.04789686584472656, 0.04452105712890625, 0.04327619171142578, 0.041621952056884765, 0.04132185745239258, 0.04127603149414062, 0.04108902359008789, 0.04143737411499023, 0.041131137847900394, 0.04107334518432617, 0.04102348709106445, 0.04086393737792969, 0.04111500930786133, 0.04109356689453125, 0.041143550872802734, 0.04106726455688477, 0.04106649780273437, 0.040925182342529294, 0.041166847229003906, 0.04112518310546875, 0.04088108825683594, 0.04097817611694336, 0.04110054397583008, 0.04104064178466797, 0.04090390396118164, 0.041081119537353515, 0.04210227203369141, 0.04118368148803711, 0.0411899528503418, 0.041011329650878905, 0.041076606750488284, 0.04109020614624023, 0.04116668701171875, 0.041296897888183595, 0.04120883178710937, 0.04118220901489258, 0.041194656372070315, 0.04121097564697265, 0.04120345687866211, 0.04116275024414062, 0.041381889343261716, 0.041100353240966794, 0.04125132751464844, 0.04113248062133789, 0.041270401000976564, 0.04116774368286133, 0.04124160003662109, 0.04109414291381836, 0.041215999603271485, 0.04114460754394531, 0.04126025772094727, 0.041148929595947265, 0.04138348770141602, 0.04147071838378906, 0.0412586898803711, 0.04134105682373047, 0.041135871887207034, 0.04134310531616211, 0.04144678497314453, 0.04115651321411133, 0.04131087875366211, 0.04112015914916992, 0.04134467315673828, 0.048857887268066405, 0.042869247436523435, 0.04162406539916992, 0.041455265045166015, 0.041355518341064455, 0.04129980850219726, 0.041102977752685545, 0.041148574829101565, 0.041206272125244144, 0.04119753646850586, 0.041121025085449216, 0.04113433456420899, 0.04111727905273437, 0.041050975799560546, 0.04122220611572266, 0.04112547302246094, 0.04117663955688477, 0.04119126510620117, 0.041204734802246096, 0.04123635101318359, 0.04120755386352539, 0.04120975875854492, 0.041187263488769534, 0.04113257598876953, 0.04096540832519531, 0.0412677116394043, 0.04178947067260742, 0.041164993286132816, 0.04122623825073242, 0.04106444931030274, 0.04138124847412109, 0.04142963027954102, 0.041137374877929685, 0.04129788970947266, 0.041356094360351564, 0.04120576095581055, 0.041232383728027344, 0.04229939270019531, 0.041240577697753904, 0.04121395111083984, 0.041209598541259766, 0.041189632415771484, 0.04117094421386719, 0.04123168182373047, 0.04121571350097656, 0.041283935546875, 0.041119487762451175, 0.04114726257324219, 0.04118508911132813, 0.04098992156982422, 0.04115971374511719, 0.04110230255126953, 0.0411104965209961, 0.04131951904296875, 0.04130012893676758, 0.041070560455322265, 0.041218238830566405, 0.04122608184814453, 0.041224193572998044, 0.041156993865966794, 0.041265537261962894, 0.041160224914550785, 0.04131887817382812, 0.04860355377197265, 0.04293737411499023, 0.04154217529296875, 0.041484737396240236, 0.041350624084472654, 0.041299583435058594, 0.04112480163574219, 0.041056415557861325, 0.04101871871948242, 0.041001438140869144, 0.040933631896972654, 
0.041078529357910155, 0.04105231857299805, 0.04099055862426758, 0.04114400100708008, 0.041085502624511716, 0.04097612762451172, 0.040997184753417966, 0.04123836898803711, 0.040965694427490235, 0.04092038345336914, 0.041098335266113284, 0.04113919830322266, 0.04090719985961914, 0.04106707382202148, 0.04105347061157227, 0.041003616333007815, 0.040978431701660156, 0.041099552154541016, 0.04112355041503906, 0.041137664794921876, 0.04116121673583984, 0.040994560241699216, 0.04114051055908203, 0.04121164703369141, 0.04116089630126953, 0.04130326461791992, 0.041208641052246094, 0.041102657318115236, 0.04094976043701172, 0.041181568145751954, 0.041058559417724606, 0.04094883346557617, 0.04112870407104492, 0.0411357421875, 0.04121392059326172, 0.04121219253540039, 0.041331039428710935, 0.04113568115234375, 0.04117139053344727, 0.04113996887207031, 0.04117049789428711, 0.04119222259521484, 0.04097014236450195, 0.041379039764404296, 0.041048831939697265, 0.04126518249511719, 0.04110908889770508, 0.04117715072631836, 0.04112534332275391, 0.04140662384033203, 0.04147494506835937, 0.04118307113647461, 0.04915407943725586, 0.04294041442871094, 0.04239155197143555, 0.041707454681396486, 0.041320510864257816, 0.04143308639526367, 0.041275009155273434, 0.040943614959716795, 0.04103372955322265, 0.041132415771484375, 0.04104832077026367, 0.040931041717529294, 0.04111337661743164, 0.041148670196533205, 0.0410748176574707, 0.04104995346069336, 0.041091102600097656, 0.041096446990966796, 0.041048831939697265, 0.04113993453979492, 0.04114255905151367, 0.041952415466308596, 0.04166073608398437, 0.04128422546386719, 0.041093025207519535, 0.04114742279052734, 0.04123542404174805, 0.04123830413818359, 0.041162334442138675, 0.04119411087036133, 0.04117299270629883, 0.04091904067993164, 0.041111553192138675, 0.04115491104125977, 0.041076385498046875, 0.041183231353759765, 0.04104191970825195, 0.04112345504760742, 0.04115494537353516, 0.040937824249267576, 0.04113904190063476, 0.04118815994262695, 0.04105945587158203, 0.041139072418212894, 0.04101324844360352, 0.0411514892578125, 0.04118374252319336, 0.041239040374755856, 0.04121583938598633, 0.04113446426391602, 0.041242401123046876, 0.04117747116088867, 0.0411357421875, 0.041148414611816404, 0.041398273468017575, 0.04129123306274414, 0.041105953216552735, 0.04125600051879883, 0.04113119888305664, 0.04125465774536133, 0.0414637451171875, 0.041207584381103515, 0.04102102279663086]",tokens/s,24.139631946859513,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,916.04992,6935.150592,0.0,6557.794304,6542.705664,s,1,15.5748359375,15.5748359375,0.0,15.5748359375,15.5748359375,15.5748359375,15.5748359375,[15.5748359375],,kWh,0.00024598582146250007,2.712681854451744e-05,8.446201201400133e-05,0.0003575746520210188,,MB,1554.456576,7904.034816,0.0,7484.735488,6954.981888,s,10,12.697212524414061,1.269721252441406,0.004771295195134504,1.2689224853515624,1.2765460571289062,1.276761004638672,1.2769329626464843,"[1.261804443359375, 1.2671829833984376, 1.26684033203125, 1.2691566162109376, 1.265218505859375, 1.269647705078125, 1.2686883544921874, 1.2769759521484374, 1.276498291015625, 1.2751993408203126]",tokens/s,201.61905576343312,kWh,3.703785972541747e-05,4.08310180085193e-06,2.4420297313999805e-05,6.55412588402692e-05,tokens/kWh,3905936.5738442466,MB,1586.475008,7908.22912,0.0,7488.929792,6954.984448,s,10,62.12733544921874,6.212733544921875,0.015069473024978035,6.218083251953125,6.224770166015625,6.2286424072265625,6.231740200195313,"[6.18585498046875, 6.19065576171875, 6.19651025390625, 6.2161982421875, 6.2158974609375, 6.22298291015625, 6.21996826171875, 6.22284326171875, 6.22390966796875, 6.2325146484375]",tokens/s,10.140463862560877,kWh,0.00018272821791583322,2.0158026645342622e-05,0.00012112181911959895,0.0003240080636807748,tokens/kWh,194439.6052502879,,s,630,62.12535711669921,0.09861167796301464,0.0015114568882829678,0.09834756851196289,0.09998424377441406,0.1004399127960205,0.10716291336059572,"[0.10697023773193359, 0.09757087707519531, 0.09727897644042968, 0.0971118392944336, 0.09643622589111328, 0.09837270355224609, 0.09898896026611329, 0.09789644622802735, 0.09724674987792968, 0.09702559661865234, 0.09838480377197266, 0.0984985580444336, 0.10008092498779297, 0.09925020599365235, 0.09783580780029297, 0.09781852722167969, 0.09639347076416016, 0.09690496063232422, 0.09840569305419922, 0.09923385620117188, 0.09847052764892578, 0.09831593322753907, 0.09808726501464844, 0.09730246734619141, 0.0973578872680664, 0.09831219482421875, 0.09897574615478516, 0.09774899291992188, 0.09717555236816407, 0.09785683441162109, 0.09733200073242188, 0.09851459503173828, 0.09971942138671876, 0.09966361236572266, 0.09789875030517578, 0.0975730209350586, 0.09740598297119141, 0.09690975952148438, 0.0976346893310547, 0.09966182708740234, 0.09934438323974609, 0.09843097686767578, 0.09809017944335938, 0.09812012481689453, 0.09696102142333984, 0.09695142364501953, 0.09854803466796876, 0.09886969757080079, 0.09880271911621094, 0.09649180603027344, 0.09771065521240234, 0.09733542633056641, 0.0978370590209961, 0.09883148956298828, 0.09903807830810547, 0.0979210205078125, 0.09703014373779296, 0.09730623626708984, 0.09671308898925782, 0.0984267807006836, 0.09941852569580079, 0.09898780822753907, 0.09886924743652344, 0.10762905883789063, 0.09681014251708984, 0.09719894409179687, 0.09653555297851563, 0.09888665771484376, 0.09895116424560547, 0.09851427459716797, 0.09720285034179688, 0.09715020751953125, 0.09801190185546875, 0.09878118133544922, 0.09870159912109375, 0.09918793487548828, 0.0988738555908203, 0.09665122985839844, 0.09722064208984375, 0.09720396423339844, 0.09736627197265625, 0.0988753890991211, 0.09950208282470703, 0.09927577972412109, 0.09839513397216797, 0.0974028778076172, 0.09789679718017578, 0.09732819366455078, 0.09778781127929688, 0.09815110778808593, 0.0992294692993164, 0.09774700927734375, 0.09757711791992188, 0.09807462310791015, 0.0972943344116211, 0.09818704223632813, 
0.09925606536865235, 0.09989987182617187, 0.0984876480102539, 0.09697142028808593, 0.0972676773071289, 0.09666732788085937, 0.09877670288085938, 0.10025059509277344, 0.09890704345703125, 0.0989755859375, 0.09704550170898438, 0.09792511749267578, 0.09834877014160157, 0.09705644989013672, 0.09930521392822265, 0.09885167694091797, 0.09955843353271485, 0.09846268463134765, 0.09654431915283203, 0.09743532562255859, 0.09788285064697265, 0.09822211456298828, 0.09902607727050781, 0.09886396789550782, 0.09796514892578125, 0.09723388671875, 0.09709152221679687, 0.09759487915039063, 0.0977772445678711, 0.09920098876953125, 0.10723123168945313, 0.09714640045166016, 0.0968749771118164, 0.09685196685791016, 0.09734284973144532, 0.09917708587646484, 0.09896543884277344, 0.09695852661132813, 0.09726771545410157, 0.09917052459716796, 0.0985733413696289, 0.09818803405761718, 0.09877708435058594, 0.09924752044677734, 0.0989760971069336, 0.09682559967041016, 0.09783424377441406, 0.09704934692382812, 0.09684992218017578, 0.10083328247070313, 0.0992126693725586, 0.09976691436767578, 0.0989491195678711, 0.09662873840332031, 0.09763353729248046, 0.09715174102783203, 0.09666355133056641, 0.09999164581298828, 0.09892649841308594, 0.0994567642211914, 0.09800860595703124, 0.0979725112915039, 0.09848876953125, 0.09692979431152343, 0.09779567718505859, 0.09889014434814453, 0.09904029083251953, 0.09724310302734375, 0.0972421112060547, 0.09806028747558594, 0.09793740844726563, 0.09902899169921875, 0.09912067413330078, 0.09928137969970703, 0.09952051544189452, 0.09709363555908203, 0.09706905364990234, 0.09714864349365235, 0.09738697814941406, 0.10011014556884766, 0.09878870391845704, 0.09940035247802734, 0.09892182159423828, 0.09793603515625, 0.09803984069824219, 0.09687862396240235, 0.09721993255615234, 0.09829183959960938, 0.0991277084350586, 0.0990939483642578, 0.09869990539550781, 0.09785279846191407, 0.09818402862548828, 0.10803504180908204, 0.09726338958740234, 0.09709903717041016, 0.0983310089111328, 0.09926486206054687, 0.09780633544921875, 0.09679427337646485, 0.09725782775878906, 0.09710297393798828, 0.09949219512939453, 0.10113078308105469, 0.09882828521728515, 0.09890201568603516, 0.09777970886230469, 0.09702159881591797, 0.09721686553955078, 0.09691545867919922, 0.09850582122802734, 0.09870428466796875, 0.10064857482910156, 0.10012057495117188, 0.09874784088134765, 0.09887635040283203, 0.09713362884521484, 0.09716627502441406, 0.09787753295898438, 0.09924195098876953, 0.10023987579345703, 0.0989706573486328, 0.09816365051269531, 0.09808624267578125, 0.0981879653930664, 0.09791426849365234, 0.0973809585571289, 0.09897881317138672, 0.09908879852294922, 0.09854147338867188, 0.09796473693847656, 0.0981844482421875, 0.0980357437133789, 0.09743843078613282, 0.09956092834472656, 0.09885340881347657, 0.10063471984863281, 0.10008771514892578, 0.09917030334472657, 0.09842483520507812, 0.09766409301757813, 0.09730754852294922, 0.09768550109863282, 0.09822822570800781, 0.09978265380859375, 0.09931353759765625, 0.10041152191162109, 0.09963494110107422, 0.09804415893554687, 0.09737187194824219, 0.09730425262451171, 0.09720687866210938, 0.09855296325683593, 0.10063346862792968, 0.10016700744628906, 0.09951708984375, 0.10757961273193359, 0.09778972625732422, 0.09693593597412109, 0.0972042236328125, 0.09724018859863282, 0.09892745971679688, 0.09932189178466796, 0.098600830078125, 0.09797644805908202, 0.09928096008300781, 0.09993824005126953, 0.09766502380371093, 0.09814390563964843, 0.09737798309326172, 0.09927542114257812, 
0.1002086410522461, 0.09885830688476563, 0.09777356719970703, 0.09723680114746094, 0.09796041870117188, 0.09890217590332032, 0.09863177490234375, 0.09935708618164063, 0.09893017578125, 0.09801139068603516, 0.10186473846435547, 0.0977108154296875, 0.09640521240234375, 0.09784963226318359, 0.09803932952880859, 0.10025740814208985, 0.09955619049072266, 0.09919926452636718, 0.09912700653076172, 0.09814595031738281, 0.09765718078613281, 0.0978309097290039, 0.09818726348876954, 0.09798847961425781, 0.10008806610107422, 0.10015526580810546, 0.10006111907958984, 0.09820985412597656, 0.098410400390625, 0.09778390502929687, 0.09773203277587891, 0.09786361694335938, 0.09873062133789062, 0.09960371398925781, 0.09931852722167969, 0.09859203338623047, 0.09742205047607422, 0.0979578857421875, 0.09718374633789062, 0.09875046539306641, 0.09961385345458984, 0.09923465728759766, 0.09931353759765625, 0.09887088012695312, 0.09795433807373047, 0.09765068817138672, 0.09824050903320312, 0.09802336120605469, 0.10699565124511719, 0.09859897613525391, 0.09946316528320312, 0.09841049957275391, 0.09779814147949219, 0.09766671752929687, 0.09829004669189453, 0.09723503875732421, 0.09817485046386719, 0.1006714859008789, 0.10187161254882812, 0.09935462188720703, 0.0993034210205078, 0.0976527328491211, 0.09665692901611328, 0.09761196899414062, 0.0981825942993164, 0.0982109146118164, 0.09925401306152344, 0.10052579498291016, 0.10007785797119141, 0.09827503967285156, 0.09746665954589843, 0.09662979125976562, 0.09818364715576172, 0.09780274963378906, 0.09923788452148438, 0.10027232360839844, 0.09895024108886719, 0.0981118392944336, 0.09741555023193359, 0.09935462188720703, 0.0971668472290039, 0.09782252502441406, 0.0994310073852539, 0.09916425323486328, 0.0994549789428711, 0.09768345642089844, 0.09804742431640626, 0.09832828521728515, 0.09902761840820312, 0.09807276916503906, 0.09814137268066406, 0.09925020599365235, 0.09956432342529296, 0.0983157730102539, 0.09806694030761719, 0.09737830352783203, 0.09775049591064452, 0.09830044555664062, 0.09920848083496094, 0.10064150238037109, 0.10021027374267578, 0.09896284484863281, 0.09861996459960938, 0.09737836456298828, 0.09730892944335938, 0.09802073669433593, 0.09749900817871093, 0.09957465362548829, 0.0992194595336914, 0.100853759765625, 0.10060390472412109, 0.10696137237548828, 0.09802281951904297, 0.09779865264892579, 0.09763372802734376, 0.0978803482055664, 0.09757465362548828, 0.09876544189453125, 0.0999834213256836, 0.09931072235107422, 0.10060912322998047, 0.09962223815917969, 0.09805644989013672, 0.09732313537597656, 0.09762963104248047, 0.09735561370849609, 0.09849520111083984, 0.09937455749511719, 0.09960912322998047, 0.09892658996582031, 0.09908364868164063, 0.09819132995605469, 0.09775577545166016, 0.097359619140625, 0.09848851013183593, 0.09898751831054688, 0.10011094665527344, 0.09976217651367188, 0.09862348937988281, 0.0980948486328125, 0.09861795043945312, 0.09768243408203126, 0.0977103042602539, 0.09843756866455078, 0.09875389099121094, 0.09968013000488281, 0.09819574737548828, 0.0975730209350586, 0.09872418975830079, 0.09774694061279297, 0.0984616928100586, 0.09813811492919922, 0.09889167785644531, 0.09968649291992188, 0.0981053466796875, 0.0974356460571289, 0.09839110565185546, 0.09804918670654297, 0.09787078094482422, 0.09994671630859375, 0.10018265533447265, 0.09956966400146484, 0.10044992065429688, 0.09904166412353516, 0.09807033538818359, 0.09714857482910157, 0.0978765106201172, 0.09757695770263672, 0.09902899169921875, 0.09918873596191406, 
0.10054450988769531, 0.09948159790039063, 0.09808895874023438, 0.09802342224121094, 0.10771469116210937, 0.09913475036621094, 0.09808758544921875, 0.09758726501464844, 0.09782790374755859, 0.09769261169433593, 0.09840640258789063, 0.09895283508300781, 0.09898649597167969, 0.09884659576416016, 0.09922873687744141, 0.09811219024658203, 0.09805213165283203, 0.09822415924072266, 0.09781574249267579, 0.0991385269165039, 0.09978208160400391, 0.0994125747680664, 0.09731177520751953, 0.0988780517578125, 0.09794547271728515, 0.09867311859130859, 0.09789033508300782, 0.09905561828613281, 0.09870130920410156, 0.099557373046875, 0.09834636688232422, 0.09778034973144531, 0.09785139465332031, 0.09812700653076172, 0.09751023864746093, 0.09960364532470703, 0.09994281768798828, 0.09968643188476563, 0.09940211486816407, 0.09891020965576172, 0.09778543853759766, 0.09734595489501953, 0.0979060821533203, 0.09797666931152343, 0.09898857879638671, 0.09922354888916016, 0.1008903350830078, 0.09992195129394531, 0.09815446472167969, 0.09728205108642578, 0.09775023651123046, 0.09724591827392579, 0.09832044982910157, 0.10045350646972656, 0.10057945251464843, 0.09999404907226563, 0.09931382751464844, 0.09909004974365235, 0.09764857482910157, 0.09780489349365235, 0.09751497650146485, 0.09791136169433594, 0.09928208160400391, 0.10038902282714844, 0.09932662200927735, 0.0985374755859375, 0.09784089660644531, 0.10764508819580078, 0.10025039672851563, 0.09920668792724609, 0.09785596466064453, 0.09716678619384765, 0.09810797119140625, 0.09750313568115235, 0.09830569458007812, 0.09886150360107422, 0.10042768096923828, 0.09995990753173828, 0.09903158569335938, 0.09835568237304687, 0.09783261108398437, 0.09749501037597656, 0.09801152038574219, 0.09972121429443359, 0.09937305450439453, 0.09899619293212891, 0.09809308624267578, 0.09877094268798828, 0.09809053039550782, 0.09805244445800781, 0.09772013092041015, 0.09864224243164063, 0.10016973114013672, 0.10003981018066406, 0.09902783966064453, 0.0976527328491211, 0.09831629180908204, 0.09827327728271484, 0.09794940948486328, 0.09897154998779296, 0.09903142547607421, 0.09967820739746094, 0.09987433624267578, 0.09738697814941406, 0.09959254455566406, 0.09733084869384766, 0.09781190490722656, 0.09803804779052734, 0.09942972564697265, 0.09963001251220703, 0.09971302032470702, 0.09864806365966797, 0.09768051147460938, 0.09832230377197265, 0.09797494506835938, 0.09767692565917968, 0.09898044586181641, 0.09916201782226562, 0.09973123168945312, 0.09848230743408203, 0.09775516510009766, 0.09840259552001954, 0.09793331146240235, 0.097746337890625, 0.09897840118408203, 0.099108642578125, 0.10054812622070312, 0.09889043426513672, 0.09826249694824218, 0.09801372528076172, 0.10851618957519531, 0.0979268798828125, 0.096751708984375, 0.09761811065673828, 0.09816259002685547, 0.09870345306396484, 0.09997516632080078, 0.09945263671875, 0.09873206329345703, 0.09926787567138672, 0.09941091156005859, 0.09814835357666016, 0.09743580627441406, 0.09834384155273437, 0.09874527740478516, 0.10011353302001953, 0.10038771057128906, 0.09900994873046876, 0.0972171173095703, 0.09871974182128906, 0.0987484130859375, 0.09795993804931641, 0.098219970703125, 0.09890121459960938, 0.099982177734375, 0.09921536254882812, 0.09738380432128907, 0.09768819427490234, 0.09862300872802734, 0.09835977935791015, 0.09762611389160156, 0.09908617401123047, 0.09887494659423827, 0.10068233489990235, 0.09996640014648438, 0.10012729644775391, 0.09801522827148437, 0.09731890869140625, 0.09793126678466797, 0.09785689544677735, 
0.09886784362792969, 0.09919078063964844, 0.09965760040283203, 0.10007769775390625, 0.09974169921875, 0.0990904312133789, 0.09786268615722657, 0.0974345932006836, 0.09817459106445313, 0.09823875427246094, 0.0989836196899414, 0.10048925018310546, 0.09962329864501954, 0.10045641326904296, 0.10039225769042968, 0.10013491058349609, 0.09937136077880859, 0.09793942260742187, 0.0972679672241211, 0.09733955383300781, 0.098050048828125, 0.09923538970947265, 0.09949382019042968]",tokens/s,10.140786777556515,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1150.042112,13202.55488,0.0,12807.307264,12661.927936,s,1,26.22058203125,26.22058203125,0.0,26.22058203125,26.22058203125,26.22058203125,26.22058203125,[26.22058203125],,kWh,0.0005574009664708247,6.147828936480916e-05,0.00020758127717600272,0.0008264605330116366,,MB,1272.885248,15727.525888,0.0,15319.69536,14320.027648,s,10,30.43433520507812,3.0434335205078122,0.007328731761490402,3.0446331787109377,3.051364599609375,3.053053637695313,3.0544048681640628,"[3.02811279296875, 3.0337109375, 3.041738525390625, 3.043341796875, 3.04512890625, 3.0509892578125, 3.0469658203125, 3.044137451171875, 3.045467041015625, 3.05474267578125]",tokens/s,84.11552224649385,kWh,8.847248944041761e-05,9.758437180565623e-06,5.87711859058003e-05,0.00015700211252678354,tokens/kWh,1630551.3083865547,MB,1295.986688,15727.525888,0.0,15319.69536,14320.030208,s,10,145.58787109375,14.558787109375,0.0227658984668569,14.569966308593749,14.579945214843749,14.580775341796874,14.581439443359375,"[14.5112666015625, 14.5271318359375, 14.542541015625, 14.5587841796875, 14.5797607421875, 14.56684765625, 14.57310546875, 14.5737431640625, 14.5730849609375, 14.58160546875]",tokens/s,4.327283552311285,kWh,0.000425607064507916,4.6947708711063024e-05,0.0002830553097773997,0.0007556100829963785,tokens/kWh,83376.33578177377,,s,630,145.58305897521964,0.23108422059558686,0.002321878640323582,0.23113728332519531,0.23276569824218749,0.2345638153076172,0.2406544161987305,"[0.23835647583007813, 0.22681805419921874, 0.22691226196289063, 0.2283701171875, 0.23533395385742187, 0.22786253356933595, 0.228238525390625, 0.2280592041015625, 0.23383724975585937, 0.22964346313476564, 0.2274272003173828, 0.22796624755859374, 0.23188954162597655, 0.2315145263671875, 0.22883705139160157, 0.22799360656738282, 0.23043600463867187, 0.2315552978515625, 0.22978009033203126, 0.22779705810546874, 0.2293472900390625, 0.23161170959472657, 0.22977603149414064, 0.2288714599609375, 0.22848899841308593, 0.23233836364746094, 0.23008566284179688, 0.22982736206054688, 0.22842387390136717, 0.2310840301513672, 0.23034060668945314, 0.23051222229003906, 0.23003996276855468, 0.23001097106933593, 0.230152099609375, 0.23047775268554688, 0.22883743286132813, 0.22996163940429687, 0.2318726043701172, 0.2311658935546875, 0.2301805419921875, 0.22902003479003907, 0.23188803100585936, 0.23133680725097655, 
0.23044709777832031, 0.22952960205078124, 0.23002316284179689, 0.23109207153320313, 0.23068893432617188, 0.23061923217773436, 0.23042652893066407, 0.23081983947753906, 0.23059455871582032, 0.2311700439453125, 0.22997555541992187, 0.2327039031982422, 0.23061325073242187, 0.2315813751220703, 0.23038018798828125, 0.23189830017089844, 0.23119955444335938, 0.2314895324707031, 0.23127352905273438, 0.24072761535644532, 0.22724415588378907, 0.22727130126953124, 0.22921420288085936, 0.2344837188720703, 0.22802841186523437, 0.22699005126953126, 0.22786051940917967, 0.2346762237548828, 0.22905209350585937, 0.22809432983398437, 0.2280998077392578, 0.2327410888671875, 0.23019699096679688, 0.2286261444091797, 0.22834629821777344, 0.23072163391113282, 0.23181033325195313, 0.22893344116210937, 0.22952438354492188, 0.22937744140625, 0.23119932556152345, 0.230582275390625, 0.2286510009765625, 0.22834803771972656, 0.2320623321533203, 0.23174195861816407, 0.22911517333984374, 0.22776089477539063, 0.23132566833496093, 0.23087513732910156, 0.23090585327148438, 0.22885171508789062, 0.2310360565185547, 0.23094537353515626, 0.23201405334472655, 0.22943133544921876, 0.22956646728515626, 0.23067237854003905, 0.2320762939453125, 0.23006105041503908, 0.23008869934082032, 0.23062442016601561, 0.23252806091308595, 0.22978778076171874, 0.23117446899414062, 0.23062550354003905, 0.23214285278320312, 0.231189697265625, 0.2314714813232422, 0.2318995819091797, 0.2301317138671875, 0.2313768310546875, 0.23168415832519532, 0.2309979248046875, 0.2303468475341797, 0.23194784545898436, 0.23193394470214843, 0.23069686889648439, 0.23109686279296876, 0.2314176025390625, 0.23261415100097657, 0.23161351013183593, 0.2402891845703125, 0.2272303009033203, 0.2266926727294922, 0.23014854431152343, 0.2351820831298828, 0.22882009887695312, 0.22699224853515626, 0.2292446746826172, 0.2346293487548828, 0.22941282653808595, 0.2284610595703125, 0.2281793212890625, 0.2329054718017578, 0.2312572784423828, 0.2282239990234375, 0.22829055786132812, 0.23113523864746094, 0.23235894775390625, 0.22947724914550782, 0.22758761596679689, 0.23072006225585937, 0.23408140563964844, 0.23163958740234375, 0.22788540649414063, 0.2301063690185547, 0.23228492736816406, 0.2299099578857422, 0.22972451782226563, 0.22952572631835938, 0.23166157531738282, 0.23132070922851564, 0.2302320251464844, 0.22983917236328125, 0.2311603240966797, 0.23226786804199218, 0.23000210571289062, 0.23015890502929687, 0.2305834503173828, 0.23212646484375, 0.2303714599609375, 0.2305780487060547, 0.23006495666503907, 0.23231082153320312, 0.23118194580078125, 0.23163273620605468, 0.2303350067138672, 0.23126010131835936, 0.2312454376220703, 0.23151046752929688, 0.23053517150878905, 0.23174143981933593, 0.23089497375488283, 0.23117999267578124, 0.23116278076171876, 0.23116595458984374, 0.23181925964355468, 0.2312806396484375, 0.2321017303466797, 0.23123779296875, 0.2318602294921875, 0.23204454040527345, 0.23186431884765624, 0.2309345245361328, 0.23917977905273438, 0.22695677185058594, 0.22765171813964843, 0.22981581115722657, 0.2359817352294922, 0.22758195495605468, 0.2278943634033203, 0.22875155639648437, 0.23475273132324218, 0.22963340759277343, 0.22814373779296876, 0.22847897338867187, 0.23596237182617188, 0.2320299530029297, 0.2285810546875, 0.229042236328125, 0.23199385070800782, 0.23244744873046874, 0.2285019226074219, 0.22831936645507814, 0.23069900512695313, 0.23281869506835937, 0.23071273803710937, 0.22923234558105468, 0.23047666931152344, 0.2316104278564453, 0.23155091857910157, 
0.22983065795898439, 0.2308590087890625, 0.23196237182617188, 0.231878662109375, 0.229718017578125, 0.23108607482910157, 0.23095295715332032, 0.23377920532226562, 0.2294847412109375, 0.2306414794921875, 0.22984037780761718, 0.23274044799804688, 0.2311519012451172, 0.23209024047851562, 0.2296584014892578, 0.23304214477539062, 0.23101408386230468, 0.2315676727294922, 0.2302423095703125, 0.23238041687011718, 0.23198707580566405, 0.23102886962890626, 0.2300185546875, 0.23139173889160156, 0.23266099548339844, 0.23224070739746094, 0.23099209594726564, 0.2305631103515625, 0.23196505737304687, 0.23195091247558594, 0.23223526000976563, 0.23117593383789062, 0.2316996154785156, 0.23258198547363282, 0.23231797790527345, 0.23074327087402344, 0.24260231018066405, 0.2279239044189453, 0.22738966369628907, 0.22891523742675782, 0.23686968994140625, 0.228669189453125, 0.22704742431640626, 0.22878207397460937, 0.23506246948242188, 0.23016099548339844, 0.22863690185546875, 0.22853176879882814, 0.2520203857421875, 0.22839736938476562, 0.2279838409423828, 0.22824140930175782, 0.235610107421875, 0.23063548278808593, 0.2279956817626953, 0.22952960205078124, 0.23299842834472656, 0.23343356323242187, 0.22948809814453125, 0.228315673828125, 0.23106739807128907, 0.23308248901367187, 0.23042317199707033, 0.22841958618164063, 0.2298440704345703, 0.23230557250976563, 0.23159193420410157, 0.22995907592773437, 0.23029525756835936, 0.23235775756835939, 0.23270297241210938, 0.23148115539550781, 0.22853036499023438, 0.2310102996826172, 0.23472848510742186, 0.23138198852539063, 0.2296351318359375, 0.23110304260253905, 0.23397824096679687, 0.23166371154785156, 0.2300654754638672, 0.2309412841796875, 0.23317503356933594, 0.23185350036621094, 0.23032890319824217, 0.2305146942138672, 0.23193299865722655, 0.23127955627441407, 0.23154893493652343, 0.23017062377929687, 0.23209368896484375, 0.2327941131591797, 0.2320343017578125, 0.23174552917480468, 0.2314915771484375, 0.23233737182617187, 0.23216336059570314, 0.2316247100830078, 0.230411865234375, 0.24189669799804686, 0.2276666564941406, 0.22706816101074218, 0.22999760437011718, 0.2378027801513672, 0.2287953338623047, 0.2277538604736328, 0.2277425994873047, 0.23395738220214843, 0.23059251403808595, 0.2288353271484375, 0.22753805541992186, 0.23353616333007812, 0.23206256103515624, 0.22814076232910158, 0.22905743408203125, 0.23218995666503905, 0.23307264709472655, 0.23025013732910157, 0.229431640625, 0.23041023254394533, 0.23225138854980468, 0.23076377868652342, 0.22966160583496092, 0.2302278137207031, 0.23178445434570313, 0.23142323303222656, 0.23101504516601562, 0.23027235412597657, 0.2323948516845703, 0.23126416015625, 0.23066294860839845, 0.2297318115234375, 0.2320389404296875, 0.23205223083496093, 0.23097190856933594, 0.2301255645751953, 0.23092576599121092, 0.23207379150390625, 0.23197039794921875, 0.23092189025878906, 0.23119468688964845, 0.23222108459472657, 0.23083445739746095, 0.23018290710449218, 0.23117369079589845, 0.23164710998535157, 0.23220895385742188, 0.23132774353027344, 0.23069631958007814, 0.23220223999023437, 0.23308761596679686, 0.23153990173339845, 0.23136253356933595, 0.23234442138671876, 0.23235340881347658, 0.2316537628173828, 0.2305986633300781, 0.2311393280029297, 0.23211538696289064, 0.23184466552734376, 0.23144242858886718, 0.2328783416748047, 0.24208816528320312, 0.2270812530517578, 0.22740652465820313, 0.23039414978027345, 0.2371604461669922, 0.22813081359863283, 0.22680720520019532, 0.2290694122314453, 0.2351513671875, 0.23098162841796874, 
0.22872679138183594, 0.22834141540527345, 0.23496124267578125, 0.23155247497558593, 0.22857171630859374, 0.22814512634277342, 0.23276339721679687, 0.23289447021484375, 0.22927667236328125, 0.22758892822265625, 0.23168223571777344, 0.2330166778564453, 0.231295654296875, 0.228853759765625, 0.23089663696289062, 0.2327163848876953, 0.2311771240234375, 0.23053517150878905, 0.23043072509765625, 0.23140956115722655, 0.23178250122070312, 0.23098355102539062, 0.22995484924316406, 0.23191180419921875, 0.2323563232421875, 0.23156880187988282, 0.23006694030761718, 0.231272216796875, 0.2319459228515625, 0.23123983764648437, 0.23023799133300782, 0.23155567932128907, 0.23136151123046875, 0.23105209350585937, 0.23086904907226563, 0.23168109130859374, 0.2320572509765625, 0.2326492462158203, 0.23074153137207032, 0.23278640747070312, 0.23191168212890625, 0.23217277526855468, 0.23090205383300783, 0.23240019226074218, 0.23139833068847657, 0.23203634643554688, 0.23110450744628908, 0.23226771545410158, 0.23191903686523438, 0.23274124145507813, 0.23236019897460938, 0.23208551025390625, 0.2321667785644531, 0.24207699584960937, 0.22715872192382813, 0.2275081329345703, 0.22964437866210938, 0.2374266815185547, 0.22879026794433593, 0.22808515930175782, 0.23059059143066407, 0.2354204864501953, 0.2306096649169922, 0.22705836486816405, 0.22832150268554688, 0.23337164306640626, 0.23184384155273438, 0.22901475524902343, 0.22871270751953124, 0.23194473266601562, 0.2326650848388672, 0.2299658203125, 0.22854981994628906, 0.23159628295898438, 0.232884033203125, 0.23078579711914063, 0.22942515563964844, 0.23056973266601563, 0.23205914306640624, 0.2313318328857422, 0.22966061401367188, 0.23109552001953124, 0.2320611572265625, 0.23294451904296876, 0.23039974975585936, 0.22974038696289062, 0.23187472534179687, 0.23260365295410157, 0.2311265869140625, 0.22987767028808595, 0.2312352294921875, 0.23187692260742188, 0.23252024841308594, 0.23005699157714843, 0.23168234252929687, 0.23155296325683594, 0.23232293701171874, 0.23193075561523438, 0.2306334686279297, 0.2316940155029297, 0.2321595458984375, 0.23238627624511718, 0.23157994079589844, 0.23101222229003907, 0.23256600952148437, 0.23174029541015626, 0.23175782775878906, 0.23198025512695314, 0.23242547607421876, 0.23129945373535157, 0.23195059204101562, 0.23054762268066406, 0.23198310852050782, 0.23230998229980468, 0.2316295928955078, 0.2315898895263672, 0.24180738830566406, 0.22821888732910156, 0.22740777587890626, 0.22937770080566405, 0.23642544555664063, 0.22785638427734375, 0.22783818054199217, 0.22875135803222657, 0.23528652954101562, 0.2306759033203125, 0.22748150634765624, 0.22956112670898438, 0.23339990234375, 0.23139701843261717, 0.22971455383300782, 0.22833561706542968, 0.23189414978027345, 0.23156211853027345, 0.23042839050292968, 0.22777066040039062, 0.2309099578857422, 0.23235296630859376, 0.2310828094482422, 0.2294325408935547, 0.22992127990722655, 0.23389126586914064, 0.23158041381835937, 0.23022589111328126, 0.2302313537597656, 0.23354861450195313, 0.23140762329101563, 0.23023207092285156, 0.22970162963867188, 0.23234080505371094, 0.23223365783691408, 0.2310511932373047, 0.2308602294921875, 0.2318745880126953, 0.23238505554199218, 0.23134214782714843, 0.23003546142578124, 0.23168988037109375, 0.23195068359375, 0.2312539825439453, 0.2313338623046875, 0.23188636779785157, 0.2314686737060547, 0.23235472106933594, 0.23081369018554687, 0.23150784301757812, 0.23238368225097655, 0.23192649841308594, 0.23135459899902344, 0.23111807250976563, 0.23249411010742188, 
0.2323289337158203, 0.2340240936279297, 0.23078988647460938, 0.23147120666503906, 0.23222610473632813, 0.23350093078613282, 0.23130361938476562, 0.23159504699707031, 0.24047520446777343, 0.2286473846435547, 0.22908767700195312, 0.2304389190673828, 0.23505101013183594, 0.22877993774414063, 0.2290134735107422, 0.23027850341796874, 0.23481832885742188, 0.23052450561523438, 0.22929830932617187, 0.2296746826171875, 0.23238902282714843, 0.23136592102050782, 0.2299155578613281, 0.22973440551757812, 0.2317884521484375, 0.23160432434082032, 0.23074610900878906, 0.230076416015625, 0.23059872436523438, 0.23225331115722656, 0.23163909912109376, 0.23001496887207032, 0.23086285400390624, 0.2309591064453125, 0.23105946350097656, 0.23099714660644532, 0.23141667175292968, 0.2306682891845703, 0.23140762329101563, 0.23135845947265626, 0.23256678771972655, 0.23079936218261718, 0.23238645935058594, 0.2306817626953125, 0.23233836364746094, 0.23072877502441405, 0.2319102325439453, 0.23057008361816406, 0.2316798095703125, 0.23185427856445312, 0.2319237060546875, 0.231468994140625, 0.23165373229980468, 0.2326580810546875, 0.23182188415527344, 0.23184165954589844, 0.23034275817871094, 0.23288426208496094, 0.23213658142089844, 0.23178866577148438, 0.2305576934814453, 0.23162384033203126, 0.23213104248046876, 0.23175308227539063, 0.231404541015625, 0.23193394470214843, 0.2330864715576172, 0.231938232421875, 0.2313691864013672, 0.2321017303466797, 0.23223091125488282]",tokens/s,4.327426586820344,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 48.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 188071 has 14.70 GiB memory in use. Of the allocated memory 14.42 GiB is allocated by PyTorch, and 176.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,810.307584,4683.923456,0.0,4288.67584,4213.842432,s,1,13.3825947265625,13.3825947265625,0.0,13.3825947265625,13.3825947265625,13.3825947265625,13.3825947265625,[13.3825947265625],,kWh,0.00018314163358749863,2.0194795041710082e-05,5.9438658661999924e-05,0.00026277508729120863,,MB,1203.580928,5107.54816,0.0,4699.717632,4535.245312,s,10,8.61562255859375,0.861562255859375,0.008665497944930763,0.8641178894042969,0.8676427673339844,0.8691550628662109,0.8703648992919921,"[0.8376939697265625, 0.8578470458984375, 0.8613602294921875, 0.86381201171875, 0.86074853515625, 0.8644237670898437, 0.8673067016601562, 0.866231689453125, 0.86553125, 0.8706673583984375]",tokens/s,297.13465075678124,kWh,2.512609172499987e-05,2.77097493549076e-06,1.671614763216668e-05,4.461321429265731e-05,tokens/kWh,5738210.170660891,MB,1254.387712,5115.936768,0.0,4708.10624,4535.247872,s,10,40.49503979492187,4.049503979492187,0.008818434365495443,4.05219580078125,4.058032958984375,4.060212646484375,4.0619563964843755,"[4.034101318359375, 4.038423095703125, 4.041629638671875, 4.04458203125, 4.0519599609375, 4.055001220703125, 4.052431640625, 4.062392333984375, 4.057548583984375, 4.056969970703125]",tokens/s,15.557460943130193,kWh,0.00011892334101375033,1.3116958640399224e-05,7.873059539183332e-05,0.00021077089504598282,tokens/kWh,298902.7492920956,,s,630,40.49235707473757,0.06427358265831357,0.0015613808815573744,0.06408670425415039,0.06476814651489259,0.06502831153869629,0.07511216720581056,"[0.07883795166015625, 0.06517282867431641, 0.06399033737182618, 0.06358208084106445, 0.06323929595947266, 0.06272499084472656, 0.06272784042358398, 0.06273904037475586, 0.06267811203002929, 0.0627658576965332, 0.06272927856445312, 0.06436297607421874, 0.0639450569152832, 0.06363328170776367, 0.06317478561401367, 0.06414745330810546, 0.06385391998291015, 0.06370131301879883, 0.06434236907958985, 0.06395510482788086, 0.06379913711547852, 0.06296960067749023, 0.06361520004272461, 0.06377679824829102, 0.06308883285522461, 0.06342943954467774, 0.06381792068481446, 0.0633469123840332, 0.06313430404663085, 0.06461849975585937, 0.06422099304199219, 0.0638199691772461, 0.0634093132019043, 0.06483977508544922, 0.06380006408691406, 0.06366617584228515, 0.06442111968994141, 0.06395775985717773, 0.06388051223754883, 0.0632364158630371, 0.06434371185302734, 0.0640823974609375, 0.0639526710510254, 0.06328163146972657, 0.06322134399414063, 0.06354927825927735, 0.06443417358398437, 0.06413369750976562, 0.0636313591003418, 0.06385459136962891, 0.06440464019775391, 0.0640021743774414, 0.06467219543457031, 0.06434342193603515, 0.06393334579467773, 0.06358425521850586, 0.0645481948852539, 
0.06434678649902344, 0.06397747039794922, 0.06351433563232421, 0.06330352020263671, 0.06500188446044922, 0.06455219268798829, 0.07693769836425782, 0.06492147064208985, 0.06408739471435547, 0.06373033523559571, 0.06331763076782226, 0.06295158386230469, 0.06320095825195313, 0.06293056106567382, 0.06303382492065429, 0.06298790359497071, 0.06296275329589844, 0.06297983932495117, 0.06299609756469726, 0.0629961280822754, 0.0649551010131836, 0.06710681915283204, 0.06476179504394532, 0.06423149108886719, 0.06352719879150391, 0.06341603088378907, 0.06429417419433593, 0.06403663635253906, 0.06366227340698243, 0.06314006423950196, 0.06325827026367188, 0.06452877044677735, 0.06380323028564452, 0.06352758407592773, 0.06308438491821289, 0.06336959838867187, 0.06432745361328125, 0.0637583351135254, 0.06394204711914063, 0.064340576171875, 0.06398566436767578, 0.06374582290649414, 0.06383023834228516, 0.06456524658203125, 0.06415564727783203, 0.06390364837646484, 0.06357772827148438, 0.06443465423583984, 0.06453775787353516, 0.06418716430664062, 0.06393376159667968, 0.06361983871459961, 0.06444236755371094, 0.0639365119934082, 0.06369270324707031, 0.0644486083984375, 0.06403260803222656, 0.06384630584716797, 0.06356198501586914, 0.06447872161865234, 0.06412544250488281, 0.0639766731262207, 0.06348880004882812, 0.06452355194091797, 0.0646595230102539, 0.06434883117675781, 0.06393241500854492, 0.0634511375427246, 0.06363478469848632, 0.07382377624511718, 0.06468851470947265, 0.06397673416137695, 0.06359932708740235, 0.06322995376586914, 0.06435558319091797, 0.06392460632324219, 0.06360486221313477, 0.06313008117675781, 0.0629634895324707, 0.06299238586425782, 0.06474748992919922, 0.06424578857421875, 0.0637279052734375, 0.06355116653442383, 0.0643318099975586, 0.06368255996704102, 0.06357820892333985, 0.06436831665039063, 0.06392380905151367, 0.06372825622558594, 0.06377676773071289, 0.06486016082763672, 0.0642779541015625, 0.06379987335205078, 0.06348185729980468, 0.06330275344848633, 0.06440335845947266, 0.06384873580932617, 0.06357270431518555, 0.06407901000976562, 0.06407657623291016, 0.06385446548461914, 0.06494636535644531, 0.06439247894287109, 0.06387171173095703, 0.0638130226135254, 0.06351878356933593, 0.06440809631347656, 0.06471065521240234, 0.06398099136352539, 0.0633554573059082, 0.06436857604980468, 0.06392975997924805, 0.06367299270629882, 0.06373750305175781, 0.0644529571533203, 0.06406940460205078, 0.06442620849609375, 0.06395814514160156, 0.06356579208374023, 0.06376335906982422, 0.06454271697998047, 0.06424937438964844, 0.06495894622802735, 0.06439437103271485, 0.06396540832519532, 0.06380147171020507, 0.06445625305175781, 0.06410105895996093, 0.06381084823608399, 0.06449443054199219, 0.06414556884765625, 0.07492995452880859, 0.06452464294433594, 0.06403494262695313, 0.06359116744995118, 0.06315097427368165, 0.06425202941894531, 0.06392575836181641, 0.06389503860473633, 0.0632490234375, 0.06422566223144531, 0.0636819839477539, 0.06346912002563476, 0.06346566390991211, 0.06417286682128906, 0.06402845001220703, 0.06360496139526367, 0.0644106216430664, 0.0642325439453125, 0.06332611083984375, 0.06400972747802734, 0.06429478454589843, 0.06395065689086914, 0.06383084869384766, 0.06354358291625976, 0.06425296020507812, 0.06387782287597656, 0.06363750457763671, 0.06427017974853516, 0.06387868881225586, 0.0638490867614746, 0.06392623901367188, 0.06436156463623047, 0.06398223876953125, 0.06379983901977539, 0.06418816375732422, 0.064505859375, 0.06404096221923829, 0.0638914566040039, 
0.0645090560913086, 0.06422617340087891, 0.06386003112792969, 0.06355424118041993, 0.06439910125732422, 0.06388761520385743, 0.06350438308715821, 0.0647906265258789, 0.06441522979736328, 0.06397699356079102, 0.06372236633300782, 0.0644582061767578, 0.06425039672851562, 0.06454608154296874, 0.06416588592529297, 0.06391471862792969, 0.06410614776611329, 0.06426787567138671, 0.06443084716796875, 0.06365343856811523, 0.0645263671875, 0.06400144195556641, 0.06385532760620118, 0.06457782745361328, 0.06448099517822266, 0.07537216186523438, 0.06461004638671874, 0.06401206207275391, 0.06360969543457032, 0.06322550582885743, 0.06404905700683594, 0.06376902389526368, 0.06357606506347656, 0.06314422225952149, 0.0641632308959961, 0.0636473617553711, 0.06421984100341797, 0.06379110336303712, 0.06360892868041992, 0.06380944061279296, 0.06426419067382813, 0.06401974487304687, 0.0645429458618164, 0.06417430114746094, 0.06360086441040039, 0.06428438568115234, 0.06390000152587891, 0.0641719970703125, 0.06391350555419922, 0.0637199363708496, 0.06364889526367187, 0.06416883087158202, 0.06385657501220703, 0.06374201583862305, 0.06441165161132813, 0.06381977462768555, 0.06365305709838867, 0.06437366485595702, 0.06487849426269532, 0.06428070068359375, 0.06387247848510742, 0.06446707153320312, 0.06407968139648437, 0.06493414306640626, 0.06615676879882812, 0.06381977462768555, 0.06358220672607422, 0.0634769287109375, 0.06500355529785157, 0.06440239715576172, 0.06374399948120117, 0.06419149017333985, 0.06447801971435548, 0.06456034851074219, 0.06441244506835937, 0.06415974426269531, 0.06456934356689453, 0.06422528076171875, 0.06504649353027343, 0.06456508636474609, 0.06413533020019531, 0.06357590484619141, 0.06401657867431641, 0.06507520294189453, 0.06457875061035157, 0.06402738952636719, 0.0638853759765625, 0.06462464141845703, 0.07551795196533204, 0.06456934356689453, 0.06394879913330079, 0.06416582489013672, 0.06370105743408203, 0.0635228157043457, 0.06346748733520508, 0.06398064041137695, 0.06359750366210938, 0.0632699203491211, 0.06420601654052735, 0.06385232162475586, 0.06363056182861328, 0.06320134353637695, 0.06418915557861328, 0.06505010986328125, 0.06477008056640625, 0.06423091125488281, 0.06377532958984375, 0.06418470764160156, 0.06426195526123046, 0.06387756729125976, 0.06366124725341797, 0.06336700820922851, 0.06432841491699219, 0.0644505615234375, 0.06410034942626953, 0.06381260681152344, 0.06378742218017579, 0.06438358306884766, 0.06542745971679688, 0.06431737518310547, 0.06476806640625, 0.06437593841552734, 0.06416063690185547, 0.06491526031494141, 0.06424508666992187, 0.06384316635131836, 0.06370297622680664, 0.06437689971923828, 0.0649583969116211, 0.0644109115600586, 0.06398236846923828, 0.06366761779785156, 0.06386368179321289, 0.06446665954589843, 0.06380953598022461, 0.06574845123291016, 0.06378675079345703, 0.06438790130615234, 0.06409827423095703, 0.06466873931884766, 0.0642011489868164, 0.06403727722167969, 0.06445065307617187, 0.06514073944091797, 0.06456934356689453, 0.06424761962890625, 0.06402272033691406, 0.0640610580444336, 0.0646123504638672, 0.06455567932128906, 0.06399151992797851, 0.07492991638183594, 0.06456793975830079, 0.06400204467773438, 0.06357196807861328, 0.06315827178955079, 0.06459552001953126, 0.06398611068725586, 0.06355699157714843, 0.0632856330871582, 0.06357958221435547, 0.06406790161132812, 0.06367283248901368, 0.0633449592590332, 0.06420652770996094, 0.06389126586914062, 0.06417635345458984, 0.06461436462402344, 0.0643399658203125, 0.06400819396972657, 
0.06369459152221679, 0.0637768325805664, 0.0643094711303711, 0.06341363143920899, 0.06392483139038085, 0.0643276824951172, 0.0641630401611328, 0.06345721435546875, 0.06539759826660156, 0.06430105590820312, 0.06407955169677734, 0.06437715148925781, 0.0644382095336914, 0.06483542633056641, 0.06432790374755859, 0.06378438568115234, 0.06439993286132813, 0.06397235107421875, 0.0644775390625, 0.06417030334472656, 0.06380886459350586, 0.06344931030273437, 0.0644016342163086, 0.0639309425354004, 0.06461440277099609, 0.06449152374267578, 0.06452601623535156, 0.06423725128173828, 0.06501644897460937, 0.06437586975097656, 0.06398252868652343, 0.06384608078002929, 0.0640904312133789, 0.06449056243896484, 0.0640992660522461, 0.06395391845703124, 0.06415795135498047, 0.06456374359130859, 0.06418447875976563, 0.0645033950805664, 0.06420291137695312, 0.0642174072265625, 0.06462834930419922, 0.0651960678100586, 0.07654399871826172, 0.06484716796875, 0.06434886169433594, 0.06389350509643554, 0.06374409484863282, 0.06655133056640625, 0.06360271835327148, 0.06421129608154297, 0.06356172943115235, 0.0636231689453125, 0.0635814094543457, 0.06414828491210937, 0.06371865463256836, 0.06331606292724609, 0.064837890625, 0.06497318267822266, 0.06433177947998046, 0.06402252960205078, 0.06434611511230469, 0.06422732543945313, 0.06344467163085937, 0.0643703384399414, 0.06394464111328126, 0.06436265563964844, 0.0640456314086914, 0.06382953643798828, 0.06429878234863282, 0.06384662246704101, 0.06363820648193359, 0.06449517059326172, 0.0648153305053711, 0.06443417358398437, 0.06454681396484375, 0.06426214599609376, 0.06384435272216797, 0.06695116424560547, 0.06386687850952148, 0.06350787353515625, 0.0654131851196289, 0.06441629028320313, 0.06393856048583985, 0.06378905487060547, 0.06389680099487305, 0.06436943817138673, 0.06390934371948243, 0.06397139358520508, 0.06449199676513671, 0.06437423706054687, 0.0645633316040039, 0.0645206069946289, 0.0644814682006836, 0.064325439453125, 0.06422105407714844, 0.06557884979248046, 0.06444866943359374, 0.06401017761230468, 0.06391212844848633, 0.06395699310302734, 0.0646123504638672, 0.0644874267578125, 0.06414950561523437, 0.06510963439941406, 0.06422470092773437, 0.07518659210205078, 0.06450534057617187, 0.06406195068359374, 0.06460415649414063, 0.06399987030029297, 0.06354137420654297, 0.06339567947387695, 0.06402655792236328, 0.06361110305786133, 0.06428262329101563, 0.06396723175048828, 0.06374195098876953, 0.06334678268432617, 0.06427638244628907, 0.0638397102355957, 0.06467763519287109, 0.06433052825927735, 0.064036865234375, 0.06418637084960938, 0.0637317771911621, 0.06471612548828125, 0.06420950317382812, 0.06381913757324219, 0.06381427383422851, 0.06424371337890625, 0.06479180908203125, 0.06419942474365234, 0.06384844970703125, 0.06359878540039063, 0.06426809692382812, 0.06408601379394531, 0.06441983795166016, 0.06430643463134765, 0.06385283279418945, 0.06432975769042969, 0.0639799690246582, 0.06444009399414062, 0.06412310028076172, 0.06381296157836915, 0.06418294525146484, 0.06475971221923828, 0.06495552062988282, 0.06438114929199219, 0.06378726577758789, 0.06422742462158203, 0.0644758071899414, 0.06495945739746094, 0.0643755874633789, 0.06413648223876953, 0.06489775848388672, 0.06409625244140625, 0.06494380950927735, 0.06434425354003906, 0.06388748931884766, 0.06459091186523437, 0.06418118286132812, 0.06499737548828124, 0.06451824188232422, 0.06576118469238282, 0.06392217636108398, 0.06448025512695313, 0.06399897766113281, 0.0642007064819336, 0.07526537322998048, 
0.06485635375976563, 0.06396556854248046, 0.06350848007202148, 0.0634224624633789, 0.06410240173339844, 0.06363919830322265, 0.06352262496948242, 0.0630338897705078, 0.064104736328125, 0.06366547012329102, 0.06352278518676757, 0.06342873764038086, 0.06422278594970703, 0.06473554992675781, 0.06542176055908203, 0.06533734130859375, 0.06435750579833985, 0.06411148834228515, 0.06333235168457031, 0.06362863922119141, 0.06534210968017579, 0.06421702575683594, 0.06390313720703125, 0.06368483352661133, 0.06379286575317383, 0.0641370849609375, 0.06380563354492187, 0.06349177551269532, 0.06483197021484376, 0.06454118347167968, 0.0647759017944336, 0.06444624328613281, 0.06436637115478516, 0.06458211517333984, 0.06432998657226563, 0.06473113250732422, 0.06507315063476563, 0.06415315246582032, 0.06381232070922851, 0.06364950561523437, 0.06436204528808594, 0.06400249481201171, 0.06389775848388672, 0.06392345428466797, 0.06458428955078124, 0.06442915344238281, 0.06457027435302734, 0.06465945434570312, 0.0645010528564453, 0.06423126220703125, 0.06476886749267578, 0.06454476928710938, 0.06442598724365234, 0.06395465469360352, 0.06388281631469726, 0.0648259506225586, 0.06449779510498047, 0.06388121414184571, 0.06503801727294922, 0.06453414154052735, 0.06417052459716797, 0.06414498901367187]",tokens/s,15.558491663925528,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,821.194752,5878.185984,0.0,5475.663872,5452.374016,s,1,16.393779296875,16.393779296875,0.0,16.393779296875,16.393779296875,16.393779296875,16.393779296875,[16.393779296875],,kWh,0.0002615456428374955,2.8842998792134644e-05,8.614840225200371e-05,0.0003765370438816338,,MB,1346.678784,6475.874304,0.0,6060.76928,5886.76096,s,10,13.266024658203124,1.3266024658203124,0.00817306123555496,1.3283225708007813,1.3348250488281248,1.3351523315429688,1.3354141577148437,"[1.305518310546875, 1.3205419921875, 1.324936279296875, 1.327226806640625, 1.327194091796875, 1.3305157470703124, 1.330441162109375, 1.3294183349609374, 1.3347523193359374, 1.3354796142578125]",tokens/s,192.97416264163274,kWh,3.869953485416659e-05,4.268053703678933e-06,2.5772381729000094e-05,6.873997028684561e-05,tokens/kWh,3724179.6720559434,MB,1402.986496,6494.748672,0.0,6077.546496,5886.76352,s,10,61.269902343750005,6.1269902343750005,0.020077264303914755,6.1324619140625,6.1482697265625,6.149683447265625,6.150814423828125,"[6.094228515625, 6.0989755859375, 6.10583740234375, 6.11919384765625, 6.12966064453125, 6.13526318359375, 6.1435751953125, 6.144115234375, 6.14795556640625, 6.15109716796875]",tokens/s,10.282373170197566,kWh,0.00018015086496166795,1.9872084053081297e-05,0.00011954773452700075,0.00031957068354175003,tokens/kWh,197139.48507973642,,s,630,61.266048393249505,0.09724769586230081,0.0017441477570986745,0.09697465515136719,0.09829068832397461,0.09898698768615723,0.10839793655395508,"[0.10948473358154297, 0.09633171081542968, 
0.09537747192382813, 0.0951961898803711, 0.09586534118652344, 0.09516876983642578, 0.09527932739257812, 0.09605120086669922, 0.09520508575439453, 0.09703657531738281, 0.09871673583984375, 0.09683769226074218, 0.09587599945068359, 0.09551407623291015, 0.09578694152832032, 0.0965146255493164, 0.09560671997070312, 0.0955230712890625, 0.09553673553466797, 0.09688480377197266, 0.09747452545166016, 0.09727954864501953, 0.09647952270507812, 0.09580079650878906, 0.09650838470458985, 0.09676131439208985, 0.09566121673583984, 0.09632278442382812, 0.0954537582397461, 0.09678348541259765, 0.09722150421142578, 0.09699533081054687, 0.09648127746582032, 0.09621011352539062, 0.09674630737304687, 0.09672297668457032, 0.0966308135986328, 0.09608390045166015, 0.09827532958984375, 0.09627648162841797, 0.0968826904296875, 0.09806553649902344, 0.0966843490600586, 0.09653897857666016, 0.09673923492431641, 0.09622150421142578, 0.09669222259521484, 0.09675981140136719, 0.09628428649902344, 0.09688893127441406, 0.09666941070556641, 0.0975406723022461, 0.09663597106933594, 0.09674352264404297, 0.09686307525634766, 0.096359619140625, 0.09718867492675781, 0.09695174407958984, 0.0972191390991211, 0.0960997085571289, 0.09700621032714844, 0.09722061157226562, 0.09763212585449219, 0.10844009399414062, 0.0962542724609375, 0.09534636688232422, 0.0951357421875, 0.09601433563232421, 0.09526461029052734, 0.09611222076416015, 0.09530413055419922, 0.09534272003173828, 0.09785958099365234, 0.09873331451416016, 0.09724198150634765, 0.09654179382324218, 0.09572380828857421, 0.09630719757080078, 0.09636710357666016, 0.09537712097167969, 0.09519868469238281, 0.09613533020019531, 0.09694070434570312, 0.09732915496826172, 0.0974111328125, 0.0966429443359375, 0.09700495910644531, 0.09640950775146484, 0.09690959930419922, 0.09573014068603515, 0.09609561920166015, 0.0963792953491211, 0.09653600311279296, 0.09705961608886719, 0.09694364929199219, 0.09665734100341797, 0.09768399810791016, 0.09648332977294923, 0.09589759826660156, 0.09564774322509766, 0.09670861053466796, 0.09716941070556641, 0.09648118591308594, 0.09634729766845704, 0.09705929565429687, 0.09704496002197266, 0.09806028747558594, 0.0965466537475586, 0.09602473449707032, 0.09647929382324219, 0.0970212173461914, 0.09749152374267578, 0.09673123168945312, 0.09673113250732422, 0.09676390075683594, 0.09750118255615234, 0.0973803482055664, 0.09714892578125, 0.09668589019775391, 0.09633219146728515, 0.0968578872680664, 0.09697264099121093, 0.09752387237548828, 0.09712435150146484, 0.09676595306396485, 0.09719091033935547, 0.10916230773925781, 0.096159423828125, 0.09530419158935546, 0.0953733139038086, 0.09567362976074219, 0.09525321960449219, 0.09607504272460937, 0.09525091552734374, 0.09617814636230469, 0.09899199676513672, 0.09906361389160157, 0.09722886657714844, 0.09629955291748046, 0.0959283218383789, 0.09582796478271484, 0.0956924819946289, 0.09610006713867188, 0.09645254516601562, 0.09575247955322265, 0.09775676727294921, 0.0982240982055664, 0.09737503814697265, 0.09679170989990235, 0.09639161682128906, 0.09565135955810547, 0.0955889892578125, 0.09577894592285156, 0.09669817352294922, 0.09630137634277344, 0.09713459014892578, 0.0974725112915039, 0.09753785705566406, 0.09751776123046875, 0.09696460723876953, 0.09624781036376953, 0.09652838134765625, 0.09567167663574219, 0.09647494506835938, 0.09654115295410157, 0.09706940460205078, 0.09690930938720703, 0.09719916534423828, 0.09757539367675781, 0.0982462387084961, 0.09662509155273437, 0.09603705596923828, 0.09651455688476562, 
0.09679952239990235, 0.0964085464477539, 0.09688829040527344, 0.09716304016113281, 0.09724390411376953, 0.09724079895019531, 0.09826707458496094, 0.09738275146484375, 0.09670854187011718, 0.09629523468017578, 0.09680255889892578, 0.09669017791748047, 0.0971673583984375, 0.09744121551513672, 0.09703890991210938, 0.09730194854736328, 0.10970323181152344, 0.0963420181274414, 0.09554534149169921, 0.0953957748413086, 0.09542572784423828, 0.09539891052246094, 0.09548786926269531, 0.09551033782958984, 0.09533663940429687, 0.09931766510009765, 0.09935881805419922, 0.09743360137939452, 0.09638861083984375, 0.09554585266113282, 0.09541836547851562, 0.0954224624633789, 0.0954429473876953, 0.09649667358398438, 0.09630985260009765, 0.09809356689453125, 0.1000771484375, 0.097612060546875, 0.0968089599609375, 0.09653187561035156, 0.09663935852050781, 0.09591420745849609, 0.09568460845947266, 0.09571721649169922, 0.09697296142578125, 0.097842529296875, 0.09790534210205078, 0.0973839340209961, 0.09692797088623047, 0.09683094024658204, 0.09720057678222656, 0.09648982238769531, 0.09583821105957031, 0.096635009765625, 0.09688057708740234, 0.09779795074462891, 0.09781468963623047, 0.09748000335693359, 0.09722537231445312, 0.09746147155761718, 0.09757299041748047, 0.09714466857910156, 0.09693881225585937, 0.09658303833007813, 0.0968853759765625, 0.09726134490966797, 0.09804950714111328, 0.09793596649169922, 0.09771965026855468, 0.09734432220458984, 0.09759334564208984, 0.0968845443725586, 0.09684786987304687, 0.09698528289794922, 0.0970645751953125, 0.0979922866821289, 0.09744652557373047, 0.09774899291992188, 0.0977716827392578, 0.10753228759765625, 0.09636659240722656, 0.09537741088867188, 0.09540198516845703, 0.09533030700683594, 0.09631887817382813, 0.09565811157226563, 0.09539222717285156, 0.09616515350341796, 0.10011865234375, 0.09999526214599609, 0.09775794982910156, 0.09649993896484375, 0.09575628662109376, 0.09596018981933593, 0.09693478393554687, 0.09589462280273438, 0.09642691040039063, 0.09637888336181641, 0.09835110473632812, 0.09863314819335937, 0.09764640045166016, 0.09682816314697265, 0.0976275863647461, 0.09656787109375, 0.09580134582519531, 0.09595903778076172, 0.09683932495117188, 0.09748038482666016, 0.09795855712890625, 0.09801439666748046, 0.09776156616210938, 0.09720889282226562, 0.09724720001220703, 0.09691337585449218, 0.09663491058349609, 0.09633542633056641, 0.09672707366943359, 0.09755264282226563, 0.09794764709472656, 0.0988325424194336, 0.09774899291992188, 0.09694643402099609, 0.09693084716796875, 0.09700409698486329, 0.09685826873779296, 0.09742777252197266, 0.09671443176269531, 0.09692124938964844, 0.09774281311035156, 0.09796015930175782, 0.09811984252929687, 0.09751551818847656, 0.09746022033691407, 0.09707305908203125, 0.09668438720703125, 0.09714252471923829, 0.09717660522460937, 0.0983927001953125, 0.0985255355834961, 0.09701376342773438, 0.0979078369140625, 0.09785993957519531, 0.1082947235107422, 0.09629119873046875, 0.09549404907226562, 0.09593228912353516, 0.096112060546875, 0.09632009887695313, 0.09629695892333984, 0.09635862731933593, 0.09658303833007813, 0.09923529815673827, 0.09862374114990234, 0.09722029113769531, 0.09635533142089844, 0.09677161407470704, 0.09671708679199219, 0.09654812622070312, 0.09634703826904296, 0.09597500610351563, 0.09689129638671876, 0.09902067565917969, 0.09902025604248046, 0.0970093765258789, 0.09647539520263672, 0.09677689361572266, 0.09674060821533204, 0.09684659576416016, 0.09696361541748047, 0.09637372589111329, 0.09716969299316407, 
0.09796150207519531, 0.09797634887695313, 0.09786998748779296, 0.09833267211914062, 0.0965665283203125, 0.09612671661376954, 0.09674034881591796, 0.09695846557617188, 0.09702588653564453, 0.09717775726318359, 0.09777766418457032, 0.09764214324951172, 0.09789679718017578, 0.09792675018310547, 0.0976531524658203, 0.0974244155883789, 0.096623779296875, 0.0965050277709961, 0.09698162841796874, 0.09764454650878907, 0.09780838775634766, 0.09773590087890625, 0.09795231628417969, 0.09746249389648437, 0.09766092681884765, 0.09766297912597656, 0.09702188873291015, 0.09776306915283203, 0.09684614562988281, 0.09740614318847657, 0.09735485076904297, 0.09807622528076172, 0.09834233856201172, 0.09821625518798828, 0.10884236907958984, 0.09653254699707031, 0.09577734375, 0.09634809875488282, 0.0962314910888672, 0.09634815979003906, 0.09623142242431641, 0.09638057708740234, 0.09607804870605469, 0.09934246063232421, 0.09985574340820312, 0.09739942169189453, 0.09669010925292969, 0.09726956939697265, 0.09634226989746093, 0.0963768310546875, 0.0964070053100586, 0.09648585510253906, 0.09684393310546875, 0.09838787078857422, 0.09852067565917969, 0.0973431396484375, 0.09737644958496093, 0.09739507293701172, 0.09670783996582032, 0.09676806640625, 0.09656204986572266, 0.09658573150634765, 0.09721590423583984, 0.09807868957519532, 0.09895171356201173, 0.09758528137207031, 0.09753517150878906, 0.0971046371459961, 0.09673030090332031, 0.09673097229003906, 0.09739475250244141, 0.09665039825439453, 0.09735523223876953, 0.09785359954833985, 0.09781276702880859, 0.09818860626220703, 0.09800755310058594, 0.09801939392089844, 0.09680847930908203, 0.09677871704101562, 0.09708454132080079, 0.09717030334472657, 0.09772361755371094, 0.0983560028076172, 0.097959228515625, 0.09792752075195313, 0.09758866882324219, 0.09765705871582031, 0.09718022155761719, 0.09736409759521485, 0.09827059173583984, 0.09689510345458985, 0.09726207733154296, 0.09804799652099609, 0.09781657409667968, 0.09843276977539063, 0.09821414184570312, 0.1080387191772461, 0.09615974426269532, 0.09638015747070312, 0.0962915802001953, 0.09578495788574219, 0.09633177947998046, 0.09604630279541015, 0.09634601593017578, 0.0962138900756836, 0.10005923461914062, 0.099706787109375, 0.09761084747314454, 0.09667603302001954, 0.09670114898681641, 0.09635020446777344, 0.09638483428955077, 0.09641149139404297, 0.09671670532226563, 0.09725138854980468, 0.09843103790283203, 0.09822444915771485, 0.09792451477050781, 0.09731305694580078, 0.09703011322021485, 0.09673702239990234, 0.09670918273925781, 0.0965220184326172, 0.09658803558349609, 0.09713664245605469, 0.09897081756591797, 0.0989151382446289, 0.09772032165527343, 0.09727302551269532, 0.09692982482910156, 0.09740367889404297, 0.09701376342773438, 0.09681715393066406, 0.09678848266601563, 0.09750326538085938, 0.09786297607421875, 0.09807657623291016, 0.09839868927001953, 0.09751126098632812, 0.09817935943603516, 0.0965346908569336, 0.09667747497558593, 0.09682911682128906, 0.09692601776123047, 0.09832879638671875, 0.09844528198242188, 0.09740310668945312, 0.09783910369873047, 0.09758704376220703, 0.0978023681640625, 0.09772201538085938, 0.09742578887939453, 0.09661644744873046, 0.09741926574707031, 0.09799696350097656, 0.09817411041259766, 0.09800141143798828, 0.09793145751953125, 0.09864150238037109, 0.10868415832519532, 0.09607711791992188, 0.09631814575195312, 0.09621222686767578, 0.09635667419433594, 0.09618406677246094, 0.09636048126220703, 0.0961104965209961, 0.09631756591796875, 0.09994099426269532, 
0.10013699340820313, 0.09737161254882812, 0.09709737396240234, 0.09674345397949219, 0.09639561462402343, 0.09655753326416015, 0.09632947540283203, 0.09628697967529297, 0.09701376342773438, 0.09834194946289063, 0.09870636749267578, 0.09788127899169922, 0.09731267547607422, 0.0968194580078125, 0.096961181640625, 0.09771129608154297, 0.09650873565673829, 0.09659417724609375, 0.09702169799804687, 0.09798041534423828, 0.09893682861328125, 0.09887680053710937, 0.09690380859375, 0.09696051025390626, 0.09689907073974609, 0.09764873504638671, 0.09693536376953125, 0.09756902313232421, 0.09708124542236328, 0.09768582153320313, 0.09791875457763671, 0.09839971160888672, 0.09759552001953126, 0.09761650848388671, 0.09759273529052734, 0.09692240142822266, 0.09706448364257812, 0.09724479675292969, 0.0975202865600586, 0.09812713623046875, 0.09814908599853515, 0.09816694641113281, 0.09754182434082032, 0.09741737365722657, 0.09795993804931641, 0.09768537902832031, 0.09697634887695313, 0.0974649887084961, 0.09778339385986329, 0.09816687774658203, 0.09829817962646484, 0.09835311889648438, 0.09774697875976562, 0.10994163513183594, 0.09619840240478515, 0.09613951873779297, 0.09630735778808594, 0.0960428466796875, 0.09643177795410156, 0.0961146240234375, 0.09628912353515626, 0.09650796508789063, 0.10072166442871094, 0.10018924713134765, 0.097187744140625, 0.09657110595703125, 0.0963497314453125, 0.09672496032714843, 0.09682003021240235, 0.09643827056884766, 0.0962498550415039, 0.09700723266601563, 0.09929507446289063, 0.09900086212158203, 0.09800697326660156, 0.09733942413330078, 0.09643968200683593, 0.09632220458984375, 0.09644342041015624, 0.09686319732666016, 0.09695228576660156, 0.09746435546875, 0.0984035873413086, 0.09824537658691407, 0.09790873718261718, 0.09731890869140625, 0.09757071685791016, 0.09801526641845704, 0.09663410949707031, 0.09651872253417969, 0.09676416015625, 0.09789846038818359, 0.09816067504882812, 0.09898086547851563, 0.09828985595703126, 0.09718230438232422, 0.09736112213134765, 0.09749795532226563, 0.09713884735107423, 0.09700070190429687, 0.09804467010498047, 0.09702304077148438, 0.09786669158935547, 0.09810489654541016, 0.09920044708251953, 0.09755340576171875, 0.09781817626953125, 0.09757734680175781, 0.09678854370117188, 0.09700045013427734, 0.09751376342773438, 0.09766575622558593, 0.09813740539550782, 0.09826988983154297, 0.0993259506225586, 0.0975770263671875]",tokens/s,10.283019984514219,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,884.334592,657.391616,0.0,262.144,258.163712,s,1,8.331421875,8.331421875,0.0,8.331421875,8.331421875,8.331421875,8.331421875,[8.331421875],,kWh,2.553798450001068e-05,2.8098189440193495e-06,8.393062269990148e-06,3.6740865714020175e-05,,MB,1195.66336,758.054912,0.0,350.224384,317.820928,s,10,0.23482944107055664,0.023482944107055665,0.0001976525830758702,0.023503056526184084,0.02369461154937744,0.023746105670928955,0.023787300968170166,"[0.02336067199707031, 0.02362454414367676, 0.023585119247436524, 0.023522176742553712, 0.023483936309814452, 0.023074464797973634, 0.023319007873535157, 0.02379759979248047, 0.023378751754760743, 0.023683168411254882]",tokens/s,10901.529162311572,kWh,6.968638696897281e-07,7.68513307585397e-08,4.622689988257993e-07,1.2359841992740671e-06,tokens/kWh,207122388.90299484,MB,1229.115392,779.026432,0.0,371.195904,317.823488,s,10,11.461855712890623,1.1461855712890623,0.008816181875136324,1.14757421875,1.1575904418945313,1.1585895935058594,1.159388914794922,"[1.1345552978515625, 1.1515673828125, 1.1480777587890625, 1.157368408203125, 1.1485537109375, 1.1332515869140625, 1.1595887451171876, 1.1347701416015625, 1.1470706787109375, 1.147052001953125]",tokens/s,54.96492154332984,kWh,3.322266337739506e-05,3.6639666195696823e-06,1.38264368765738e-05,5.0713066873538534e-05,tokens/kWh,1242283.3775188746,,s,630,11.456464830398557,0.018184864810156444,0.00044804628817504525,0.0181625919342041,0.018491910934448245,0.018595945358276366,0.019085622653961187,"[0.01769340705871582, 0.018038816452026367, 0.017971168518066405, 0.017991199493408203, 0.017947359085083006, 0.01788697624206543, 0.01800396728515625, 0.018288576126098632, 0.01803225517272949, 0.01802729606628418, 0.01840905570983887, 0.018012224197387697, 0.017982528686523436, 0.017949600219726563, 0.017825056076049804, 0.017947519302368164, 0.017827007293701173, 0.01778521537780762, 0.017997919082641603, 0.017994304656982422, 0.017968128204345703, 0.018207487106323243, 0.017945920944213867, 0.017820255279541015, 0.017889280319213868, 0.017879039764404296, 0.018149375915527344, 0.017958015441894532, 0.01788812828063965, 0.01796847915649414, 0.018225248336791993, 0.018433631896972655, 0.018410303115844726, 0.01839529609680176, 0.018440000534057616, 0.018106399536132814, 0.018067615509033203, 0.01818934440612793, 0.018217983245849608, 0.01787696075439453, 0.018159839630126955, 0.01795465660095215, 0.018069440841674805, 0.017939647674560546, 0.018033567428588866, 0.01786979293823242, 0.017748928070068358, 0.017718719482421874, 0.017820064544677734, 0.017721248626708985, 0.017753536224365235, 0.017774816513061523, 0.017752351760864257, 0.01766383934020996, 0.01775868797302246, 0.017811679840087892, 0.01778659248352051, 0.018026399612426757, 0.017965024948120117, 0.01808211135864258, 0.018124671936035158, 0.018335071563720703, 0.01846134376525879, 0.017756351470947264, 0.018103103637695312, 0.018481151580810547, 0.017893375396728514, 0.017821695327758787, 0.017915903091430666, 0.01804697608947754, 0.018247840881347656, 0.01840320014953613, 0.018009727478027343, 0.017759904861450196, 0.017966880798339843, 0.017768768310546874, 0.01819926452636719, 0.018120576858520508, 0.018160991668701172, 0.018094751358032228, 0.01834966468811035, 0.01807401657104492, 0.018218399047851563, 0.01812950325012207, 0.018081855773925782, 0.018442432403564454, 0.018593536376953126, 0.018255872726440428, 0.01844633674621582, 0.017954944610595703, 0.017932159423828125, 
0.017983104705810545, 0.017885568618774415, 0.01828873634338379, 0.017960832595825194, 0.01847260856628418, 0.01811289596557617, 0.018194463729858397, 0.01818623924255371, 0.018184127807617186, 0.01797228813171387, 0.01801254463195801, 0.018409631729125978, 0.018345855712890626, 0.018516544342041016, 0.018367712020874023, 0.018385887145996094, 0.018453664779663086, 0.018451103210449217, 0.01857161521911621, 0.018523807525634765, 0.01840127944946289, 0.01848121643066406, 0.01849955177307129, 0.018532320022583006, 0.018710527420043945, 0.018792448043823243, 0.01849888038635254, 0.018476959228515624, 0.018921375274658203, 0.01862499237060547, 0.018557600021362305, 0.018480640411376953, 0.018520320892333984, 0.01856060791015625, 0.01842838478088379, 0.018151424407958985, 0.01849113655090332, 0.018448640823364258, 0.018409023284912108, 0.018479551315307617, 0.01830297660827637, 0.018381856918334962, 0.01866032028198242, 0.018595680236816407, 0.018452640533447265, 0.018539648056030273, 0.018402175903320314, 0.01840947151184082, 0.018548959732055663, 0.018510751724243164, 0.018617311477661134, 0.018536352157592775, 0.018366592407226563, 0.01848512077331543, 0.018381919860839844, 0.01818281555175781, 0.017939744949340822, 0.018190399169921875, 0.018172832489013673, 0.01803264045715332, 0.018093631744384765, 0.018022335052490235, 0.01802627182006836, 0.01808697509765625, 0.018110111236572267, 0.017876575469970703, 0.017902048110961914, 0.017858495712280275, 0.017989023208618164, 0.018666048049926758, 0.01835580825805664, 0.018155904769897462, 0.018155584335327147, 0.01822329521179199, 0.018187904357910158, 0.01865292739868164, 0.01842835235595703, 0.018290016174316408, 0.01867024040222168, 0.018869760513305665, 0.01801849555969238, 0.018071008682250977, 0.018216096878051757, 0.017823423385620117, 0.01788217544555664, 0.017777599334716798, 0.017864704132080078, 0.017819648742675782, 0.017958656311035156, 0.017856000900268554, 0.01780950355529785, 0.017775487899780274, 0.017870624542236327, 0.018104320526123048, 0.018179424285888673, 0.018000736236572265, 0.018083776473999023, 0.018145151138305664, 0.018109888076782227, 0.018170080184936523, 0.018233823776245116, 0.018835552215576173, 0.01825564765930176, 0.018464799880981445, 0.01833977508544922, 0.018152576446533203, 0.01800284767150879, 0.017954975128173827, 0.018294271469116212, 0.01780678367614746, 0.017770816802978515, 0.01774444770812988, 0.018429983139038087, 0.017922048568725587, 0.017903039932250977, 0.017905599594116212, 0.01797385597229004, 0.017799200057983397, 0.017733600616455077, 0.018282751083374023, 0.017780511856079102, 0.018292736053466797, 0.018685951232910156, 0.018441503524780273, 0.01809619140625, 0.018069183349609375, 0.018351423263549806, 0.018107679367065428, 0.01826464080810547, 0.018115936279296876, 0.018080255508422852, 0.018157855987548828, 0.018670944213867186, 0.01872496032714844, 0.01848067283630371, 0.018395967483520508, 0.01812879943847656, 0.01813811111450195, 0.018039392471313476, 0.01806502342224121, 0.01807436752319336, 0.01800998306274414, 0.017960159301757813, 0.017924543380737304, 0.018096607208251955, 0.018126880645751953, 0.01795187187194824, 0.017927040100097658, 0.0179182071685791, 0.018015167236328126, 0.020692800521850584, 0.01842870330810547, 0.018358015060424806, 0.018285343170166016, 0.018300607681274415, 0.018301984786987305, 0.018546911239624025, 0.020617984771728517, 0.02458624076843262, 0.018993152618408202, 0.018486623764038087, 0.017826784133911134, 0.018275520324707032, 0.01834441566467285, 
0.01838425636291504, 0.0182893123626709, 0.01842835235595703, 0.018463903427124024, 0.018419519424438476, 0.018506528854370118, 0.01847859191894531, 0.01834566307067871, 0.018264480590820312, 0.018270336151123046, 0.018298879623413086, 0.01827155113220215, 0.018303871154785156, 0.01827235221862793, 0.018230815887451172, 0.018276832580566407, 0.018300224304199218, 0.018291391372680665, 0.018304927825927735, 0.018415712356567384, 0.018372608184814454, 0.01859584045410156, 0.018519872665405272, 0.01828022384643555, 0.018383487701416016, 0.018462303161621094, 0.01836684799194336, 0.018377920150756837, 0.018284223556518556, 0.018432416915893556, 0.018306623458862303, 0.01838960075378418, 0.01828656005859375, 0.01829724884033203, 0.018364416122436524, 0.018328863143920897, 0.018350751876831054, 0.01839849662780762, 0.018403711318969725, 0.018552928924560546, 0.018461055755615234, 0.018219104766845705, 0.018132831573486326, 0.0180849609375, 0.018006431579589845, 0.01835468864440918, 0.018354175567626953, 0.01807356834411621, 0.018062559127807618, 0.017865535736083984, 0.017738752365112305, 0.01780393600463867, 0.017754655838012695, 0.017767679214477538, 0.017657920837402342, 0.017705472946166992, 0.017663999557495116, 0.017770719528198243, 0.017856096267700194, 0.01772163200378418, 0.01734275245666504, 0.018203456878662108, 0.017975296020507812, 0.017812160491943358, 0.017941791534423827, 0.01769375991821289, 0.017700735092163085, 0.017712703704833986, 0.01780726432800293, 0.017840511322021486, 0.017826303482055664, 0.017915552139282226, 0.01802979278564453, 0.018282527923583983, 0.018131711959838866, 0.01870470428466797, 0.01816339111328125, 0.01802444839477539, 0.018506784439086914, 0.017996768951416015, 0.018355871200561525, 0.018198751449584962, 0.017879487991333008, 0.017880256652832032, 0.01791811180114746, 0.01764182472229004, 0.017649824142456055, 0.018350112915039064, 0.01896780776977539, 0.01784480094909668, 0.01785241508483887, 0.01789030456542969, 0.01796112060546875, 0.01816569519042969, 0.018039712905883788, 0.017957984924316408, 0.018069408416748048, 0.018043392181396483, 0.01811097526550293, 0.018145503997802733, 0.018328895568847658, 0.01808252716064453, 0.017961984634399415, 0.01785116767883301, 0.01771321678161621, 0.01761622428894043, 0.017723968505859375, 0.01760665512084961, 0.0176312313079834, 0.01757798385620117, 0.017563648223876953, 0.017737247467041015, 0.01774991989135742, 0.0178156795501709, 0.018004447937011718, 0.019564512252807618, 0.01809619140625, 0.01793222427368164, 0.017922048568725587, 0.017918239593505858, 0.017821407318115233, 0.017894399642944335, 0.018076671600341796, 0.0177096004486084, 0.018045055389404298, 0.01794892883300781, 0.018273344039916994, 0.0182260799407959, 0.018186431884765625, 0.018607168197631835, 0.01838345527648926, 0.01839689636230469, 0.018444095611572266, 0.018524768829345704, 0.018239391326904296, 0.01838703918457031, 0.018227071762084962, 0.018337919235229493, 0.018307071685791015, 0.018126047134399415, 0.01802329635620117, 0.01862646484375, 0.01812895965576172, 0.018005823135375975, 0.018091295242309572, 0.018121536254882813, 0.018019424438476563, 0.01793734359741211, 0.0178787841796875, 0.01794256019592285, 0.018090208053588866, 0.018085599899291992, 0.018186527252197264, 0.01844540786743164, 0.018545568466186522, 0.01833091163635254, 0.018250463485717773, 0.01828668785095215, 0.018319263458251953, 0.018403135299682617, 0.01912339210510254, 0.01840015983581543, 0.018466911315917968, 0.018394912719726562, 0.018311391830444335, 
0.018470943450927733, 0.018417631149291992, 0.018466304779052735, 0.018345567703247072, 0.018347999572753907, 0.018384159088134764, 0.018407072067260742, 0.018435232162475584, 0.018370752334594728, 0.018206943511962892, 0.018423967361450196, 0.018274911880493162, 0.01835100746154785, 0.018342784881591797, 0.018419488906860352, 0.02021107292175293, 0.022358943939208984, 0.018523967742919922, 0.018596031188964843, 0.01852604866027832, 0.018341888427734376, 0.018132543563842772, 0.018364864349365233, 0.01845248031616211, 0.018357887268066406, 0.018315168380737306, 0.018465248107910157, 0.01833123207092285, 0.01829478454589844, 0.018306528091430664, 0.01814860725402832, 0.01808729553222656, 0.018009727478027343, 0.017937088012695314, 0.017721343994140625, 0.017696767807006835, 0.017688575744628905, 0.01769267272949219, 0.017889280319213868, 0.017735103607177734, 0.017760032653808593, 0.017750751495361327, 0.017968223571777343, 0.017849279403686524, 0.017786720275878905, 0.01779840087890625, 0.017646528244018553, 0.017729631423950197, 0.017679391860961916, 0.01776278305053711, 0.017718879699707032, 0.017775136947631835, 0.017823808670043944, 0.017750240325927733, 0.017734912872314452, 0.01780790328979492, 0.018024799346923828, 0.018207679748535155, 0.01813395118713379, 0.018161535263061523, 0.018161792755126954, 0.01802239990234375, 0.017879039764404296, 0.017790016174316407, 0.017719968795776368, 0.017815839767456054, 0.01818828773498535, 0.018121919631958007, 0.018111488342285157, 0.01817350387573242, 0.018432256698608398, 0.017989023208618164, 0.017938880920410155, 0.018484512329101564, 0.017934656143188475, 0.01795907211303711, 0.01817622375488281, 0.018341951370239258, 0.01822265625, 0.018207456588745115, 0.018042560577392577, 0.018071456909179686, 0.018077951431274414, 0.017912832260131836, 0.017520832061767577, 0.017829120635986326, 0.017867103576660156, 0.018158367156982422, 0.018278432846069337, 0.01805308723449707, 0.01814089584350586, 0.018397279739379883, 0.018657184600830077, 0.018361440658569338, 0.018383583068847655, 0.018300960540771485, 0.01830694389343262, 0.01834124755859375, 0.01839606475830078, 0.01824176025390625, 0.018058687210083007, 0.01803094482421875, 0.017928192138671875, 0.01811187171936035, 0.017887872695922853, 0.017954336166381837, 0.017799135208129882, 0.01789148712158203, 0.01803081512451172, 0.018039039611816406, 0.018347391128540037, 0.018573535919189452, 0.017994239807128908, 0.0180467529296875, 0.018268159866333008, 0.01821900749206543, 0.018179264068603516, 0.018279327392578124, 0.018263967514038085, 0.018247903823852538, 0.018475999832153322, 0.018413503646850585, 0.018299776077270506, 0.01829395294189453, 0.01827465629577637, 0.018355743408203125, 0.018383808135986328, 0.018433759689331055, 0.018155807495117186, 0.01822719955444336, 0.01813929557800293, 0.018208608627319336, 0.01836851119995117, 0.01819171142578125, 0.018233535766601562, 0.018231008529663088, 0.01807436752319336, 0.0182043514251709, 0.018065536499023437, 0.01804243278503418, 0.018252544403076172, 0.018210687637329102, 0.018126848220825196, 0.01824723243713379, 0.018319807052612304, 0.01835139274597168, 0.018503423690795898, 0.01827299118041992, 0.01864521598815918, 0.018702367782592773, 0.018557056427001953, 0.018509695053100586, 0.018479103088378905, 0.01848431968688965, 0.01841878318786621, 0.018443552017211914, 0.018407968521118163, 0.01831020736694336, 0.01837766456604004, 0.018507776260375978, 0.01840127944946289, 0.01838051223754883, 0.018469152450561525, 0.01837980842590332, 
0.018364864349365233, 0.018401567459106444, 0.018364479064941406, 0.01824563217163086, 0.01837094306945801, 0.01830019187927246, 0.018290719985961913, 0.01835856056213379, 0.018415872573852538, 0.01844220733642578, 0.018757631301879883, 0.018315263748168945, 0.018427711486816406, 0.018386463165283203, 0.0184550724029541, 0.018435583114624024, 0.01851587104797363, 0.018406112670898436, 0.0183985595703125, 0.01884774398803711, 0.01844291114807129, 0.018275583267211914, 0.018119232177734375, 0.018002111434936522, 0.01790332794189453, 0.017885471343994142, 0.017893375396728514, 0.01783782386779785, 0.017832191467285156, 0.017674240112304687, 0.017749343872070313, 0.017754079818725586, 0.017625791549682617, 0.017657855987548828, 0.017573312759399416, 0.017813568115234376, 0.01793280029296875, 0.017942527770996093, 0.01780531120300293, 0.01769584083557129, 0.018309696197509766, 0.017867103576660156, 0.01776639938354492, 0.017615936279296876, 0.017666080474853515, 0.017697471618652344]",tokens/s,54.9907854933015,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1051.111424,5046.730752,0.0,4651.483136,4638.22848,s,1,14.167484375,14.167484375,0.0,14.167484375,14.167484375,14.167484375,14.167484375,[14.167484375],,kWh,0.00020650797157087102,2.2772040955236278e-05,7.706478387400906e-05,0.0003063447964001164,,MB,1251.110912,6172.901376,0.0,5765.070848,5418.661888,s,10,10.221631225585938,1.0221631225585939,0.007044564666882446,1.0222241821289062,1.0298222412109377,1.0307274047851562,1.0314515356445313,"[1.0081405029296875, 1.0177132568359375, 1.017791259765625, 1.0163858642578125, 1.019808837890625, 1.0292550048828124, 1.0246395263671875, 1.02962109375, 1.026643310546875, 1.031632568359375]",tokens/s,250.44926230482866,kWh,2.973801881458409e-05,3.2795504039942414e-06,1.9803376953803075e-05,5.282094617238141e-05,tokens/kWh,4846562.179415393,MB,1274.10176,6172.901376,0.0,5765.070848,5418.664448,s,10,52.74841650390624,5.274841650390624,0.017688636116568155,5.276070068359376,5.29464609375,5.295215625,5.29567125,"[5.24542919921875, 5.252671875, 5.26422802734375, 5.2634521484375, 5.2657001953125, 5.28643994140625, 5.2933076171875, 5.2868828125, 5.29578515625, 5.29451953125]",tokens/s,11.943486492212442,kWh,0.0001548616572941675,1.708246161493997e-05,0.00010273677663379388,0.00027468089554290136,tokens/kWh,229357.05038925895,,s,630,52.74474177551269,0.08372181234208365,0.0016123843966272529,0.08350317001342773,0.08451623992919921,0.08499251632690429,0.09415560745239258,"[0.09373286437988282, 0.08188425445556641, 0.08277494049072266, 0.08255078125, 0.08146883392333984, 0.08222502136230468, 0.08254438018798828, 0.08226850891113281, 0.08222566223144531, 0.08239523315429688, 0.08211869049072265, 0.08367922973632813, 0.0836648941040039, 0.08332864379882812, 0.083159423828125, 0.08293990325927734, 0.08295219421386718, 0.08243199920654297, 0.08239513397216797, 0.08242572784423828, 
0.0826103057861328, 0.08262973022460937, 0.08279535675048828, 0.08468281555175781, 0.08369971466064453, 0.08407360076904297, 0.08286707305908203, 0.08282931518554687, 0.08255030059814453, 0.08345600128173829, 0.08336022186279297, 0.08388960266113281, 0.08249606323242188, 0.08229865264892579, 0.0836171875, 0.08336380767822266, 0.08358521270751954, 0.08457417297363282, 0.0827992935180664, 0.08306278228759766, 0.0833062744140625, 0.08291964721679687, 0.08293888092041016, 0.08280323028564453, 0.08288508605957032, 0.083378173828125, 0.08305628967285156, 0.08392124938964844, 0.08400272369384766, 0.08334524536132812, 0.08345420837402344, 0.0834326400756836, 0.08384288024902344, 0.08277693176269531, 0.08317938995361328, 0.08288591766357421, 0.08290406036376953, 0.08331257629394531, 0.08328195190429688, 0.08376937866210937, 0.0842260513305664, 0.08376028442382813, 0.08324585723876952, 0.09474403381347657, 0.08245712280273437, 0.0822838363647461, 0.08258834838867188, 0.08222675323486328, 0.08281951904296875, 0.08237184143066406, 0.08274406433105469, 0.08244838714599609, 0.08250326538085938, 0.08283123016357422, 0.0852750701904297, 0.08408803558349609, 0.08409897613525391, 0.0830248031616211, 0.08250588989257812, 0.08279235076904297, 0.08234700775146485, 0.0827585906982422, 0.08258268737792969, 0.08262448120117187, 0.08277024078369141, 0.08378841400146485, 0.0840244140625, 0.08388819122314453, 0.08336653137207031, 0.08340838623046876, 0.08259248352050781, 0.08261734771728516, 0.08312934112548828, 0.08275113677978516, 0.08275593566894532, 0.0827146224975586, 0.0833628158569336, 0.0834420166015625, 0.0834464340209961, 0.08394927978515625, 0.08349314880371093, 0.08389990234375, 0.08259839630126953, 0.08308121490478515, 0.08254838562011718, 0.08253215789794922, 0.08416515350341797, 0.08279039764404297, 0.08401878356933594, 0.0833089599609375, 0.08357039642333984, 0.08373481750488282, 0.08353791809082031, 0.08342118072509766, 0.08315081787109375, 0.0827449951171875, 0.08274278259277344, 0.08333958435058594, 0.08254534149169922, 0.08296636962890624, 0.08361094665527344, 0.08418946838378906, 0.08419987487792968, 0.08418656158447266, 0.08418319702148437, 0.08361615753173827, 0.09531391906738282, 0.08266342163085938, 0.08249932861328126, 0.08223881530761719, 0.08286239624023438, 0.08204723358154296, 0.08270883178710937, 0.0821860122680664, 0.08229199981689453, 0.082430908203125, 0.08282521820068359, 0.08570230102539063, 0.08478755187988281, 0.08360108947753907, 0.08312223815917968, 0.08263814544677735, 0.0827742691040039, 0.08268051147460938, 0.0826060791015625, 0.08261631774902344, 0.08297779083251954, 0.08302285003662109, 0.08396364593505859, 0.08368339538574218, 0.08384121704101563, 0.08333516693115234, 0.08339250946044922, 0.0830750732421875, 0.08344985961914063, 0.08257855987548827, 0.08258854675292969, 0.0828006362915039, 0.08269209289550782, 0.08280604553222656, 0.08355289459228515, 0.08343116760253906, 0.08400931549072266, 0.0841195526123047, 0.0833597412109375, 0.08349612426757813, 0.08328479766845703, 0.08298601531982422, 0.08394786834716797, 0.08350131225585937, 0.08236444854736329, 0.08387824249267578, 0.08419020843505859, 0.08362432098388672, 0.08404188537597657, 0.08434121704101563, 0.08422156524658203, 0.08327926635742187, 0.08301372528076172, 0.08393119812011719, 0.08326172637939454, 0.08341078186035156, 0.0833148193359375, 0.08365318298339844, 0.08426290893554687, 0.08564326477050781, 0.08353177642822265, 0.08437904357910156, 0.0850370864868164, 0.093502685546875, 0.08262319946289062, 
0.08272697448730469, 0.08185855865478515, 0.08269596862792969, 0.08230524444580078, 0.08299314880371093, 0.08247062683105469, 0.0826370849609375, 0.08252620697021484, 0.08317282867431641, 0.08695657348632813, 0.08456396484375, 0.0847093734741211, 0.0830525131225586, 0.08315702056884766, 0.08279596710205078, 0.08251853179931641, 0.08263276672363282, 0.08300112152099609, 0.08184435272216797, 0.08306288146972657, 0.0831448974609375, 0.08414803314208984, 0.08451622772216796, 0.08310198211669922, 0.08334166717529297, 0.08253440093994141, 0.08271459197998046, 0.08262659454345703, 0.08343551635742187, 0.08301158142089844, 0.08328524780273437, 0.08290787506103516, 0.08414825439453125, 0.08377497863769531, 0.08368998718261719, 0.08449219512939453, 0.0836278076171875, 0.08283168029785157, 0.08412764739990235, 0.08323193359375, 0.0832437744140625, 0.08290525054931641, 0.08354608154296875, 0.08372140502929687, 0.08433545684814453, 0.08415805053710937, 0.08418345642089843, 0.0841928939819336, 0.08401548767089843, 0.08317132568359376, 0.08376866912841797, 0.08383554840087891, 0.08307917022705077, 0.08286822509765625, 0.08324294281005859, 0.08347241973876954, 0.0833656997680664, 0.08403782653808593, 0.08403097534179688, 0.08350323486328125, 0.08395174407958984, 0.09424813079833984, 0.08263107299804688, 0.08235049438476562, 0.08203469085693359, 0.08314662170410156, 0.08288182067871094, 0.08285065460205078, 0.0826429443359375, 0.08223232269287109, 0.0826497573852539, 0.08272489929199218, 0.08669580841064453, 0.08457430267333985, 0.08375299072265625, 0.0823967056274414, 0.08287484741210938, 0.0827886734008789, 0.08164940643310546, 0.0830047378540039, 0.0823447036743164, 0.0831219482421875, 0.08300790405273438, 0.08436736297607422, 0.08464383697509766, 0.08426496124267578, 0.08397209930419922, 0.08342733001708984, 0.08272064208984375, 0.08317324829101562, 0.08349065399169922, 0.08281298828125, 0.08227996826171875, 0.08366073608398437, 0.08352243041992187, 0.08410050964355469, 0.08394608306884765, 0.08352687835693359, 0.08399542236328125, 0.08322361755371094, 0.08305350494384765, 0.08349081420898438, 0.08285903930664062, 0.08314115142822266, 0.0830665283203125, 0.08406095886230469, 0.08399052429199219, 0.08406416320800782, 0.08346633911132813, 0.08402329254150391, 0.08387789154052734, 0.08379801940917969, 0.08333055877685547, 0.08345241546630859, 0.08350310516357422, 0.083019775390625, 0.08354611206054688, 0.08403148651123046, 0.0842072296142578, 0.08413426971435548, 0.08406832122802735, 0.08382208251953124, 0.08403408050537109, 0.08356249237060547, 0.09464048004150391, 0.08275360107421875, 0.08243170928955078, 0.08230844879150391, 0.08279750061035156, 0.0830382080078125, 0.08288870239257813, 0.08281702423095703, 0.08288003540039063, 0.08283948516845703, 0.08289311981201172, 0.08619235229492188, 0.08539110565185547, 0.08424678039550781, 0.08404505920410156, 0.0832577896118164, 0.08309161376953125, 0.08295645141601563, 0.08341011047363281, 0.08302674865722656, 0.08274508666992188, 0.08358118438720703, 0.08449024200439453, 0.08418099212646485, 0.08396940612792969, 0.08395225524902344, 0.08351744079589844, 0.08359635162353515, 0.08263775634765624, 0.08344576263427735, 0.08328806304931641, 0.08296857452392578, 0.08333106994628907, 0.08411545562744141, 0.0843563232421875, 0.08447875213623048, 0.08434073638916016, 0.08426207733154296, 0.0841346206665039, 0.08322672271728515, 0.0828310089111328, 0.08379542541503907, 0.08343436431884765, 0.08353913879394531, 0.08412038421630859, 0.0849731216430664, 
0.0843411865234375, 0.08442880249023438, 0.0841890869140625, 0.08400870513916016, 0.08481158447265626, 0.0839234848022461, 0.08333277130126954, 0.08311228942871093, 0.08377318572998046, 0.084500732421875, 0.08421689605712891, 0.08454351806640625, 0.08374291229248047, 0.084457763671875, 0.08406416320800782, 0.0841016616821289, 0.08527590179443359, 0.09516851043701172, 0.08265727996826172, 0.08262044525146485, 0.08278550720214843, 0.08243276977539063, 0.08337129974365234, 0.08266825866699219, 0.08340444946289062, 0.08307542419433593, 0.08304025268554688, 0.08301363372802735, 0.08621414184570313, 0.08516044616699218, 0.08294121551513672, 0.08338505554199219, 0.08289673614501954, 0.08354217529296876, 0.08328396606445312, 0.0830013427734375, 0.08354351806640625, 0.08281683349609376, 0.0836956787109375, 0.08421443176269532, 0.08514790344238281, 0.08464358520507813, 0.08396390533447265, 0.08329945373535157, 0.08472598266601562, 0.08347456359863281, 0.08437814331054687, 0.08338547515869141, 0.08357772827148438, 0.08367922973632813, 0.08362592315673828, 0.08453126525878907, 0.08423014068603515, 0.08380745697021484, 0.0846012191772461, 0.08380210876464844, 0.08429535675048828, 0.0836487045288086, 0.08342550659179687, 0.08331295776367187, 0.08421945953369141, 0.08322000122070312, 0.08412973022460937, 0.08420223999023438, 0.08401299285888672, 0.08435945892333985, 0.08454342651367187, 0.08477005004882812, 0.08379270172119141, 0.08424652862548829, 0.08394137573242187, 0.08443289947509766, 0.08412156677246094, 0.08451634979248047, 0.08407001495361328, 0.08514211273193359, 0.08414553833007812, 0.08437763214111328, 0.0839197769165039, 0.08428953552246093, 0.09392908477783203, 0.08289129638671874, 0.08263382720947265, 0.08307798767089844, 0.08284166717529297, 0.08329567718505859, 0.08275411224365234, 0.08307472229003907, 0.08289929962158203, 0.08225791931152343, 0.08451686096191406, 0.0861338882446289, 0.0846192626953125, 0.08428224182128906, 0.08369939422607423, 0.08298902130126953, 0.08269859313964843, 0.08315904235839844, 0.08288665771484376, 0.08291942596435548, 0.08283084869384766, 0.08316316986083984, 0.08372886657714844, 0.08420146942138672, 0.08523538970947266, 0.08371561431884765, 0.08368787384033204, 0.08320832061767579, 0.08395388793945313, 0.08356012725830078, 0.0835771484375, 0.08306278228759766, 0.08309283447265625, 0.08400086212158203, 0.08376582336425781, 0.08492237091064453, 0.08413129425048828, 0.08426140594482422, 0.08372153472900391, 0.08392774200439453, 0.08409907531738281, 0.08411750030517579, 0.08356249237060547, 0.08320745849609375, 0.08424505615234375, 0.08415001678466796, 0.08441680145263672, 0.08429376220703125, 0.084168701171875, 0.08416595458984374, 0.08390262603759766, 0.08414784240722656, 0.08365872192382813, 0.08387577819824218, 0.08397926330566406, 0.08385740661621094, 0.08415846252441406, 0.08427836608886718, 0.08441744232177735, 0.0841295394897461, 0.08408882904052735, 0.084216064453125, 0.0840847396850586, 0.09591603088378907, 0.08282316589355469, 0.08293170928955078, 0.08317747497558593, 0.08296857452392578, 0.0830135040283203, 0.08340902709960937, 0.08342323303222657, 0.08300482940673828, 0.08303472137451172, 0.08328601837158203, 0.08601805114746094, 0.08506813049316406, 0.08354166412353516, 0.08297792053222657, 0.08307392120361329, 0.08327372741699218, 0.08322866821289063, 0.08354611206054688, 0.0837201919555664, 0.08268185424804687, 0.08351129913330078, 0.08463884735107421, 0.08422284698486328, 0.08500838470458984, 0.08376525115966797, 0.08428749084472656, 
0.08339584350585938, 0.08365033721923829, 0.08385635375976562, 0.0840327377319336, 0.08320899200439454, 0.08331791687011719, 0.08413654327392578, 0.08425286102294922, 0.08424419403076172, 0.08418748474121093, 0.0837918701171875, 0.0840002212524414, 0.08358326721191406, 0.08315110778808593, 0.08414733123779297, 0.08385330963134766, 0.08356047821044922, 0.08436208343505859, 0.084242431640625, 0.08447404479980469, 0.08486441802978516, 0.0841480941772461, 0.0841938247680664, 0.08421311950683594, 0.08370035552978515, 0.08382669067382813, 0.08425167846679688, 0.0842208023071289, 0.08402748870849609, 0.08433869171142579, 0.0840847396850586, 0.08571084594726562, 0.08439926147460937, 0.08427811431884766, 0.0842977294921875, 0.08384307098388671, 0.096570556640625, 0.08266819000244141, 0.08274524688720702, 0.08321663665771484, 0.08266130828857422, 0.0829573745727539, 0.08254566192626953, 0.08302735900878906, 0.0825898208618164, 0.08306121826171875, 0.08286402893066407, 0.08687567901611327, 0.08484921264648437, 0.08342095947265625, 0.08356204986572266, 0.08309622192382812, 0.08263597106933594, 0.08283993530273437, 0.08373462677001953, 0.08279821014404297, 0.08317001342773438, 0.08344739532470703, 0.08518492889404297, 0.08490937805175781, 0.08434758758544922, 0.08422182464599609, 0.08392867279052735, 0.08348694610595703, 0.08338361358642578, 0.08401769256591797, 0.08336227416992187, 0.0832034912109375, 0.08292601776123047, 0.08379804992675781, 0.0849420166015625, 0.0846447982788086, 0.08409420776367188, 0.08451148986816406, 0.08368128204345703, 0.08322649383544922, 0.08468252563476562, 0.08388028717041016, 0.08450048065185548, 0.08355760192871094, 0.0834543685913086, 0.08367926025390625, 0.08439228820800782, 0.08402861022949219, 0.08584889221191407, 0.08421785736083984, 0.08421171569824219, 0.08411254119873048, 0.08425965118408203, 0.08483229064941407, 0.08315833282470703, 0.08396819305419922, 0.08337664031982422, 0.08394054412841796, 0.08453987121582031, 0.08447216033935546, 0.08451891326904297, 0.0843345947265625, 0.08496742248535157]",tokens/s,11.94431859542223,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,824.438784,8493.334528,0.0,8143.241216,7829.257216,s,1,19.839845703125,19.839845703125,0.0,19.839845703125,19.839845703125,19.839845703125,19.839845703125,[19.839845703125],,kWh,0.00035861734559584874,3.9550651732650174e-05,0.00011898481741001721,0.0005171528147385161,,MB,1272.651776,9262.989312,0.0,8847.884288,8509.386752,s,10,17.330203735351564,1.733020373535156,0.008741147815637082,1.7342926635742186,1.7409916137695312,1.741107684326172,1.7412005407714843,"[1.7100670166015626, 1.7279404296875, 1.7330943603515625, 1.7305341796875, 1.7340433349609374, 1.7345419921875, 1.7378045654296874, 1.7412237548828124, 1.73998828125, 
1.7409658203125]",tokens/s,147.71897890489907,kWh,5.059520843125104e-05,5.580261625930519e-06,3.359744354460015e-05,8.97729136017817e-05,tokens/kWh,2851639.650859223,MB,1305.120768,9262.989312,0.0,8847.884288,8509.389312,s,10,88.48499902343751,8.84849990234375,0.020112595712162527,8.854975585937499,8.86634208984375,8.868356103515625,8.869967314453126,"[8.800326171875, 8.83361328125, 8.8403603515625, 8.8380498046875, 8.8522236328125, 8.860541015625, 8.8577275390625, 8.86589453125, 8.865892578125, 8.8703701171875]",tokens/s,7.119850900751307,kWh,0.0002590323581825024,2.8572849366391717e-05,0.00017229608228119946,0.0004599012898300937,tokens/kWh,136985.91718078192,,s,630,88.4807718505861,0.14044566960410465,0.0016321696666458353,0.14024050140380862,0.14170250244140625,0.1422095634460449,0.14898177780151367,"[0.15213587951660157, 0.13840339660644532, 0.1371400909423828, 0.13891641235351562, 0.13776205444335937, 0.13861964416503905, 0.13968118286132813, 0.14189987182617186, 0.13912319946289062, 0.1382129669189453, 0.13916934204101564, 0.13824700927734376, 0.13871717834472655, 0.13931494140625, 0.14091903686523438, 0.14010563659667968, 0.13900592041015625, 0.1387439422607422, 0.13829670715332032, 0.13855807495117187, 0.13981491088867187, 0.1404041290283203, 0.1403705291748047, 0.13867213439941406, 0.13872537231445312, 0.13838131713867188, 0.13860453796386718, 0.1394913330078125, 0.14159075927734374, 0.14049871826171875, 0.14013031005859375, 0.13852639770507813, 0.13914259338378906, 0.1382265625, 0.1390960693359375, 0.1409835205078125, 0.14012086486816405, 0.1400052490234375, 0.1389443817138672, 0.13833445739746095, 0.1390850830078125, 0.1398873291015625, 0.1404231719970703, 0.14093107604980468, 0.14012211608886718, 0.1392801971435547, 0.139040771484375, 0.1384239044189453, 0.13891644287109375, 0.14069480895996095, 0.14092361450195312, 0.14021565246582032, 0.13932550048828124, 0.13918269348144532, 0.1386366729736328, 0.13999320983886718, 0.14140261840820312, 0.14035968017578124, 0.1404805145263672, 0.14019378662109375, 0.13963987731933594, 0.13947955322265626, 0.140233154296875, 0.14823405456542968, 0.13897987365722655, 0.13799139404296876, 0.1398464660644531, 0.1382392578125, 0.13832057189941407, 0.14088383483886718, 0.14189581298828124, 0.1397203826904297, 0.14004978942871094, 0.13960665893554688, 0.1384278106689453, 0.13877340698242188, 0.1412894744873047, 0.14117478942871095, 0.14085104370117188, 0.13962255859375, 0.13888674926757813, 0.1401327667236328, 0.13972889709472655, 0.13925897216796876, 0.14135321044921875, 0.14022518920898439, 0.13912237548828124, 0.139560546875, 0.14010032653808593, 0.1390913543701172, 0.14004838562011718, 0.1410498809814453, 0.13997666931152344, 0.13934632873535155, 0.1401612091064453, 0.14082386779785155, 0.13925447082519532, 0.13991116333007814, 0.14118092346191408, 0.14082432556152344, 0.13960832214355468, 0.14049830627441406, 0.1400482940673828, 0.13994671630859376, 0.139555908203125, 0.14091261291503906, 0.14067401123046874, 0.13897727966308593, 0.14103257751464843, 0.14030058288574218, 0.14151263427734376, 0.1400931854248047, 0.1410671691894531, 0.14087142944335937, 0.13989913940429688, 0.14036099243164063, 0.1400470733642578, 0.1408573455810547, 0.14004838562011718, 0.14024000549316407, 0.13983013916015624, 0.14023414611816407, 0.14066444396972655, 0.14093983459472656, 0.14013687133789063, 0.14088783264160157, 0.14823616027832032, 0.13923735046386718, 0.13941871643066406, 0.1383843231201172, 0.13918199157714845, 0.1383438720703125, 0.14169973754882811, 
0.1421967315673828, 0.13954637145996093, 0.1399947509765625, 0.13792729187011718, 0.13949952697753906, 0.13932899475097657, 0.14086402893066408, 0.14176255798339843, 0.139739013671875, 0.1406231689453125, 0.13902093505859375, 0.13978848266601562, 0.1401200714111328, 0.1406273651123047, 0.14173776245117187, 0.13985670471191405, 0.14084281921386718, 0.13899288940429688, 0.13897340393066407, 0.1395736999511719, 0.1410358123779297, 0.1411788787841797, 0.14041702270507814, 0.14013235473632812, 0.14023680114746093, 0.13912416076660156, 0.1392767028808594, 0.1411790008544922, 0.14141238403320314, 0.1397834930419922, 0.13999276733398439, 0.14069453430175782, 0.13921414184570313, 0.13970649719238282, 0.1415460205078125, 0.14159228515625, 0.14001983642578125, 0.14022674560546874, 0.13975347900390625, 0.14076873779296875, 0.1388109130859375, 0.1416468505859375, 0.1413258514404297, 0.140179931640625, 0.1395240936279297, 0.14045184326171875, 0.13923942565917968, 0.14079493713378907, 0.14156486511230468, 0.14127513122558594, 0.14033692932128905, 0.14009980773925781, 0.14014192199707032, 0.1395145263671875, 0.14037762451171876, 0.14186134338378906, 0.15013641357421875, 0.1386072998046875, 0.13801446533203124, 0.13934521484375, 0.13938502502441405, 0.14001744079589845, 0.14206428527832032, 0.1421167755126953, 0.13961865234375, 0.1387295379638672, 0.1381021728515625, 0.1388382110595703, 0.13868406677246095, 0.1414561309814453, 0.14186819458007813, 0.14007382202148438, 0.1387389373779297, 0.13822642517089845, 0.13877587890625, 0.1400224609375, 0.14071589660644532, 0.141936767578125, 0.14051327514648437, 0.13967155456542968, 0.13887283325195313, 0.13870489501953126, 0.14011187744140624, 0.14125042724609374, 0.14198591613769532, 0.1404451904296875, 0.14037001037597657, 0.13945423889160155, 0.13933151245117187, 0.1393690948486328, 0.14041094970703125, 0.1412464599609375, 0.14050918579101562, 0.13990480041503905, 0.14000877380371093, 0.13950563049316406, 0.14006300354003906, 0.14082275390625, 0.14126124572753906, 0.1405168914794922, 0.1394447021484375, 0.14034944152832032, 0.1396628875732422, 0.13923715209960938, 0.1413369903564453, 0.14116397094726563, 0.14032981872558595, 0.14009138488769532, 0.14007705688476563, 0.140191650390625, 0.14033314514160156, 0.1411004180908203, 0.14093580627441407, 0.14067683410644533, 0.14002117919921875, 0.1405013732910156, 0.1408345947265625, 0.14026138305664063, 0.1412635498046875, 0.15187472534179688, 0.13865989685058594, 0.138994140625, 0.13810508728027343, 0.13912173461914062, 0.1380873565673828, 0.14250550842285156, 0.14305279541015625, 0.1396597442626953, 0.13966114807128907, 0.1382809295654297, 0.13960166931152343, 0.13898294067382813, 0.141529541015625, 0.14210096740722655, 0.14037551879882812, 0.1400773162841797, 0.13875193786621093, 0.13916400146484376, 0.1390032958984375, 0.14141632080078126, 0.140880615234375, 0.14086349487304686, 0.13992550659179687, 0.13951901245117188, 0.13955299377441407, 0.1398198699951172, 0.14080307006835938, 0.1409340515136719, 0.14101913452148437, 0.1397821502685547, 0.1392762908935547, 0.14002902221679686, 0.14087875366210936, 0.1402777557373047, 0.14079795837402342, 0.14117478942871095, 0.1392512969970703, 0.13996588134765625, 0.14006524658203126, 0.14105856323242189, 0.14041477966308594, 0.1409251251220703, 0.14118704223632814, 0.1397821807861328, 0.13927218627929688, 0.14087890625, 0.14133334350585938, 0.14068124389648437, 0.1414246063232422, 0.14101484680175783, 0.1406343994140625, 0.14002694702148438, 0.14090541076660157, 
0.14144511413574218, 0.14136729431152345, 0.1411092529296875, 0.14145945739746094, 0.1408566131591797, 0.14026007080078126, 0.14011506652832031, 0.14040562438964843, 0.1414265594482422, 0.15005081176757812, 0.13825637817382813, 0.13784197998046874, 0.13957379150390625, 0.13935221862792968, 0.13967155456542968, 0.14307484436035156, 0.14245936584472657, 0.13952774047851563, 0.13898591613769531, 0.13809458923339843, 0.13861065673828124, 0.13923667907714843, 0.14254563903808593, 0.1420038146972656, 0.14021379089355468, 0.13953523254394531, 0.1393904266357422, 0.13930120849609376, 0.13919448852539062, 0.14186093139648437, 0.14219587707519532, 0.14026634216308595, 0.13964492797851563, 0.1397760009765625, 0.13953407287597655, 0.14017971801757811, 0.1414983673095703, 0.1420298614501953, 0.14114710998535157, 0.1395589141845703, 0.13989273071289063, 0.14021221923828125, 0.14027366638183594, 0.1420548095703125, 0.14170172119140625, 0.14005014038085936, 0.14024099731445314, 0.13972703552246094, 0.14014874267578126, 0.14042930603027343, 0.14158799743652345, 0.14136982727050781, 0.14089395141601563, 0.13995443725585938, 0.13998226928710938, 0.1398011474609375, 0.14187110900878908, 0.1412894744873047, 0.1414080352783203, 0.14082479858398436, 0.14148403930664064, 0.14023475646972655, 0.14039756774902343, 0.14092982482910157, 0.14170953369140624, 0.1414385986328125, 0.14046861267089844, 0.14121702575683595, 0.14016899108886718, 0.14044053649902344, 0.14195071411132812, 0.1413485107421875, 0.15072032165527344, 0.13880653381347657, 0.13865008544921875, 0.1379886016845703, 0.13773318481445312, 0.13911135864257812, 0.14449459838867187, 0.1433313293457031, 0.13961772155761717, 0.13894099426269532, 0.13923942565917968, 0.13881117248535157, 0.13950303649902343, 0.14222006225585937, 0.14137344360351561, 0.14116038513183593, 0.13957484436035156, 0.13943788146972655, 0.13947769165039062, 0.13951795959472657, 0.14251622009277343, 0.1411928253173828, 0.14085942077636718, 0.14012188720703125, 0.1391614990234375, 0.13991389465332033, 0.1403038330078125, 0.1411011505126953, 0.14100114440917969, 0.14099455261230467, 0.1412232971191406, 0.13986810302734376, 0.13971238708496095, 0.14015367126464845, 0.14151475524902343, 0.1413324737548828, 0.14064230346679688, 0.14014256286621093, 0.13922306823730468, 0.13950909423828126, 0.14099224853515624, 0.14089923095703125, 0.14118707275390624, 0.14125801086425782, 0.1400122528076172, 0.14017666625976563, 0.140042236328125, 0.14163427734375, 0.14086962890625, 0.14112973022460937, 0.14125669860839843, 0.13982301330566407, 0.13960202026367188, 0.1403750762939453, 0.14111843872070312, 0.14066073608398438, 0.14162944030761718, 0.1407093505859375, 0.13955535888671874, 0.1409655303955078, 0.14138607788085938, 0.14053990173339845, 0.14129327392578125, 0.1497270050048828, 0.1389385986328125, 0.1388564453125, 0.13953762817382812, 0.1379254150390625, 0.1398494110107422, 0.14356480407714844, 0.142827392578125, 0.1400345001220703, 0.13927987670898437, 0.1391805419921875, 0.1395281982421875, 0.13992140197753905, 0.14225750732421874, 0.14214620971679687, 0.13972685241699218, 0.13947415161132812, 0.13905520629882812, 0.139517822265625, 0.14061241149902343, 0.14097328186035157, 0.14120169067382812, 0.1409986267089844, 0.14007554626464844, 0.13984153747558595, 0.1393807373046875, 0.1406156768798828, 0.1410987548828125, 0.141481689453125, 0.1409275207519531, 0.1400238037109375, 0.13930876159667968, 0.13997421264648438, 0.141091552734375, 0.14117478942871095, 0.1413158721923828, 0.14058438110351562, 
0.14024716186523437, 0.1396185302734375, 0.14002195739746093, 0.14124826049804687, 0.1406878662109375, 0.14238925170898437, 0.1412095947265625, 0.14041702270507814, 0.14019967651367188, 0.14070101928710937, 0.1412694091796875, 0.14054383850097657, 0.1413802490234375, 0.14083071899414062, 0.14061772155761718, 0.14044364929199218, 0.14064230346679688, 0.14152088928222656, 0.14144857788085938, 0.14105223083496093, 0.14119456481933593, 0.13952511596679687, 0.14059706115722656, 0.14122563171386718, 0.14280758666992188, 0.14157516479492188, 0.1489671630859375, 0.13934617614746095, 0.13807154846191405, 0.13861231994628906, 0.13924855041503906, 0.13915863037109374, 0.1435022430419922, 0.143189697265625, 0.13988691711425782, 0.1390530548095703, 0.1380425567626953, 0.13966000366210937, 0.13976585388183593, 0.1424012145996094, 0.14237222290039062, 0.14077781677246093, 0.1395430145263672, 0.13835997009277343, 0.13971554565429686, 0.14009686279296876, 0.14205609130859376, 0.14201239013671876, 0.14112567138671875, 0.13977804565429688, 0.1396633605957031, 0.1391279296875, 0.14009228515625, 0.14183013916015624, 0.14207589721679686, 0.14121165466308594, 0.14034739685058595, 0.13980262756347656, 0.13923248291015625, 0.14000111389160155, 0.1414235534667969, 0.14240972900390625, 0.14161289978027344, 0.14021443176269532, 0.1406743927001953, 0.13865589904785156, 0.14065890502929687, 0.14117097473144533, 0.14227250671386718, 0.1412833251953125, 0.14036991882324218, 0.13982022094726562, 0.139582275390625, 0.14134786987304687, 0.14202908325195313, 0.14156825256347655, 0.14119775390625, 0.14098226928710939, 0.1401712646484375, 0.1401282501220703, 0.14094876098632814, 0.1412878723144531, 0.14152291870117187, 0.14177877807617187, 0.14070370483398437, 0.14077127075195311, 0.14025709533691405, 0.1412326965332031, 0.1412705535888672, 0.1489877471923828, 0.1398765106201172, 0.13932748413085938, 0.13985516357421876, 0.13885462951660157, 0.13921122741699218, 0.1424752655029297, 0.1421488952636719, 0.1400952911376953, 0.13960694885253908, 0.13870693969726564, 0.13906533813476563, 0.1397955780029297, 0.14144601440429688, 0.14147993469238282, 0.14121507263183594, 0.139993408203125, 0.13937405395507813, 0.13804576110839845, 0.14062649536132812, 0.14144511413574218, 0.14121778869628906, 0.1409249267578125, 0.13985285949707033, 0.13964300537109375, 0.13930723571777343, 0.14147030639648436, 0.14134886169433594, 0.14149017333984376, 0.1408389129638672, 0.1395793914794922, 0.14086326599121093, 0.13977778625488282, 0.14151496887207032, 0.141533447265625, 0.14157005310058593, 0.140260986328125, 0.14065040588378908, 0.1400568389892578, 0.140564697265625, 0.14235562133789062, 0.14007781982421874, 0.141447265625, 0.14062342834472658, 0.14132662963867187, 0.14012022399902344, 0.14108058166503906, 0.14186904907226563, 0.14129327392578125, 0.14117097473144533, 0.1406476745605469, 0.14087551879882812, 0.1407428436279297, 0.14174700927734374, 0.14151026916503906, 0.1414862060546875, 0.1415784912109375, 0.140868896484375, 0.14072291564941405, 0.14068450927734374, 0.14099874877929688, 0.140712646484375, 0.1418997802734375]",tokens/s,7.120191051948063,, 
4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,822.370304,4563.27168,0.0,4194.304,4190.217216,s,1,14.3222841796875,14.3222841796875,0.0,14.3222841796875,14.3222841796875,14.3222841796875,14.3222841796875,[14.3222841796875],,kWh,0.0001973945590750039,2.1766531401032557e-05,6.484310743000432e-05,0.00028400419790604076,,MB,1261.514752,5068.685312,0.0,4653.580288,4635.127808,s,10,9.68378887939453,0.9683788879394533,0.0066262685289509215,0.9698296203613281,0.9727113220214844,0.9749913421630859,0.9768153582763671,"[0.9507345581054687, 0.9681610717773438, 0.9670458374023437, 0.9701126098632813, 0.9718099975585938, 0.9656171264648438, 0.9722046508789063, 0.9712850341796875, 0.969546630859375, 0.9772713623046875]",tokens/s,264.3593361940436,kWh,2.8246840037495805e-05,3.115145426624928e-06,1.8696631118908262e-05,5.005861658302899e-05,tokens/kWh,5114004.690389103,MB,1294.647296,5068.685312,0.0,4653.580288,4635.130368,s,10,49.396622558593755,4.939662255859376,0.010073984830580331,4.93656884765625,4.95697099609375,4.95709755859375,4.95719880859375,"[4.9286748046875, 4.9313232421875, 4.93925146484375, 4.93434521484375, 4.938767578125, 4.929091796875, 4.9343701171875, 4.94663134765625, 4.95722412109375, 4.95694287109375]",tokens/s,12.753908412517488,kWh,0.00014480995308792187,1.597360295289865e-05,9.57515993284935e-05,0.00025653515536931405,tokens/kWh,245580.37633985767,,s,630,49.392901863098196,0.07840143152872722,0.001393014450808697,0.07815524673461914,0.07870554885864257,0.07915288352966308,0.08854910186767578,"[0.08888294219970704, 0.07910137939453125, 0.07842694091796874, 0.0780738525390625, 0.07779718780517578, 0.07795686340332031, 0.07768313598632813, 0.07768643188476562, 0.07806601715087891, 0.07795097351074219, 0.07783993530273438, 0.07785871887207031, 0.07761341094970703, 0.07794636535644531, 0.07780831909179688, 0.07786444854736328, 0.0777159652709961, 0.0780937271118164, 0.07795539093017578, 0.07799407958984375, 0.07776179504394531, 0.07795958709716796, 0.0777852783203125, 0.07801686096191406, 0.07779532623291016, 0.07784857940673828, 0.07806566619873047, 0.07785212707519532, 0.07789212799072266, 0.07791410827636719, 0.07810598754882812, 0.077957763671875, 0.07783628845214843, 0.07794406127929687, 0.07792511749267578, 0.07801036834716797, 0.07807180786132813, 0.07793869018554687, 0.07785266876220703, 0.07813660430908204, 0.07802953338623046, 0.07805542755126953, 0.07795507049560547, 0.07806156921386719, 0.07803903961181641, 0.07802243041992188, 0.07888508605957031, 0.07837427520751954, 0.07797004699707032, 0.0780263671875, 0.07790332794189453, 0.07803997039794922, 0.07812710571289062, 0.0783479995727539, 0.07889539337158204, 0.07844841766357422, 0.07843158721923828, 0.07839990234375, 0.07827670288085938, 0.07831366729736328, 0.07828908538818359, 0.07823075103759766, 0.07819747161865234, 0.08758096313476563, 0.07916819000244141, 0.07881932830810547, 0.07827046203613282, 
0.07824384307861328, 0.07791529846191406, 0.07818073272705078, 0.07810095977783203, 0.0780021743774414, 0.07797760009765625, 0.07815987396240234, 0.07797110748291015, 0.07773824310302735, 0.0778240966796875, 0.07781581115722656, 0.07776790618896484, 0.07774425506591796, 0.07824041748046875, 0.07801856231689454, 0.07821926116943359, 0.077968994140625, 0.07803536224365235, 0.07790502166748046, 0.07832447814941407, 0.0779221420288086, 0.07799552154541016, 0.07898601531982422, 0.07809843444824219, 0.07783219146728515, 0.07784623718261718, 0.07788982391357421, 0.0780083236694336, 0.07798092651367187, 0.07829686737060547, 0.0781908187866211, 0.07803350067138672, 0.07783235168457031, 0.07803606414794922, 0.07780982208251953, 0.07786073303222656, 0.078501953125, 0.07824877166748047, 0.07823081970214844, 0.07817084503173828, 0.07876719665527344, 0.07833487701416016, 0.0780800018310547, 0.07788944244384766, 0.07818153381347656, 0.07806428527832031, 0.07806185913085938, 0.07808204650878907, 0.07806156921386719, 0.07805951690673828, 0.07800556945800781, 0.0779701156616211, 0.07806566619873047, 0.07827030181884766, 0.07824400329589844, 0.07831343841552735, 0.07808617401123047, 0.0786903076171875, 0.07797555541992188, 0.08936624145507813, 0.07957270050048829, 0.07883821105957031, 0.07838719940185547, 0.07808726501464844, 0.07797238159179687, 0.07795830535888672, 0.07814966583251953, 0.07803987121582032, 0.07803289794921875, 0.07799807739257812, 0.07798579406738282, 0.07780556488037109, 0.07788339233398438, 0.07778409576416015, 0.07804000091552735, 0.07802035522460937, 0.07792787170410156, 0.07796412658691407, 0.07819878387451172, 0.07802265930175781, 0.07818854522705078, 0.07843635559082031, 0.0791627197265625, 0.07824861145019531, 0.07805951690673828, 0.07816342163085938, 0.07807030487060547, 0.07798303985595703, 0.0782015380859375, 0.07793561553955078, 0.07791219329833984, 0.0781251220703125, 0.07814812469482423, 0.07810281372070313, 0.07845683288574219, 0.07815987396240234, 0.07819049835205077, 0.07796745300292969, 0.07809228515625, 0.07994108581542969, 0.07835254669189454, 0.07813516998291016, 0.07803545379638673, 0.07809843444824219, 0.07804716491699219, 0.07815379333496093, 0.07843341064453126, 0.0782693099975586, 0.07823503875732422, 0.07828131103515625, 0.07813120269775391, 0.07810867309570313, 0.07826828765869141, 0.07804121398925781, 0.07820492553710938, 0.07818828582763672, 0.07820313262939453, 0.07808604431152344, 0.07834838104248047, 0.07876588439941407, 0.07865302276611329, 0.07826083374023438, 0.08858985900878906, 0.07913145446777343, 0.07845180511474609, 0.08061344146728516, 0.07781462097167968, 0.0777823715209961, 0.0776824951171875, 0.07773065948486328, 0.07783424377441406, 0.0779261474609375, 0.07776895904541016, 0.07945011138916015, 0.07774412536621093, 0.07790796661376953, 0.07776860809326172, 0.07779273223876954, 0.07779801940917969, 0.07815731048583985, 0.0777603530883789, 0.07784515380859375, 0.07778304290771484, 0.07916854095458985, 0.07775536346435546, 0.07787014770507812, 0.0779537582397461, 0.07808432006835937, 0.07795212554931641, 0.07790067291259765, 0.07785654449462891, 0.07793686676025391, 0.07786653137207031, 0.07800380706787109, 0.07781260681152344, 0.07797964477539063, 0.07792845153808593, 0.07781552124023437, 0.07819849395751953, 0.07805190277099609, 0.07839894104003907, 0.07805570983886718, 0.07831372833251952, 0.0783421401977539, 0.07832166290283203, 0.07825612640380859, 0.07814672088623047, 0.0781750717163086, 0.07831142425537109, 0.07828221130371094, 
0.07806591796875, 0.07823564910888672, 0.07838854217529297, 0.07862985229492188, 0.07821011352539063, 0.07831407928466796, 0.0781536636352539, 0.07829341125488282, 0.07847433471679688, 0.07815055847167969, 0.07807341003417968, 0.07830777740478516, 0.07828479766845703, 0.07821107482910156, 0.07811468505859374, 0.09018102264404297, 0.07987260437011719, 0.07920435333251953, 0.07829503631591797, 0.07777484893798828, 0.07782195281982422, 0.07771907043457031, 0.07771798706054688, 0.07780147552490234, 0.0778792953491211, 0.07784223937988281, 0.07788768005371094, 0.0778608627319336, 0.07797760009765625, 0.07789977264404296, 0.07796876525878907, 0.07811097717285156, 0.07793087768554688, 0.07804656219482421, 0.07800262451171874, 0.0779241943359375, 0.07790592193603516, 0.07799231719970703, 0.07778864288330078, 0.07808258819580079, 0.07813680267333985, 0.07833245086669922, 0.07812438201904297, 0.07811068725585937, 0.07828050994873047, 0.07802150726318359, 0.07788543701171875, 0.07798365020751953, 0.07809852600097657, 0.07791136169433593, 0.07816671752929688, 0.0780206069946289, 0.07817215728759766, 0.07792230224609376, 0.07825612640380859, 0.07802470397949218, 0.07832125091552734, 0.07838352203369141, 0.07823155212402344, 0.07815167999267578, 0.07828479766845703, 0.07809638214111328, 0.07841715240478515, 0.0780882568359375, 0.0782384033203125, 0.07811891174316406, 0.07812710571289062, 0.07835033416748047, 0.078129150390625, 0.07806259155273437, 0.08037254333496094, 0.07965827178955077, 0.0785785903930664, 0.07842582702636719, 0.07834371185302734, 0.07862963104248047, 0.07829408264160156, 0.07814012908935547, 0.08839910125732423, 0.07910409545898438, 0.07864179229736327, 0.078129150390625, 0.07768678283691406, 0.07914086151123047, 0.07798492431640625, 0.07789859008789063, 0.07806893157958984, 0.07802963256835938, 0.07821517181396484, 0.07833805084228515, 0.07781903839111329, 0.07788816070556641, 0.07807405090332031, 0.07864729309082032, 0.0779161605834961, 0.0780574722290039, 0.07783376312255859, 0.07792460632324219, 0.07784835052490234, 0.07800057220458985, 0.07783414459228516, 0.07788566589355468, 0.07792217254638673, 0.07798169708251954, 0.07815782165527344, 0.07794073486328125, 0.07878860473632812, 0.07790969848632813, 0.0778427505493164, 0.07788953399658204, 0.07796115112304687, 0.07797964477539063, 0.07788960266113282, 0.07809843444824219, 0.07813734436035157, 0.07801837158203125, 0.07797984313964844, 0.07831346893310547, 0.0779878387451172, 0.07811654663085937, 0.077932861328125, 0.07799788665771484, 0.07790531158447266, 0.07806566619873047, 0.07804771423339844, 0.07801667022705078, 0.07790799713134766, 0.07794700622558594, 0.07782511901855468, 0.07800035095214844, 0.07838585662841797, 0.07822335815429687, 0.07791206359863281, 0.0779530258178711, 0.07790592193603516, 0.0780343017578125, 0.07794137573242188, 0.07815987396240234, 0.07803193664550781, 0.07815058898925781, 0.07810185241699219, 0.08850832366943359, 0.07906015777587891, 0.07836310577392579, 0.07844703674316406, 0.07788748931884766, 0.07790182495117187, 0.07789920043945313, 0.07790819549560547, 0.07794700622558594, 0.07799625396728516, 0.07779328155517579, 0.07794278717041016, 0.07796294403076172, 0.07769884490966797, 0.07768224334716797, 0.07782089233398437, 0.07764582061767578, 0.07803903961181641, 0.07774822235107422, 0.077844482421875, 0.0777252197265625, 0.07779785919189453, 0.07795705413818359, 0.07814559936523438, 0.07782109069824218, 0.07790268707275391, 0.07784857940673828, 0.07786393737792968, 0.0778608627319336, 
0.0781213150024414, 0.07797622680664062, 0.07789100646972656, 0.07807647705078125, 0.07827865600585937, 0.0778076171875, 0.07801548767089844, 0.07781683349609375, 0.07817011260986328, 0.0783556137084961, 0.0782672348022461, 0.07822335815429687, 0.07905484771728516, 0.07819264221191406, 0.07841382598876953, 0.07833328247070312, 0.07826812744140625, 0.07835298919677734, 0.07835887908935547, 0.07815782165527344, 0.07832371520996094, 0.07904665374755859, 0.07874281311035156, 0.0782476806640625, 0.0782548828125, 0.07836675262451172, 0.07876214599609375, 0.07857561492919922, 0.07838719940185547, 0.0783421401977539, 0.07852851104736328, 0.07837081909179687, 0.07835826873779297, 0.07853286743164062, 0.08901900482177734, 0.07928012847900391, 0.07839129638671875, 0.07815577697753906, 0.07798579406738282, 0.07806566619873047, 0.07791001892089844, 0.07804108428955078, 0.07800012969970703, 0.07810457611083985, 0.07821517181396484, 0.07800012969970703, 0.07805872344970703, 0.07809648132324219, 0.07801516723632812, 0.07798960113525391, 0.07794306945800782, 0.07811465454101563, 0.07804665374755859, 0.07814530944824219, 0.07817081451416015, 0.07812326049804688, 0.07828684997558594, 0.07818240356445312, 0.07820281219482422, 0.07827871704101562, 0.07818768310546875, 0.0782979507446289, 0.0780574722290039, 0.07958528137207031, 0.07825408172607422, 0.07818415832519532, 0.07808953857421876, 0.07815471649169922, 0.07811186981201172, 0.07818057250976562, 0.07821993255615234, 0.07831346893310547, 0.07830118560791016, 0.07840563201904296, 0.0783421401977539, 0.07835033416748047, 0.07831139373779297, 0.078349853515625, 0.07836723327636719, 0.081569091796875, 0.07824221038818359, 0.07834243011474609, 0.07846854400634766, 0.07825174713134765, 0.07823363494873047, 0.07845494079589843, 0.07825804901123047, 0.0783572769165039, 0.07817171478271484, 0.07827910614013672, 0.07843020629882813, 0.07840972900390625, 0.07859552001953125, 0.07889529418945312, 0.07888864135742188, 0.07880544281005859, 0.07870428466796875, 0.08914144134521484, 0.08006015777587891, 0.07911148834228515, 0.078910400390625, 0.07861248016357422, 0.07899545288085938, 0.07905894470214844, 0.078561279296875, 0.07848960113525391, 0.08080384063720703, 0.07856742095947265, 0.07839539337158204, 0.07808614349365234, 0.07829644775390625, 0.07805587005615235, 0.07839762878417969, 0.07798579406738282, 0.07896614074707031, 0.07945689392089844, 0.07844767761230469, 0.07810553741455079, 0.07831346893310547, 0.07816998291015625, 0.07843852996826171, 0.07832575988769531, 0.07809584045410156, 0.07806822204589844, 0.07948191833496093, 0.07819977569580078, 0.07827996826171875, 0.0782894744873047, 0.07869660949707032, 0.0782008285522461, 0.0783682861328125, 0.07826480102539063, 0.07827414703369141, 0.07834255981445312, 0.07834419250488281, 0.07814044952392578, 0.07809251403808594, 0.07824028778076172, 0.07836048126220703, 0.07837113952636719, 0.07855104064941407, 0.07871692657470702, 0.0782676773071289, 0.07860707092285156, 0.07860953521728516, 0.07816419219970704, 0.07852256011962891, 0.07824617767333984, 0.07835052490234375, 0.07831705474853516, 0.07849971008300781, 0.07827519989013672, 0.07836988830566406, 0.07890755462646484, 0.07870130920410157, 0.07834127807617187, 0.07839625549316406, 0.07847897338867188, 0.07838553619384765, 0.0782765121459961, 0.08856575775146484, 0.07917724609375, 0.0785412826538086, 0.07825196838378906, 0.07786886596679687, 0.0786250228881836, 0.07846649932861328, 0.07828451538085937, 0.07803734588623047, 0.07810304260253906, 
0.07808163452148438, 0.0780251235961914, 0.07793814086914062, 0.07825667572021484, 0.07834563446044922, 0.07866134643554687, 0.07843222045898438, 0.07849452972412109, 0.07923081970214843, 0.07898076629638671, 0.07863945770263672, 0.07840998077392577, 0.07826227569580078, 0.07833805084228515, 0.07829708862304688, 0.07847731018066406, 0.07851538848876953, 0.07873379516601563, 0.07847971343994141, 0.0785980453491211, 0.07847731018066406, 0.07855318450927734, 0.07864115142822266, 0.07844659423828125, 0.0784158706665039, 0.07843635559082031, 0.07847526550292969, 0.07835350036621094, 0.07836150360107422, 0.07836038208007813, 0.07831775665283203, 0.07861248016357422, 0.07833958435058594, 0.0786328353881836, 0.07846361541748047, 0.07846495819091796, 0.0782889633178711, 0.07859168243408203, 0.07837318420410157, 0.07863091278076172, 0.07841535949707032, 0.07836278533935546, 0.07929257965087891, 0.07885990142822266, 0.07849222564697265, 0.07841792297363281, 0.07845216369628906, 0.0785393295288086, 0.07845683288574219, 0.07851321411132813, 0.07876847839355469, 0.08118688201904296, 0.07845072174072265]",tokens/s,12.754869145898033,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,819.122176,1266.548736,0.0,864.026624,816.853504,s,1,8.6759580078125,8.6759580078125,0.0,8.6759580078125,8.6759580078125,8.6759580078125,8.6759580078125,[8.6759580078125],,kWh,4.6099798266664034e-05,5.0782077941143054e-06,1.4546956081995521e-05,6.572496214277387e-05,,MB,1349.849088,1400.766464,0.0,983.564288,914.317824,s,10,1.5170658111572266,0.15170658111572263,0.0006555327350934891,0.1517241744995117,0.1524874313354492,0.152711092376709,0.1528900212097168,"[0.15293475341796875, 0.1510603790283203, 0.1512296905517578, 0.15048704528808593, 0.15165657043457031, 0.151932861328125, 0.1517917785644531, 0.15160380554199218, 0.15193119812011718, 0.15243772888183593]",tokens/s,1687.4679932620838,kWh,4.565467445181781e-06,5.034898890793193e-07,3.024260231906245e-06,8.093217566167344e-06,tokens/kWh,31631424.44979795,MB,1386.98752,1400.766464,0.0,983.564288,914.320384,s,10,26.656374999999993,2.6656375,0.02103091357439614,2.6599207763671875,2.6738716064453123,2.7002971313476563,2.7214375512695312,"[2.652419921875, 2.667999267578125, 2.72672265625, 2.652312744140625, 2.659500732421875, 2.656264404296875, 2.66656103515625, 2.6603408203125, 2.652974853515625, 2.661278564453125]",tokens/s,23.634121293686782,kWh,7.736648160773547e-05,8.53349058031665e-06,3.238604587409224e-05,0.00011828601806214434,tokens/kWh,532607.3278322843,,s,630,26.651494781494126,0.04230395997062562,0.0006855351394110717,0.04210579109191895,0.04296550407409668,0.04339107666015625,0.04571927845001223,"[0.042108638763427735, 0.042033313751220706, 0.04182233428955078, 0.04179510498046875, 0.041873790740966796, 0.041893985748291014, 0.0419760627746582, 0.04198988723754883, 0.04219827270507812, 0.041762561798095704, 
0.04173871994018555, 0.04178179168701172, 0.04187136077880859, 0.04200601577758789, 0.041974273681640625, 0.04179868698120117, 0.04179452896118164, 0.04176003265380859, 0.04186345672607422, 0.04173388671875, 0.042516895294189457, 0.04200624084472656, 0.0418792953491211, 0.04166313552856445, 0.04185513687133789, 0.04172745513916016, 0.04165071868896485, 0.0417259521484375, 0.04218246459960937, 0.04532806396484375, 0.04344473648071289, 0.042638526916503904, 0.042289985656738284, 0.042248191833496096, 0.04209372711181641, 0.04191727828979492, 0.04201033782958984, 0.042008705139160156, 0.04190588760375977, 0.04242681503295898, 0.04238131332397461, 0.04212531280517578, 0.04195238494873047, 0.04205862426757812, 0.04256739044189453, 0.04224848175048828, 0.04215760040283203, 0.04200281524658203, 0.04248175811767578, 0.04369347381591797, 0.041939552307128904, 0.041993598937988284, 0.042006526947021484, 0.04203993606567383, 0.041902080535888675, 0.042077663421630856, 0.04202755355834961, 0.041885696411132815, 0.042024959564208986, 0.0417894401550293, 0.041777153015136716, 0.04180889511108398, 0.04173516845703125, 0.041655902862548826, 0.04224998474121094, 0.042166942596435546, 0.0420843505859375, 0.04197580718994141, 0.042062942504882815, 0.042011871337890624, 0.04202463912963867, 0.04177872085571289, 0.041966049194335935, 0.04172307205200195, 0.04188796615600586, 0.042007007598876954, 0.0423438720703125, 0.04245398330688477, 0.04199190521240234, 0.041826080322265625, 0.04192387390136719, 0.041900993347167965, 0.041955265045166015, 0.04257388687133789, 0.04198508834838867, 0.0419780158996582, 0.04193360137939453, 0.04184473419189453, 0.04226819229125976, 0.04276886367797852, 0.04303676986694336, 0.04262643051147461, 0.042582527160644534, 0.042484832763671876, 0.042382080078125, 0.04281520080566406, 0.043145694732666016, 0.04271664047241211, 0.04270284652709961, 0.04252121734619141, 0.04284406280517578, 0.04281139373779297, 0.04236236953735351, 0.04231174468994141, 0.04235513687133789, 0.042264575958251956, 0.042259681701660154, 0.042390304565429686, 0.04224204635620117, 0.0422089614868164, 0.04218502426147461, 0.04212227249145508, 0.042392543792724606, 0.04259401702880859, 0.042491584777832034, 0.04255807876586914, 0.042454975128173825, 0.04235865783691406, 0.04252687835693359, 0.04303257751464844, 0.042698177337646484, 0.04288937759399414, 0.04260291290283203, 0.04268751907348633, 0.042619873046875, 0.04280883026123047, 0.0435972785949707, 0.04296352005004883, 0.04294652938842773, 0.04276201629638672, 0.04261312103271484, 0.042831390380859376, 0.04290185546875, 0.04291561508178711, 0.042921184539794925, 0.0427344970703125, 0.042686527252197265, 0.0436383056640625, 0.046125438690185545, 0.04312310409545898, 0.04299753570556641, 0.043472801208496094, 0.043458560943603515, 0.04312473678588867, 0.04327414321899414, 0.04338220977783203, 0.043260383605957034, 0.04329081726074219, 0.04342169570922851, 0.044014625549316407, 0.043523040771484375, 0.043259422302246095, 0.04312723159790039, 0.04276841735839844, 0.04287692642211914, 0.04346233749389648, 0.04362070465087891, 0.04352959823608398, 0.04334371185302734, 0.04339788818359375, 0.04393369674682617, 0.04323942565917969, 0.0434516487121582, 0.0429835205078125, 0.04329948806762695, 0.04277657699584961, 0.04315135955810547, 0.04279935836791992, 0.04281727981567383, 0.04299529647827149, 0.04304323196411133, 0.04283919906616211, 0.047584095001220704, 0.042893310546875, 0.04260150527954101, 0.04263638305664062, 0.042610431671142576, 0.04273904037475586, 
0.0428469123840332, 0.0426673583984375, 0.042809440612792966, 0.04262364959716797, 0.04246527862548828, 0.04604707336425781, 0.04336563110351563, 0.04364585494995117, 0.04338275146484375, 0.04340124893188477, 0.04309929656982422, 0.0426668815612793, 0.042528736114501954, 0.042242366790771486, 0.04210214233398438, 0.043380542755126955, 0.04213100814819336, 0.04193584060668945, 0.04177510452270508, 0.04219206237792969, 0.04186352157592774, 0.04190460968017578, 0.04208844757080078, 0.04227174377441406, 0.04221027374267578, 0.04205161666870117, 0.04218675231933594, 0.04228812789916992, 0.04212428665161133, 0.042143329620361325, 0.042068382263183594, 0.042204959869384766, 0.042000606536865236, 0.04191580963134765, 0.04190473556518555, 0.041859073638916014, 0.042081951141357425, 0.041961311340332035, 0.042141311645507815, 0.04272012710571289, 0.04187750244140625, 0.04189120101928711, 0.041808513641357424, 0.04223590469360351, 0.04227587127685547, 0.042119136810302736, 0.042126335144042966, 0.04210892868041992, 0.042016769409179686, 0.04196966552734375, 0.041885089874267575, 0.04207059097290039, 0.04197788619995117, 0.042049537658691405, 0.0420145263671875, 0.04241999816894531, 0.041898399353027346, 0.04189184188842773, 0.04189388656616211, 0.041926143646240234, 0.042094398498535156, 0.04230192184448242, 0.042066143035888674, 0.04274995040893555, 0.04197100830078125, 0.041858943939208984, 0.041869152069091795, 0.04203619384765625, 0.04177920150756836, 0.042051422119140626, 0.041947296142578125, 0.041973758697509765, 0.041902080535888675, 0.041879264831542966, 0.04289295959472656, 0.043281055450439455, 0.04211539077758789, 0.04205072021484375, 0.041906814575195316, 0.04188934326171875, 0.04207478332519531, 0.04207001495361328, 0.04290313720703125, 0.042757568359375, 0.0421426887512207, 0.04214476776123047, 0.0422553596496582, 0.04226047897338867, 0.04210681533813477, 0.04216569519042969, 0.04211328125, 0.04337088012695312, 0.043431007385253906, 0.042324897766113284, 0.04213897705078125, 0.04215584182739258, 0.042016639709472656, 0.04218159866333008, 0.04225651168823242, 0.04212723159790039, 0.04200223922729492, 0.042127552032470705, 0.04235599899291992, 0.042261215209960935, 0.04221952056884765, 0.042211231231689454, 0.042474815368652344, 0.04311939239501953, 0.042001567840576175, 0.041974143981933595, 0.041882080078125, 0.04214681625366211, 0.04198912048339844, 0.04197990417480469, 0.04186521530151367, 0.042102783203125, 0.04188313674926758, 0.041976318359375, 0.04187145614624024, 0.04189980697631836, 0.04182233428955078, 0.04192870330810547, 0.04184463882446289, 0.04216553497314453, 0.042052478790283204, 0.04197129440307617, 0.04186540985107422, 0.04189132690429687, 0.04197024154663086, 0.042371295928955076, 0.04249375915527344, 0.04274515151977539, 0.04224895858764648, 0.04191641616821289, 0.04231782531738281, 0.04197990417480469, 0.04196352005004883, 0.042229759216308595, 0.04262092971801758, 0.04308303833007812, 0.042164737701416016, 0.042125537872314454, 0.04186521530151367, 0.042039295196533204, 0.04194303894042969, 0.04196124649047851, 0.041853153228759765, 0.04181532669067383, 0.04191468811035156, 0.04197622299194336, 0.042616832733154295, 0.04274288177490235, 0.04205865478515625, 0.04189184188842773, 0.041963104248046876, 0.042248416900634765, 0.04220332717895508, 0.042254337310791014, 0.04199423980712891, 0.04217446517944336, 0.04222512054443359, 0.041899551391601564, 0.041864192962646485, 0.041902080535888675, 0.04181401443481445, 0.04191436767578125, 0.041885696411132815, 
0.04197753524780273, 0.04173651123046875, 0.041842208862304685, 0.04179328155517578, 0.04198201751708985, 0.04190588760375977, 0.0422999038696289, 0.04191891098022461, 0.04270284652709961, 0.041983486175537106, 0.0420068473815918, 0.04222972869873047, 0.04203046417236328, 0.041958240509033205, 0.04193801498413086, 0.04195625686645508, 0.04204339218139649, 0.04226662445068359, 0.04436172866821289, 0.04238905715942383, 0.04205964660644531, 0.04256345748901367, 0.04263801574707031, 0.042434558868408204, 0.042390785217285155, 0.04246195220947266, 0.042188800811767575, 0.041817665100097656, 0.04227731323242188, 0.04202608108520508, 0.04195625686645508, 0.04258771133422851, 0.041828800201416015, 0.04214313507080078, 0.04215049743652344, 0.04245094299316406, 0.042188800811767575, 0.0421847038269043, 0.042024959564208986, 0.041875457763671874, 0.041990142822265625, 0.04238751983642578, 0.04194297790527344, 0.04204339218139649, 0.04215990447998047, 0.04229372787475586, 0.042108673095703125, 0.042331966400146484, 0.04226067352294922, 0.04215398406982422, 0.046927520751953125, 0.04285475158691406, 0.04212940979003906, 0.04216390228271484, 0.04208812713623047, 0.042427009582519534, 0.042003456115722655, 0.04192153549194336, 0.04185273742675781, 0.041936767578125, 0.0419249267578125, 0.0421497917175293, 0.04200252914428711, 0.042188800811767575, 0.04205363082885742, 0.04219004821777344, 0.04214815902709961, 0.042189281463623045, 0.04194454574584961, 0.04279555130004883, 0.0419587516784668, 0.042027328491210936, 0.04203139114379883, 0.04197123336791992, 0.042840801239013675, 0.04272723388671875, 0.04212092971801758, 0.041957664489746097, 0.041973758697509765, 0.042501758575439454, 0.042272991180419925, 0.04202921676635742, 0.04200396728515625, 0.04196198272705078, 0.04211891174316406, 0.04271948623657226, 0.042147838592529296, 0.042280960083007815, 0.04211916732788086, 0.04213926315307617, 0.045879070281982424, 0.0432523193359375, 0.04213452911376953, 0.04222252655029297, 0.04201232147216797, 0.04208636856079102, 0.041823646545410154, 0.0417815055847168, 0.042322273254394534, 0.041984001159667966, 0.04214169692993164, 0.04225632095336914, 0.041936958312988284, 0.04176617431640625, 0.04181270217895508, 0.04187142562866211, 0.041906112670898436, 0.04185497665405274, 0.04191606521606445, 0.041828704833984376, 0.041901729583740235, 0.04185696029663086, 0.04195100784301758, 0.0420214729309082, 0.04204336166381836, 0.04213945770263672, 0.042218753814697266, 0.04197795104980469, 0.04214409637451172, 0.042637889862060546, 0.042074111938476565, 0.04188896179199219, 0.04217689514160156, 0.04196710586547851, 0.042200000762939456, 0.0421429443359375, 0.042355487823486325, 0.04196303939819336, 0.04197833633422852, 0.0421212158203125, 0.041999423980712894, 0.047963008880615235, 0.04376553726196289, 0.04234377670288086, 0.04224710464477539, 0.04196966552734375, 0.04194246292114258, 0.04197593688964844, 0.04197737503051758, 0.042167198181152346, 0.04199603271484375, 0.04224339294433594, 0.042068767547607425, 0.04190969467163086, 0.041864990234375, 0.041883647918701174, 0.041947071075439456, 0.0419317741394043, 0.04177510452270508, 0.043018142700195314, 0.042495521545410156, 0.043656993865966796, 0.04252473449707031, 0.04223664093017578, 0.042659233093261716, 0.04215254211425781, 0.04226233673095703, 0.042041534423828124, 0.04187091064453125, 0.04189177703857422, 0.04185488128662109, 0.04186550521850586, 0.042071006774902345, 0.04199663925170898, 0.041828929901123045, 0.041957374572753905, 0.04179334259033203, 
0.04189225769042969, 0.04185475158691406, 0.041826305389404295, 0.04196147155761719, 0.0420843505859375, 0.042008575439453126, 0.042041343688964845, 0.04196063995361328, 0.04210675048828125, 0.044539905548095705, 0.04187180709838867, 0.04306288146972656, 0.04208323287963867, 0.04187862396240234, 0.04181647872924805, 0.04179516983032226, 0.041696159362792966, 0.042162174224853514, 0.041734432220458986, 0.04215763092041016, 0.04185513687133789, 0.041837600708007815, 0.04177791976928711, 0.04498995208740234, 0.04263974380493164, 0.04224240112304688, 0.04184182357788086, 0.04190012741088867, 0.04203939056396484, 0.04192934417724609, 0.04262102508544922, 0.042983360290527343, 0.04242432022094727, 0.042210880279541015, 0.04215033721923828, 0.04195328140258789, 0.04200201416015625, 0.041822624206542966, 0.04282316970825195, 0.04225894546508789, 0.04189798355102539, 0.0421130256652832, 0.04193689727783203, 0.041852928161621096, 0.04185209655761719, 0.04172473526000976, 0.04184630584716797, 0.042116863250732425, 0.041898719787597655, 0.04172499084472656, 0.0419194221496582, 0.04182777786254883, 0.041871936798095706, 0.041981952667236325, 0.04184665679931641, 0.041938751220703126, 0.041885887145996094, 0.04187491226196289, 0.041683361053466796, 0.041807998657226564, 0.04183148956298828, 0.042163135528564454, 0.04190569686889648, 0.04197411346435547, 0.04180112075805664, 0.04190691375732422, 0.041838623046875, 0.04202204895019531, 0.04192694473266602, 0.041915969848632814, 0.04173923110961914, 0.04187955093383789, 0.0416317138671875, 0.04657702255249024, 0.04283561706542969, 0.04259328079223633, 0.042342430114746095, 0.04277648162841797, 0.04241731262207031, 0.042281089782714845, 0.04205033493041992, 0.04194508743286133, 0.04185702514648437, 0.04225462341308594, 0.042133216857910154, 0.04240911865234375, 0.0420667839050293, 0.04203724670410156, 0.04194879913330078, 0.04203558349609375, 0.042210880279541015, 0.041935295104980466, 0.041932224273681644, 0.04185964965820312, 0.04173148727416992, 0.04187340927124023, 0.04210287857055664, 0.042097152709960936, 0.04222796630859375, 0.042288894653320315, 0.042692607879638675, 0.0421580810546875, 0.04210483169555664, 0.04229119873046875, 0.04210675048828125, 0.04209654235839844, 0.04206409454345703, 0.04205881500244141, 0.04211193466186523, 0.04272537612915039, 0.04486150360107422, 0.04289529418945313, 0.04230553436279297, 0.04246732711791992, 0.04258947372436524, 0.04229808044433594, 0.04223385620117188, 0.04221283340454102]",tokens/s,23.638448993767128,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1045.479424,2578.382848,0.0,2183.135232,2081.564672,s,1,10.5770791015625,10.5770791015625,0.0,10.5770791015625,10.5770791015625,10.5770791015625,10.5770791015625,[10.5770791015625],,kWh,0.00010194741089165973,1.1238392177900514e-05,3.7579752286007184e-05,0.00015076555535556744,,MB,1333.641216,3134.128128,0.0,2726.2976,2478.999552,s,10,3.938599304199219,0.3938599304199219,0.0017934917827119408,0.3933694305419922,0.39635830993652343,0.3964963394165039,0.3966067630004883,"[0.3938117370605469, 0.39111972045898435, 0.3923058166503906, 0.39292218017578123, 0.39435189819335936, 0.3929271240234375, 0.3923180541992187, 0.3966343688964844, 0.3958807678222656, 0.39632763671875]",tokens/s,649.977264067102,kWh,1.18270040326679e-05,1.304301034693343e-06,7.834928490159654e-06,2.0966233557520897e-05,tokens/kWh,12210109.140378674,MB,1336.283136,3134.128128,0.0,2726.2976,2479.002112,s,10,29.3740537109375,2.93740537109375,0.008384870051051997,2.9368660888671876,2.9470647216796877,2.9493862426757813,2.951243459472656,"[2.931232666015625, 2.943474609375, 2.942299072265625, 2.951707763671875, 2.9376552734375, 2.936076904296875, 2.92938134765625, 2.946548828125, 2.933615966796875, 2.922061279296875]",tokens/s,21.447499422438177,kWh,8.554523406858199e-05,9.43588446525466e-06,5.5108344086640394e-05,0.00015008946262047703,tokens/kWh,419749.65397340804,,s,630,29.3706841659546,0.04662013359675332,0.0010946513975805792,0.04650251197814942,0.046934130477905275,0.047460646247863766,0.053646552581787126,"[0.055274463653564455, 0.04717689514160156, 0.04647200012207031, 0.046667934417724606, 0.04673110580444336, 0.04661814498901367, 0.046505664825439455, 0.04643849563598633, 0.046566078186035156, 0.046538177490234374, 0.04629766464233399, 0.04651580810546875, 0.04624220657348633, 0.04637696075439453, 0.04657356643676758, 0.046706687927246096, 0.04660784149169922, 0.04659568023681641, 0.04652742385864258, 0.04671078491210937, 0.046898303985595705, 0.04666419219970703, 0.04669683074951172, 0.04677983856201172, 0.04677280044555664, 0.04683270263671875, 0.046795711517333985, 0.046683616638183593, 0.0466910400390625, 0.04672198486328125, 0.04665977478027344, 0.04673411178588867, 0.046415775299072266, 0.04641996765136719, 0.04639670562744141, 0.04617903900146485, 0.04619878387451172, 0.04626153564453125, 0.04617113494873047, 0.04588246536254883, 0.04555948638916016, 0.04559503936767578, 0.0457938232421875, 0.045486209869384765, 0.04540790557861328, 0.04561100769042969, 0.04621657562255859, 0.0463246078491211, 0.04655628967285156, 0.04625084686279297, 0.047179489135742186, 0.047085758209228515, 0.046155872344970705, 0.04644659042358398, 0.0461187858581543, 0.04601663970947266, 0.04620083236694336, 0.046380126953125, 0.04631228637695312, 0.04603696060180664, 0.04574745559692383, 0.045652801513671876, 0.0457933120727539, 0.05317043304443359, 0.04674764633178711, 0.04611481475830078, 0.046482719421386716, 0.04663894271850586, 0.046777217864990235, 0.047067134857177735, 0.046929920196533206, 0.046822689056396485, 0.046887649536132815, 0.04668236923217774, 0.04716313552856445, 0.04681942367553711, 0.04751353454589844, 0.047610145568847656, 0.04680672073364258, 0.046787681579589846, 0.046776737213134766, 0.04670515060424805, 0.04679884719848633, 0.04666995239257812, 0.046884735107421874, 0.04641296005249024, 0.04611932754516602, 0.046026657104492184, 0.04617020797729492, 0.04643475341796875, 0.046473567962646484, 0.04660134506225586, 0.04678915023803711, 
0.047500415802001955, 0.04629721450805664, 0.046418014526367186, 0.04607657623291016, 0.045719520568847656, 0.046366111755371094, 0.0456808967590332, 0.04589920043945313, 0.046591007232666015, 0.046454689025878904, 0.046364223480224606, 0.04667030334472656, 0.04705072021484375, 0.048023551940917966, 0.04751795196533203, 0.0461841926574707, 0.04625420761108399, 0.045894718170166014, 0.04598255920410156, 0.04637488174438477, 0.04639916610717774, 0.046502559661865235, 0.046507678985595706, 0.04635238265991211, 0.046350017547607425, 0.047669151306152346, 0.046520736694335936, 0.04705811309814453, 0.04649657440185547, 0.04643635177612305, 0.046443870544433594, 0.04643292617797851, 0.04671078491210937, 0.0531517448425293, 0.04692089462280274, 0.046750720977783204, 0.04663065719604492, 0.04665507125854492, 0.04672876739501953, 0.0469922866821289, 0.04702406311035156, 0.04661868667602539, 0.04665753555297852, 0.04635174560546875, 0.046473567962646484, 0.046596641540527346, 0.04647091293334961, 0.0462042236328125, 0.04593670272827149, 0.04610240173339844, 0.046051361083984374, 0.045856544494628906, 0.04601103973388672, 0.04621500778198242, 0.04623193740844726, 0.046146720886230466, 0.04636127853393555, 0.0461080322265625, 0.04627132797241211, 0.046171424865722656, 0.04638560104370117, 0.046653728485107425, 0.04651007843017578, 0.04660134506225586, 0.047352703094482425, 0.046577247619628906, 0.04649001693725586, 0.04689715194702149, 0.046268062591552736, 0.04622489547729492, 0.04626512145996094, 0.0460882568359375, 0.046452320098876954, 0.04612752151489258, 0.04610367965698242, 0.046504833221435546, 0.04641094589233399, 0.04657833480834961, 0.046792606353759765, 0.04641203308105469, 0.05075961685180664, 0.04893443298339844, 0.046271007537841795, 0.04631177520751953, 0.046706336975097656, 0.04658585739135742, 0.04681235122680664, 0.0466126708984375, 0.04705731201171875, 0.04673763275146484, 0.04692892837524414, 0.04660732650756836, 0.04663296127319336, 0.04647670364379883, 0.04652412796020508, 0.046578559875488285, 0.053878719329833985, 0.04689686584472656, 0.046040481567382815, 0.04676492691040039, 0.0466431999206543, 0.050100223541259765, 0.04675529479980469, 0.04656387329101563, 0.04658790588378906, 0.04649369430541992, 0.04673535919189453, 0.046663681030273435, 0.04648729705810547, 0.046735614776611326, 0.04653468704223633, 0.046630878448486325, 0.04680534362792969, 0.04664115142822266, 0.04652934265136719, 0.04639625549316406, 0.047083518981933595, 0.04674764633178711, 0.046615745544433596, 0.04669462585449219, 0.046618560791015624, 0.046676639556884766, 0.04672480010986328, 0.046681854248046876, 0.04711888122558594, 0.047048736572265625, 0.04653247833251953, 0.046773536682128906, 0.04650675201416016, 0.04658595275878906, 0.04702207946777344, 0.046837760925292966, 0.046780193328857425, 0.04669257736206055, 0.04666163253784179, 0.0468047981262207, 0.04668230438232422, 0.04666147232055664, 0.04684406280517578, 0.04671619033813477, 0.046871265411376956, 0.04680089569091797, 0.046663681030273435, 0.046657470703125, 0.04680710220336914, 0.046565376281738284, 0.04677344131469727, 0.04676051330566406, 0.04664713668823242, 0.04664976119995117, 0.04665135955810547, 0.04662275314331055, 0.046868480682373044, 0.04661248016357422, 0.04650393676757812, 0.04650723266601563, 0.04638595199584961, 0.04641996765136719, 0.04661270523071289, 0.053278942108154294, 0.047230400085449216, 0.04696886444091797, 0.046320159912109374, 0.04601241683959961, 0.04637491226196289, 0.04706304168701172, 0.046622718811035156, 
0.04655513763427734, 0.04652236938476562, 0.04671920013427734, 0.04635145568847656, 0.04640652847290039, 0.047839038848876955, 0.046153728485107424, 0.046009662628173825, 0.046195392608642576, 0.046222816467285155, 0.046373409271240236, 0.046647296905517575, 0.04621516799926758, 0.046059200286865234, 0.04614393615722656, 0.04647910308837891, 0.04640131378173828, 0.046070110321044924, 0.04576976013183594, 0.046726112365722654, 0.04682547378540039, 0.04806278228759766, 0.047351486206054685, 0.046862335205078126, 0.04665340805053711, 0.04648553466796875, 0.04661862564086914, 0.04681670379638672, 0.04662931060791015, 0.04686985778808594, 0.04687478256225586, 0.04680563354492188, 0.04680031967163086, 0.046707263946533205, 0.04672691345214844, 0.04836172866821289, 0.048070655822753904, 0.046300830841064455, 0.04581343841552735, 0.046030815124511716, 0.045763233184814456, 0.04655241775512695, 0.04630729675292969, 0.04649852752685547, 0.04611670303344727, 0.04663107299804688, 0.045494529724121095, 0.0453752326965332, 0.04572345733642578, 0.046081630706787106, 0.04611894226074219, 0.045969696044921876, 0.04633536148071289, 0.04613580703735352, 0.04680745697021484, 0.05426502227783203, 0.04705974578857422, 0.04629094314575195, 0.046174144744873045, 0.045776958465576174, 0.04534716796875, 0.04581763076782226, 0.045636768341064456, 0.04623215866088867, 0.046159999847412106, 0.046321727752685546, 0.04657478332519531, 0.04656534576416015, 0.0465948486328125, 0.04638544082641602, 0.046120670318603514, 0.04619820785522461, 0.046617153167724606, 0.046497791290283204, 0.046274208068847654, 0.046598495483398436, 0.04653081512451172, 0.04658560180664063, 0.04642403030395508, 0.0466473274230957, 0.046330974578857424, 0.04636156845092773, 0.0463647346496582, 0.04688883209228516, 0.04652019119262695, 0.04645286560058594, 0.047110145568847656, 0.04661455917358399, 0.04657183837890625, 0.04653430557250977, 0.04639968109130859, 0.04644025421142578, 0.04658585739135742, 0.04676028823852539, 0.04649526214599609, 0.04625625610351562, 0.046145183563232425, 0.046426464080810546, 0.04617334365844727, 0.04643721771240234, 0.04625532913208008, 0.04666009521484375, 0.04792348861694336, 0.050051071166992187, 0.04649356842041016, 0.046017982482910155, 0.04681590270996094, 0.04646656036376953, 0.0466126708984375, 0.04659872055053711, 0.04675302505493164, 0.046502464294433596, 0.04659811019897461, 0.046165599822998046, 0.04633414459228516, 0.04589779281616211, 0.04589968109130859, 0.04618230438232422, 0.05379670333862305, 0.04705295944213867, 0.0465541763305664, 0.04670969772338867, 0.04680310440063477, 0.04692361450195313, 0.04660380935668945, 0.046806655883789065, 0.04657852935791015, 0.04652646255493164, 0.0466328010559082, 0.0464447021484375, 0.04642406463623047, 0.046853664398193356, 0.046711166381835936, 0.04641756820678711, 0.04655763244628906, 0.046442497253417966, 0.04661676788330078, 0.04640729522705078, 0.047607425689697266, 0.04633785629272461, 0.04632038497924805, 0.045921344757080075, 0.04639839935302734, 0.046542526245117184, 0.04653292846679687, 0.046292991638183595, 0.04679244613647461, 0.045919551849365234, 0.04603385543823242, 0.04597126388549805, 0.0458856315612793, 0.04591820907592774, 0.04600377655029297, 0.046319873809814456, 0.046345600128173826, 0.04615670394897461, 0.04625507354736328, 0.0460010871887207, 0.04596652984619141, 0.04573267364501953, 0.046448863983154294, 0.04647488021850586, 0.04584431838989258, 0.04603526306152344, 0.04635622406005859, 0.0465164794921875, 0.04666739273071289, 
0.046422401428222654, 0.04604927825927734, 0.0469700813293457, 0.046051361083984374, 0.0460063362121582, 0.045714111328125, 0.04583187103271484, 0.046061790466308594, 0.04661695861816406, 0.046387168884277345, 0.04643392181396484, 0.04644038391113281, 0.046401153564453124, 0.04619327926635742, 0.05415302276611328, 0.046733505249023435, 0.046814849853515625, 0.04660876846313477, 0.046663902282714845, 0.046685985565185543, 0.04678451156616211, 0.046755840301513675, 0.04689452743530274, 0.04678303909301758, 0.04673535919189453, 0.046778144836425783, 0.04653452682495117, 0.04693027114868164, 0.04719830322265625, 0.04617820739746094, 0.046004222869873046, 0.04616499328613281, 0.04659251022338867, 0.04668371200561523, 0.0465786247253418, 0.04628275299072265, 0.04630745697021484, 0.0465775375366211, 0.04620671844482422, 0.04613132858276367, 0.046702014923095704, 0.04661932754516602, 0.04686454391479492, 0.04741247940063477, 0.04827324676513672, 0.04704742431640625, 0.04636467361450195, 0.045806655883789064, 0.046301185607910154, 0.04625680160522461, 0.04587753677368164, 0.04561100769042969, 0.045891422271728516, 0.04624819183349609, 0.04667382431030274, 0.0505489616394043, 0.046739166259765624, 0.047457408905029294, 0.04746329498291016, 0.04634758377075195, 0.045970176696777346, 0.046010433197021486, 0.04616799926757813, 0.046160961151123045, 0.04663814544677734, 0.04661644744873047, 0.04659609603881836, 0.04631552124023437, 0.046159999847412106, 0.0464956169128418, 0.04667529678344726, 0.04692649459838867, 0.046622718811035156, 0.046706302642822266, 0.046782081604003906, 0.04714368057250977, 0.04692172622680664, 0.05454275131225586, 0.04701593780517578, 0.04641011047363281, 0.04595059204101563, 0.045827167510986325, 0.046088897705078125, 0.04599420928955078, 0.04572979354858398, 0.045564064025878905, 0.04577654266357422, 0.045809856414794924, 0.0465524787902832, 0.046674591064453125, 0.04648758316040039, 0.046550399780273435, 0.04633571243286133, 0.046226238250732424, 0.04600012969970703, 0.04588544082641602, 0.04590959930419922, 0.04586665725708008, 0.04614643096923828, 0.04626992034912109, 0.04617631912231445, 0.04604451370239258, 0.04556492614746094, 0.045663360595703126, 0.04574860763549805, 0.04614604949951172, 0.04577004623413086, 0.04577964782714844, 0.04595257568359375, 0.04619228744506836, 0.04671123123168945, 0.04655686569213867, 0.04660812759399414, 0.0464716796875, 0.04661699295043945, 0.046647296905517575, 0.04658585739135742, 0.04685356903076172, 0.046047679901123045, 0.046233985900878904, 0.046350078582763674, 0.04647116851806641, 0.04645808029174805, 0.04639401626586914, 0.04907952117919922, 0.047012767791748046, 0.051078975677490236, 0.04701196670532227, 0.046593822479248044, 0.04673116683959961, 0.04669478225708008, 0.04662681579589844, 0.046669151306152346, 0.04670140838623047, 0.04667168045043945, 0.04670259094238281, 0.046502174377441405, 0.046599903106689454, 0.046638751983642576, 0.04630876922607422, 0.05410128021240234, 0.04685094451904297, 0.04615753555297852, 0.04612928009033203, 0.04614348983764648, 0.04659366226196289, 0.046328094482421874, 0.04647366333007812, 0.04615542221069336, 0.04589363098144531, 0.04579935836791992, 0.04587116622924805, 0.04557823944091797, 0.045686847686767576, 0.046196670532226564, 0.04599593734741211, 0.04611660766601562, 0.04633020782470703, 0.04620083236694336, 0.04595302581787109, 0.04573308944702149, 0.045756671905517576, 0.04572243118286133, 0.04566191864013672, 0.04571955108642578, 0.045985790252685545, 0.04609356689453125, 
0.04638307189941406, 0.04644134521484375, 0.04640143966674805, 0.04673126220703125, 0.04643814468383789, 0.04631196975708008, 0.046388961791992187, 0.046439872741699216, 0.04656185531616211, 0.04669235229492188, 0.046515872955322266, 0.04650368118286133, 0.046723392486572264, 0.046446880340576174, 0.04651200103759766, 0.046517887115478516, 0.04631196975708008, 0.04639740753173828, 0.04658160018920898, 0.04640576171875, 0.04653897476196289, 0.04660614395141602, 0.04661862564086914, 0.04630454254150391, 0.04616300964355469, 0.045819198608398434, 0.04579568099975586, 0.045873153686523435, 0.04593862533569336, 0.04598585510253906, 0.048132095336914066, 0.04631289672851562, 0.046259040832519534, 0.04630857467651367, 0.04624435043334961, 0.045914112091064455]",tokens/s,21.449959981874464,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 103258 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1300.811776,1093.599232,0.0,698.351616,690.178048,s,1,9.0914990234375,9.0914990234375,0.0,9.0914990234375,9.0914990234375,9.0914990234375,9.0914990234375,[9.0914990234375],,kWh,4.987506294163874e-05,5.4943037951315234e-06,1.7437791727969998e-05,7.280715846474025e-05,,MB,1435.410432,1408.172032,0.0,1000.341504,957.775872,s,10,0.6234354209899903,0.062343542098999026,0.0003477882052090371,0.062364255905151364,0.06268343505859375,0.06280832557678223,0.06290823799133301,"[0.06265568161010743, 0.06167334365844727, 0.06215894317626953, 0.06193344116210937, 0.06265401458740234, 0.0624372787475586, 0.0629332160949707, 0.06226099014282226, 0.062372673034667966, 0.06235583877563477]",tokens/s,4106.279357587388,kWh,1.909804073257004e-06,2.104694802594633e-07,1.2647159718300294e-06,3.3849895253464964e-06,tokens/kWh,75628003.59738046,MB,1462.214656,1420.754944,0.0,1012.924416,957.778432,s,10,27.223863525390627,2.7223863525390626,0.009427315305820406,2.7192373046875,2.733354736328125,2.7382030029296875,2.7420816162109376,"[2.719857666015625, 2.74305126953125, 2.718616943359375, 2.712888671875, 2.7300703125, 2.73227734375, 2.7155078125, 2.71289599609375, 2.714979248046875, 
2.72371826171875]",tokens/s,23.141461880031237,kWh,7.949106879882261e-05,8.767922032024084e-06,3.395020108897235e-05,0.00012220919191981903,tokens/kWh,515509.50472967734,,s,630,27.221254787445105,0.04320834093245249,0.000528114719663762,0.04307913780212402,0.043606554794311525,0.04398572731018066,0.045682334403991706,"[0.04331292724609375, 0.04353705596923828, 0.04330876922607422, 0.04297286224365234, 0.04332556915283203, 0.04328905487060547, 0.043525375366210935, 0.04314966583251953, 0.04310467147827148, 0.04306262588500977, 0.04283824157714844, 0.04278726577758789, 0.04292748641967774, 0.043000446319580075, 0.042723232269287106, 0.04281967926025391, 0.04296192169189453, 0.04319302368164062, 0.04285984039306641, 0.04284108734130859, 0.04309401702880859, 0.043036670684814454, 0.04317113494873047, 0.04297798538208008, 0.043749183654785154, 0.04318207931518555, 0.04293593597412109, 0.0432624626159668, 0.04321900939941406, 0.044125953674316404, 0.043983104705810544, 0.043259361267089846, 0.04342428970336914, 0.043177982330322266, 0.04291142272949219, 0.04317740631103516, 0.04305964660644531, 0.04289785766601562, 0.04282598495483399, 0.042845279693603515, 0.0428243522644043, 0.043062335968017576, 0.04314822387695313, 0.043169792175292966, 0.0433889274597168, 0.04375542449951172, 0.04366723251342773, 0.04313119888305664, 0.04322035217285156, 0.04311715316772461, 0.04311151885986328, 0.04311088180541992, 0.0431129264831543, 0.043222881317138674, 0.04331660842895508, 0.04331804656982422, 0.043046913146972655, 0.04304399871826172, 0.04364988708496094, 0.04316556930541992, 0.04310371017456055, 0.04298614501953125, 0.04307129669189453, 0.04358121490478516, 0.04322601699829102, 0.043093311309814454, 0.042998462677001956, 0.04298342514038086, 0.04320665740966797, 0.04326009750366211, 0.04308176040649414, 0.04334979248046875, 0.04304230499267578, 0.042797569274902345, 0.04351385498046875, 0.04329372787475586, 0.04331414413452148, 0.04313497543334961, 0.04342281723022461, 0.04294902420043945, 0.042895870208740236, 0.04317184066772461, 0.04345446395874023, 0.04315913772583008, 0.04325827026367188, 0.04321484756469727, 0.04322089767456055, 0.042948703765869144, 0.043038719177246096, 0.043597824096679685, 0.043243518829345705, 0.04331235122680664, 0.043408096313476564, 0.04340073776245117, 0.04339766311645508, 0.04352000045776367, 0.04429414367675781, 0.04410508728027344, 0.04393638229370117, 0.04432486343383789, 0.04337667083740234, 0.04334982299804688, 0.04304003143310547, 0.04301001739501953, 0.04333353424072266, 0.04302707290649414, 0.0455601921081543, 0.043480064392089846, 0.043240447998046876, 0.043235328674316405, 0.04336844635009766, 0.044423168182373046, 0.04343952178955078, 0.04313763046264649, 0.043218944549560545, 0.043142814636230466, 0.04346505737304687, 0.04343369674682617, 0.04901286315917969, 0.04343619155883789, 0.045797279357910156, 0.044500225067138674, 0.043916126251220704, 0.04352000045776367, 0.04396819305419922, 0.0432061767578125, 0.04354339218139648, 0.04353023910522461, 0.04313497543334961, 0.043140705108642576, 0.04313731384277344, 0.04325593566894531, 0.042985183715820316, 0.04349900817871094, 0.04306537628173828, 0.043340286254882815, 0.04307942581176758, 0.043143871307373044, 0.042968894958496096, 0.04340326309204102, 0.043053054809570314, 0.04294819259643555, 0.043006305694580076, 0.04301942443847656, 0.04306403350830078, 0.04291392135620117, 0.04290943908691406, 0.04313958358764648, 0.04306515121459961, 0.04350886535644531, 0.04284288024902344, 0.04305897521972656, 
0.042928417205810546, 0.042942527770996095, 0.04389068984985352, 0.04332313537597656, 0.04322739028930664, 0.0429486083984375, 0.04315135955810547, 0.04291788864135742, 0.042995712280273435, 0.04340550231933594, 0.043111743927001955, 0.043364864349365234, 0.04288716888427734, 0.0429027214050293, 0.04285862350463867, 0.043005790710449215, 0.043299678802490235, 0.04647222518920899, 0.04318038558959961, 0.042870944976806644, 0.04300009536743164, 0.0429917106628418, 0.04297475051879883, 0.043149505615234375, 0.042772254943847655, 0.04297500610351562, 0.04275008010864258, 0.042947265625, 0.04288694381713867, 0.04284147262573242, 0.04283014297485352, 0.04289590454101563, 0.04319641494750977, 0.04319641494750977, 0.04328857421875, 0.043020320892333985, 0.04319638442993164, 0.04338278579711914, 0.043046913146972655, 0.042949790954589846, 0.04304326248168945, 0.042970783233642576, 0.04273638534545898, 0.042643329620361325, 0.04326768112182617, 0.04267385482788086, 0.044544864654541015, 0.043257217407226566, 0.04276287841796875, 0.044203231811523434, 0.04311324691772461, 0.042979328155517575, 0.04294377517700195, 0.0428100814819336, 0.04300799942016602, 0.0430489616394043, 0.044076671600341795, 0.04280767822265625, 0.04281958389282227, 0.042864543914794925, 0.043140480041503906, 0.042909950256347654, 0.04300233459472656, 0.04299980926513672, 0.04300764846801758, 0.04282198333740234, 0.042799102783203126, 0.04255081558227539, 0.04291823959350586, 0.04311257553100586, 0.0432988166809082, 0.04283343887329102, 0.04346928024291992, 0.0426776008605957, 0.04306358337402344, 0.042946945190429686, 0.043009342193603514, 0.04275651168823242, 0.04286707305908203, 0.04303766250610352, 0.04277958297729492, 0.04321279907226563, 0.04302188873291016, 0.04343427276611328, 0.04293257522583008, 0.042854209899902344, 0.04310537719726563, 0.04291676712036133, 0.042698879241943356, 0.04298124694824219, 0.04295270538330078, 0.042995712280273435, 0.0430489616394043, 0.042935871124267576, 0.04336051177978516, 0.043853759765625, 0.043398944854736325, 0.04289788818359375, 0.043063297271728515, 0.04301615905761719, 0.04342950439453125, 0.043686176300048826, 0.04339279937744141, 0.043112319946289064, 0.04310844802856445, 0.043450721740722655, 0.04312188720703125, 0.04293711853027344, 0.04311856079101563, 0.0431016960144043, 0.04301375961303711, 0.0430294075012207, 0.04290764617919922, 0.0430571517944336, 0.04295065689086914, 0.043364158630371095, 0.04347859191894531, 0.04414323043823242, 0.04336819076538086, 0.04299599838256836, 0.042958465576171875, 0.04307612609863281, 0.04299142456054687, 0.043087390899658205, 0.0431416015625, 0.04309196853637695, 0.0440709114074707, 0.0432182731628418, 0.04351990509033203, 0.043610527038574216, 0.04306774520874023, 0.043140670776367185, 0.04336240005493164, 0.04652431869506836, 0.04355920028686523, 0.043233440399169924, 0.04339494323730469, 0.043469024658203126, 0.043267871856689455, 0.043094142913818356, 0.04362444686889649, 0.04385177612304687, 0.04328243255615234, 0.04350566482543945, 0.043261951446533206, 0.043200511932373044, 0.04305920028686523, 0.04333315277099609, 0.042923904418945315, 0.04351587295532226, 0.04314380645751953, 0.04310630416870117, 0.043104255676269534, 0.04324723052978516, 0.04347942352294922, 0.04353843307495117, 0.04329203033447265, 0.04327619171142578, 0.04382547378540039, 0.043413921356201174, 0.04303664016723633, 0.043078849792480466, 0.043053184509277344, 0.0435316162109375, 0.04356121444702148, 0.04343235015869141, 0.043409374237060545, 0.044272960662841795, 
0.043686622619628905, 0.04333363342285156, 0.04520959854125976, 0.04346060943603516, 0.043225086212158204, 0.04300566482543945, 0.043098209381103515, 0.04302608108520508, 0.0434672966003418, 0.04306243133544922, 0.04299043273925781, 0.04484719848632813, 0.04319551849365234, 0.043635009765625, 0.043467231750488285, 0.043278175354003905, 0.04308803176879883, 0.04317753601074219, 0.043199935913085935, 0.04315785598754883, 0.043251583099365234, 0.04360611343383789, 0.04343840026855469, 0.043176319122314455, 0.04327219009399414, 0.04302345657348633, 0.04430665588378906, 0.04368864059448242, 0.04332940673828125, 0.042903678894042965, 0.04308377456665039, 0.04295430374145508, 0.04397715377807617, 0.04289263916015625, 0.042789215087890624, 0.042936641693115236, 0.04312268829345703, 0.04296438217163086, 0.043072097778320315, 0.043071487426757815, 0.043757312774658205, 0.042938625335693356, 0.04282483291625976, 0.042875232696533205, 0.0430695686340332, 0.042961055755615235, 0.043456768035888674, 0.04512768173217773, 0.04321279907226563, 0.04338390350341797, 0.04312771224975586, 0.04374118423461914, 0.043159358978271486, 0.04323142242431641, 0.043374591827392575, 0.04364492797851562, 0.043415550231933595, 0.0430301742553711, 0.043464702606201173, 0.043587039947509766, 0.04299216079711914, 0.04273779296875, 0.04291929626464844, 0.04280985641479492, 0.04284524917602539, 0.04270585632324219, 0.04292393493652344, 0.04301193618774414, 0.04262937545776367, 0.04298652648925781, 0.042646495819091794, 0.04301811218261719, 0.042821758270263674, 0.04291788864135742, 0.04281686401367187, 0.04616463851928711, 0.04294460678100586, 0.04307958221435547, 0.04362444686889649, 0.042967041015625, 0.043103649139404294, 0.04319049453735352, 0.04335577774047852, 0.04310009765625, 0.0429444808959961, 0.043404193878173826, 0.04290460968017578, 0.04308198547363281, 0.04296732711791992, 0.04308211135864258, 0.04290150451660156, 0.042893310546875, 0.043151168823242186, 0.043270336151123044, 0.04348928070068359, 0.0428928337097168, 0.04288876724243164, 0.0429202880859375, 0.04272800064086914, 0.042819263458251954, 0.04301446533203125, 0.04298950576782227, 0.042700672149658205, 0.0426740493774414, 0.04279299163818359, 0.044973728179931644, 0.043171489715576175, 0.04297830581665039, 0.04313699340820312, 0.04308915328979492, 0.04289779281616211, 0.04301968002319336, 0.042937313079833984, 0.04354048156738281, 0.04287612915039062, 0.04281238555908203, 0.043736385345458983, 0.04322140884399414, 0.04288726425170898, 0.043063297271728515, 0.043020286560058595, 0.04394128036499023, 0.04323705673217773, 0.04304777526855469, 0.042958911895751954, 0.04312460708618164, 0.04356927871704101, 0.04311228942871094, 0.042931934356689454, 0.04291628646850586, 0.04331315231323242, 0.04278793716430664, 0.04287376022338867, 0.042913791656494144, 0.04338687896728516, 0.04290764617919922, 0.0428642578125, 0.04534214401245117, 0.04285740661621094, 0.04300595092773438, 0.04288476943969727, 0.04316732788085938, 0.04296371078491211, 0.04296192169189453, 0.04295372772216797, 0.04302403259277344, 0.04320086288452148, 0.04312255859375, 0.04303007888793945, 0.043033153533935546, 0.04300163269042969, 0.04294697570800781, 0.04270371246337891, 0.04403094482421875, 0.04295212936401367, 0.04383318328857422, 0.04317871856689453, 0.04277248001098633, 0.04288003158569336, 0.042855392456054686, 0.042962944030761716, 0.043053054809570314, 0.04324726486206055, 0.042924385070800784, 0.042821632385253904, 0.04273971176147461, 0.042643455505371096, 0.042828895568847655, 
0.04291267013549805, 0.04302438354492188, 0.04287897491455078, 0.0431629753112793, 0.04278054428100586, 0.04274665451049805, 0.04290457534790039, 0.042787841796875, 0.04315964889526367, 0.042831775665283206, 0.04279203033447266, 0.04288358306884766, 0.04282614517211914, 0.04301728057861328, 0.0430621452331543, 0.04305417633056641, 0.043278335571289066, 0.042999168395996094, 0.04303116989135742, 0.04303244781494141, 0.043014015197753906, 0.043016384124755856, 0.04287289428710937, 0.04285955047607422, 0.042912734985351565, 0.042796222686767575, 0.04293305587768555, 0.042978401184082034, 0.042793632507324215, 0.04269068908691406, 0.042762081146240236, 0.043010337829589844, 0.0429417610168457, 0.04293292617797852, 0.04284620666503906, 0.0429219856262207, 0.04295065689086914, 0.04294220733642578, 0.04281779098510742, 0.04283801651000976, 0.04319609451293945, 0.04308614349365234, 0.043499519348144534, 0.04290150451660156, 0.0431800651550293, 0.0429951057434082, 0.04386377716064453, 0.04373385620117187, 0.04396236801147461, 0.0432657585144043, 0.043296993255615236, 0.043320926666259765, 0.043065536499023435, 0.043355873107910156, 0.04292870330810547, 0.042689697265625, 0.04293427276611328, 0.04292256164550781, 0.042942752838134764, 0.043090110778808595, 0.04313065719604492, 0.044378143310546875, 0.04294831848144531, 0.04303244781494141, 0.04281590270996094, 0.043345569610595706, 0.04263955307006836, 0.043159713745117186, 0.04272742462158203, 0.042660896301269534, 0.04355753707885742, 0.04448281478881836, 0.04276025772094726, 0.04283564758300781, 0.04304313659667969, 0.04314217758178711, 0.043987873077392575, 0.04300316619873047, 0.0426644172668457, 0.0430362548828125, 0.04306179046630859, 0.04347804641723633, 0.04297987365722656, 0.04308329772949219, 0.04576950454711914, 0.04435968017578125, 0.04356915283203125, 0.042897407531738284, 0.043068992614746095, 0.04286918258666992, 0.04297727966308594, 0.042942272186279294, 0.04310371017456055, 0.042963550567626956, 0.04291551971435547, 0.042856094360351565, 0.042928993225097654, 0.04279289627075195, 0.04353551864624024, 0.0431808967590332, 0.04286646270751953, 0.042899070739746095, 0.04286323165893555, 0.04283184051513672, 0.0429136962890625, 0.04300352096557617, 0.04308816146850586, 0.04351961517333985, 0.04313350296020508, 0.04307558441162109, 0.0432988166809082, 0.04390092849731445, 0.04332748794555664, 0.043328510284423825, 0.04436067199707031, 0.04342284774780274, 0.043289505004882815, 0.04304076766967774, 0.042990718841552734, 0.04297203063964844, 0.04298137664794922, 0.04309196853637695, 0.04287286376953125, 0.04294652938842773, 0.042897151947021483, 0.04314751815795898, 0.0433070068359375, 0.04314278411865234, 0.0429428482055664, 0.04294192123413086, 0.04319855880737305, 0.04299753570556641, 0.042877601623535155, 0.04314089584350586, 0.043001697540283206, 0.04573222351074219, 0.043284481048583984, 0.043804672241210936, 0.04327654266357422, 0.04304886245727539, 0.043326366424560545, 0.04299462509155273]",tokens/s,23.14367963267319,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1843.068928,2899.247104,0.0,2503.999488,2349.010944,s,1,10.456041015625,10.456041015625,0.0,10.456041015625,10.456041015625,10.456041015625,10.456041015625,[10.456041015625],,kWh,8.935959978747783e-05,9.849528328410333e-06,3.258947051595906e-05,0.00013179859863184722,,MB,1900.003328,3310.288896,0.0,2902.458368,2642.29888,s,10,2.0673072662353515,0.2067307266235352,0.0008982250054187398,0.20664173126220703,0.20775204620361326,0.20799231033325194,0.20818452163696288,"[0.20692445373535157, 0.20612460327148438, 0.2050182342529297, 0.20689356994628907, 0.2063273620605469, 0.206389892578125, 0.20607283020019532, 0.20762509155273437, 0.20769865417480468, 0.20823257446289062]",tokens/s,1238.3258366144385,kWh,6.304979010637554e-06,6.953266835215809e-07,4.206173577701955e-06,1.1206479271861089e-05,tokens/kWh,22843927.498514477,MB,1928.25344,3310.288896,0.0,2902.458368,2642.30144,s,10,27.798505859375002,2.7798505859375,0.005764667646533437,2.7790865478515627,2.78595625,2.7890772460937496,2.7915740429687497,"[2.7921982421875, 2.77327392578125, 2.784954345703125, 2.77960205078125, 2.774796875, 2.77301171875, 2.778571044921875, 2.779670654296875, 2.777164306640625, 2.7852626953125]",tokens/s,22.663088555442396,kWh,8.138066803019641e-05,8.976314009735155e-06,4.321750325009662e-05,0.0001335744852900282,tokens/kWh,471646.9605943761,,s,630,27.79591621780394,0.04412050193302215,0.000516710041676491,0.0439946231842041,0.04450067062377929,0.04506403846740722,0.04631580402374268,"[0.044828033447265624, 0.04420832061767578, 0.0445997428894043, 0.044377952575683596, 0.0446869125366211, 0.044015262603759764, 0.043929695129394535, 0.04409417724609375, 0.04395596694946289, 0.043976959228515626, 0.04378432083129883, 0.04444979095458984, 0.043996768951416014, 0.043932064056396485, 0.04451327896118164, 0.04449484634399414, 0.04423788833618164, 0.044372928619384765, 0.04410761642456055, 0.04410383987426758, 0.044486560821533204, 0.04422364807128906, 0.044208385467529296, 0.04423126220703125, 0.044324222564697265, 0.044283649444580075, 0.0444958381652832, 0.044012542724609374, 0.04399411010742187, 0.044224510192871096, 0.04400921630859375, 0.04558643341064453, 0.04415536117553711, 0.04415875244140625, 0.04443340682983398, 0.04434124755859375, 0.04403200149536133, 0.044146270751953126, 0.044157344818115236, 0.04425932693481445, 0.043919361114501954, 0.04392550277709961, 0.04409366226196289, 0.04408707046508789, 0.043905025482177736, 0.04395827102661133, 0.04412416076660156, 0.0481629753112793, 0.044257118225097654, 0.04415692901611328, 0.044281856536865234, 0.04526899337768555, 0.04436550521850586, 0.04407843017578125, 0.044430305480957034, 0.0442347526550293, 0.044339199066162106, 0.0441366081237793, 0.044029792785644534, 0.04398636627197266, 0.044228225708007815, 0.043971359252929686, 0.04554908752441406, 0.044955646514892575, 0.04426156616210938, 0.04414236831665039, 0.044270782470703124, 0.043827713012695314, 0.044034400939941404, 0.043954177856445314, 0.04416259384155274, 0.04402214431762695, 0.043888511657714846, 0.04381497573852539, 0.044312736511230466, 0.0439417610168457, 0.04402969741821289, 0.04429647827148438, 0.04401926422119141, 0.043812641143798826, 0.04373977661132813, 0.04399116897583008, 0.043894302368164065, 0.04430281448364258, 0.04376332855224609, 0.04365555191040039, 0.043898017883300784, 0.04376611328125, 
0.04399980926513672, 0.04425459289550781, 0.0440038070678711, 0.04427916717529297, 0.04411164855957031, 0.04416198348999024, 0.04397875213623047, 0.043821407318115235, 0.04395894241333008, 0.04409590530395508, 0.044067424774169923, 0.04398662567138672, 0.044017822265625, 0.04401168060302734, 0.043937793731689455, 0.0443675537109375, 0.04392086410522461, 0.043737953186035156, 0.04469952011108398, 0.043913345336914066, 0.04382428741455078, 0.04384444808959961, 0.04402928161621094, 0.04386207962036133, 0.04424499130249023, 0.04384771347045899, 0.04393548965454101, 0.04371696090698242, 0.04393417739868164, 0.04364492797851562, 0.043796607971191406, 0.04376972961425781, 0.04496822357177734, 0.04392726516723633, 0.04398479843139649, 0.04372079849243164, 0.04403519821166992, 0.043852161407470704, 0.045122974395751955, 0.044552799224853515, 0.04426342391967773, 0.04404838562011719, 0.044181503295898435, 0.043640830993652346, 0.04356262588500977, 0.04372876739501953, 0.04357980728149414, 0.043934814453125, 0.04440524673461914, 0.04399267196655274, 0.0444169921875, 0.04378675079345703, 0.043950111389160156, 0.043919231414794924, 0.04418956756591797, 0.044698272705078125, 0.04395622253417969, 0.04404800033569336, 0.04388288116455078, 0.04561103820800781, 0.043870174407958984, 0.043911167144775394, 0.04375305557250977, 0.04382147216796875, 0.043821056365966796, 0.04370841598510742, 0.043753471374511715, 0.04670198440551758, 0.043960929870605465, 0.043886592864990234, 0.0438579216003418, 0.04393164825439453, 0.044308353424072265, 0.044179073333740236, 0.04399564743041992, 0.04398899078369141, 0.04451737594604492, 0.0440684814453125, 0.04390131378173828, 0.044083198547363284, 0.04394803237915039, 0.046118911743164064, 0.047486942291259764, 0.04407455825805664, 0.044110305786132814, 0.04392745590209961, 0.0438172492980957, 0.044353343963623046, 0.04415283203125, 0.04407910537719727, 0.04450060653686523, 0.044026241302490235, 0.04400566482543945, 0.04385516738891602, 0.04410723114013672, 0.044429824829101565, 0.044052928924560544, 0.04429414367675781, 0.04393686294555664, 0.043982975006103514, 0.043938591003417966, 0.045195167541503906, 0.044284000396728515, 0.04431702423095703, 0.044159809112548826, 0.04410796737670898, 0.04448332977294922, 0.04419164657592774, 0.04393155288696289, 0.04369740676879883, 0.043950592041015625, 0.043657569885253905, 0.04382720184326172, 0.04409513473510742, 0.04378249740600586, 0.04434534454345703, 0.04413849639892578, 0.04377804946899414, 0.04417740631103516, 0.04411404800415039, 0.0444087028503418, 0.044418785095214845, 0.044292320251464845, 0.04381907272338867, 0.043974655151367184, 0.04367577743530274, 0.04383932876586914, 0.04365929412841797, 0.04408438491821289, 0.04374819183349609, 0.043859519958496095, 0.043951553344726564, 0.044153057098388675, 0.04420483016967774, 0.04428799819946289, 0.044227584838867184, 0.04481536102294922, 0.044111488342285156, 0.044181537628173825, 0.04387363052368164, 0.04414361572265625, 0.04386611175537109, 0.043993087768554685, 0.0438249282836914, 0.0439769287109375, 0.04371212768554687, 0.043964126586914065, 0.04396908950805664, 0.044042335510253904, 0.043845630645751955, 0.044069183349609374, 0.04405420684814453, 0.044294113159179686, 0.0438109130859375, 0.0441855354309082, 0.04587724685668945, 0.045590526580810545, 0.044041889190673825, 0.04480422210693359, 0.04386991882324219, 0.04420454406738281, 0.04387014389038086, 0.043775840759277346, 0.04374755096435547, 0.044886688232421874, 0.04413788986206055, 0.044485214233398435, 
0.044265472412109375, 0.04807884979248047, 0.04404207992553711, 0.04431683349609375, 0.04420556640625, 0.04394966506958008, 0.044167743682861325, 0.04405692672729492, 0.04401561737060547, 0.044191871643066406, 0.04393535995483398, 0.04385817718505859, 0.043845630645751955, 0.043786239624023435, 0.04391686248779297, 0.04375494384765625, 0.044163265228271485, 0.04398899078369141, 0.043780929565429685, 0.044015201568603515, 0.044043807983398436, 0.04392617416381836, 0.04426953506469727, 0.04380697631835938, 0.043902366638183594, 0.04367136001586914, 0.04392806243896484, 0.04362473678588867, 0.04381292724609375, 0.04376383972167969, 0.04440969467163086, 0.043817951202392576, 0.044490463256835935, 0.04398688125610352, 0.04420016098022461, 0.0440874252319336, 0.04388249588012695, 0.044043807983398436, 0.04392995071411133, 0.043548191070556644, 0.04397116851806641, 0.04378009414672852, 0.04391945648193359, 0.04362128067016602, 0.043784832000732424, 0.043778430938720705, 0.044025856018066405, 0.043870208740234375, 0.043601665496826175, 0.04389503860473633, 0.04379238510131836, 0.043796478271484376, 0.04399718475341797, 0.04372172927856445, 0.04452233505249024, 0.04402191925048828, 0.043783679962158206, 0.04385411071777344, 0.0440219841003418, 0.043802623748779294, 0.04484713745117187, 0.04450649642944336, 0.04398076629638672, 0.04394784164428711, 0.04374348831176758, 0.043911201477050785, 0.04366739273071289, 0.043875038146972654, 0.04457231903076172, 0.044447967529296875, 0.04414214324951172, 0.04390956878662109, 0.04407814407348633, 0.04392832183837891, 0.04367536163330078, 0.043905120849609375, 0.04362688064575195, 0.043541919708251955, 0.04342572784423828, 0.044137119293212894, 0.043375808715820315, 0.04391814422607422, 0.043730945587158204, 0.04399513626098633, 0.04386108779907227, 0.04388751983642578, 0.043753025054931644, 0.045131744384765624, 0.04390959930419922, 0.04385756683349609, 0.043811168670654294, 0.04385507202148437, 0.043780895233154295, 0.04383334350585937, 0.04377731323242187, 0.04401635360717773, 0.044467201232910154, 0.04394313430786133, 0.043797279357910154, 0.04382371139526367, 0.04382352066040039, 0.043659233093261716, 0.04363267135620117, 0.0437657585144043, 0.04366124725341797, 0.04541622543334961, 0.04379644775390625, 0.04390943908691406, 0.043862014770507815, 0.04404633712768555, 0.044023391723632815, 0.04440031814575195, 0.045103839874267575, 0.04544924926757812, 0.043764991760253905, 0.043936481475830076, 0.043757568359375, 0.04393772888183594, 0.04437139129638672, 0.04405632019042969, 0.04390591812133789, 0.04399871826171875, 0.04407551956176758, 0.04632620620727539, 0.04447641754150391, 0.04417536163330078, 0.04393497467041016, 0.04398771286010742, 0.043905025482177736, 0.04381846237182617, 0.0438625602722168, 0.044021343231201174, 0.04410793685913086, 0.04399539184570313, 0.04390102386474609, 0.04388035202026367, 0.04470579147338867, 0.043804672241210936, 0.044010784149169924, 0.043880542755126956, 0.04412416076660156, 0.04378416061401367, 0.04379084777832031, 0.04374748611450195, 0.04385123062133789, 0.04382742309570312, 0.04382751846313476, 0.043648193359375, 0.043805503845214845, 0.04383129501342774, 0.04380672073364258, 0.04379852676391602, 0.044010688781738284, 0.04390380859375, 0.044611583709716796, 0.04365107345581055, 0.04365311813354492, 0.04365673446655274, 0.04409801483154297, 0.04450124740600586, 0.044077056884765625, 0.04401273727416992, 0.04460355377197266, 0.04715708923339844, 0.04436640167236328, 0.04416307067871094, 0.04399513626098633, 
0.04401776123046875, 0.044574623107910154, 0.04399718475341797, 0.04401372909545898, 0.04393967819213867, 0.0440709114074707, 0.04388249588012695, 0.04396182250976562, 0.04382486343383789, 0.043915233612060546, 0.043878528594970705, 0.043909854888916015, 0.043862014770507815, 0.04404339218139648, 0.044311424255371094, 0.044519233703613284, 0.04400505447387695, 0.04421683120727539, 0.04426137542724609, 0.04469887924194336, 0.044218463897705076, 0.043981121063232424, 0.04406070327758789, 0.04400979232788086, 0.04415488052368164, 0.044070049285888674, 0.04378915023803711, 0.04397590255737305, 0.04425315093994141, 0.044663616180419925, 0.04657171249389649, 0.04411782455444336, 0.04419184112548828, 0.04500060653686523, 0.0439496955871582, 0.04398102569580078, 0.043745662689208986, 0.04382287979125977, 0.04386975860595703, 0.04403350448608399, 0.045386112213134766, 0.04393331146240234, 0.04387052917480469, 0.04376630401611328, 0.04386624145507813, 0.04381081771850586, 0.044298110961914064, 0.04571968078613281, 0.04579244613647461, 0.043977344512939456, 0.04386732864379883, 0.044080127716064454, 0.044439552307128906, 0.04396783828735352, 0.043698753356933594, 0.04418569564819336, 0.043931838989257815, 0.04391916656494141, 0.04377395248413086, 0.04368588638305664, 0.043802623748779294, 0.04370943832397461, 0.04363161468505859, 0.04429619216918945, 0.04385516738891602, 0.04411257553100586, 0.04382534408569336, 0.044000129699707034, 0.04402272033691406, 0.044072769165039063, 0.04426716613769531, 0.043772735595703126, 0.04395798492431641, 0.04357734298706055, 0.04368809509277344, 0.043796607971191406, 0.04376287841796875, 0.04390044784545898, 0.043977920532226565, 0.044154464721679686, 0.04414486312866211, 0.04395609664916992, 0.044929088592529295, 0.044113246917724606, 0.044243167877197266, 0.0439156494140625, 0.044133792877197264, 0.04394659042358398, 0.04427571105957031, 0.04396156692504883, 0.04397545623779297, 0.04391424179077148, 0.044055553436279295, 0.043902976989746094, 0.04397369766235352, 0.043928512573242186, 0.04440883255004883, 0.04392784118652344, 0.04410543823242188, 0.04393318557739258, 0.0438823356628418, 0.04384739303588867, 0.04437702560424805, 0.04402294540405274, 0.04371462249755859, 0.04361065673828125, 0.043587841033935544, 0.0437657585144043, 0.043763233184814454, 0.04374166488647461, 0.04381491088867188, 0.043872257232666016, 0.04376166534423828, 0.04384470367431641, 0.04461846542358398, 0.04397439956665039, 0.04394044876098633, 0.04411580657958984, 0.0460167350769043, 0.044684864044189455, 0.04405295944213867, 0.04448748779296875, 0.04389177703857422, 0.04390694427490234, 0.04375660705566406, 0.043920318603515626, 0.043914848327636716, 0.044120128631591794, 0.04518265533447265, 0.045015392303466795, 0.044046657562255856, 0.04400678253173828, 0.04408793640136719, 0.04443312072753906, 0.04408044815063476, 0.04367459106445312, 0.04395161437988281, 0.043926017761230465, 0.04394803237915039, 0.044071937561035154, 0.04363161468505859, 0.04433852767944336, 0.04401795196533203, 0.04397673416137695, 0.04386003112792969, 0.04535500717163086, 0.04629033660888672, 0.044274272918701174, 0.043963520050048825, 0.0440840950012207, 0.044074878692626954, 0.0440239372253418, 0.04402380752563476, 0.045147327423095705, 0.04591203308105469, 0.043893600463867186, 0.04386611175537109, 0.04390707015991211, 0.04385126495361328, 0.0443922233581543, 0.04425187301635742, 0.0437592658996582, 0.04407126235961914, 0.04397875213623047, 0.04385721588134766, 0.04371510314941406, 0.04394927978515625, 
0.04373600006103515, 0.04583769607543945, 0.04435212707519531, 0.0439400634765625, 0.04369935989379883, 0.04390879821777344, 0.043780448913574216, 0.04401776123046875, 0.04398745727539063, 0.04370636749267578, 0.043703392028808595, 0.04411427307128906, 0.043737567901611325, 0.04393360137939453, 0.04391110229492187, 0.04434560012817383, 0.04413433456420898, 0.04441708755493164, 0.04422860717773437, 0.04404537582397461, 0.04385475158691406, 0.04522601699829101, 0.04541644668579101, 0.04417740631103516, 0.04442521667480469, 0.04389823913574219, 0.043976478576660157, 0.04405539321899414, 0.044056129455566403, 0.04437369537353516, 0.044065536499023436, 0.044050430297851564, 0.044047679901123044, 0.043920063018798826, 0.04377743911743164, 0.04408947372436523, 0.044028385162353516, 0.04439769744873047, 0.04409433746337891, 0.044090625762939456, 0.04479244613647461]",tokens/s,22.665199990654376,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,826.908672,554.631168,0.0,159.383552,142.313472,s,1,7.770806640625,7.770806640625,0.0,7.770806640625,7.770806640625,7.770806640625,7.770806640625,[7.770806640625],,kWh,2.044441207081036e-05,2.2443550499971597e-06,7.322505857998429e-06,3.001127297880595e-05,,MB,1169.485824,630.12864,0.0,222.298112,185.324544,s,16,0.20787631988525393,0.012992269992828369,0.00010642136649962598,0.012965983867645263,0.01313753604888916,0.01321061611175537,0.013223422050476075,"[0.013057024002075195, 0.012937984466552734, 0.012915424346923828, 0.012964415550231933, 0.013205280303955078, 0.012908672332763672, 0.012927007675170899, 0.01290614414215088, 0.01287775993347168, 0.012967552185058594, 0.012977215766906738, 0.01322662353515625, 0.013063615798950195, 0.012843647956848144, 0.013028160095214843, 0.013069791793823242]",tokens/s,19704.024018998218,kWh,3.869097831898266e-07,4.2669298453619785e-08,2.1264954465961984e-07,6.422286263030662e-07,tokens/kWh,398611942.0955151,MB,1203.05664,632.225792,0.0,224.395264,185.327104,s,16,10.13089453125,0.633180908203125,0.002444518329385662,0.6324399108886718,0.6361362304687499,0.6373408050537109,0.6391272918701172,"[0.6326646728515625, 0.6314248657226562, 0.63187353515625, 0.6320014038085937, 0.6395739135742188, 0.6317808227539062, 0.6325137329101562, 0.6321677856445312, 0.6285827026367188, 0.632541259765625, 0.635676025390625, 0.636596435546875, 0.63510498046875, 0.6323660888671875, 0.6337172241210938, 0.63230908203125]",tokens/s,99.49763043043228,kWh,1.8280941349102588e-05,2.0160788581939183e-06,7.300429382464556e-06,2.7597449589761054e-05,tokens/kWh,2282819.6422677287,,s,1008,10.12196175956727,0.010041628729729425,0.00013469295719927956,0.010021120071411132,0.01016208667755127,0.010236100769042968,0.010545873079299927,"[0.010080096244812011, 0.010078240394592285, 0.010184703826904297, 0.01003276824951172, 0.010035584449768067, 0.010023039817810058, 0.009983872413635254, 0.010057408332824707, 
0.00999782371520996, 0.009972512245178223, 0.009994272232055665, 0.010015744209289551, 0.0099901762008667, 0.009982943534851074, 0.010076416015625, 0.010059295654296875, 0.010041567802429199, 0.010051039695739747, 0.010069791793823241, 0.01007487964630127, 0.010076160430908204, 0.010049535751342773, 0.010023967742919922, 0.009998815536499023, 0.010030912399291993, 0.009947392463684082, 0.009940640449523926, 0.010007552146911621, 0.009925567626953125, 0.010027104377746583, 0.010471648216247558, 0.010013216018676758, 0.010033023834228516, 0.010035296440124512, 0.0100797119140625, 0.009951168060302735, 0.0100131196975708, 0.00998198413848877, 0.01001471996307373, 0.009976320266723633, 0.010020511627197266, 0.010004351615905762, 0.010023296356201172, 0.010024991989135743, 0.010053152084350587, 0.00999135971069336, 0.010028160095214844, 0.010000255584716797, 0.010061823844909668, 0.010028703689575195, 0.01005350399017334, 0.010074591636657715, 0.010034527778625488, 0.010099360466003418, 0.010057472229003906, 0.010082559585571289, 0.010045439720153808, 0.010000672340393067, 0.010023903846740723, 0.009988224029541016, 0.010031200408935547, 0.009931167602539063, 0.01003974437713623, 0.01005827236175537, 0.01026198387145996, 0.010103679656982422, 0.009968799591064453, 0.01021020793914795, 0.009917887687683105, 0.00992454433441162, 0.009894016265869141, 0.009880000114440918, 0.010145279884338379, 0.009979488372802735, 0.00996835231781006, 0.010484031677246094, 0.010042431831359864, 0.010009471893310546, 0.009977855682373048, 0.010002367973327637, 0.009854047775268555, 0.00990499210357666, 0.00983084774017334, 0.009921631813049316, 0.009884384155273438, 0.010014304161071777, 0.00992899227142334, 0.009907999992370605, 0.009842368125915527, 0.009896479606628417, 0.009881600379943848, 0.009955072402954101, 0.010100992202758789, 0.009965824127197266, 0.009908991813659668, 0.010023712158203125, 0.009987808227539063, 0.009957375526428223, 0.009959936141967773, 0.009960607528686523, 0.009955519676208497, 0.010175135612487793, 0.010004320144653321, 0.010037247657775878, 0.009946399688720704, 0.010049823760986329, 0.010068256378173828, 0.010084192276000976, 0.009973440170288086, 0.010070367813110352, 0.009991871833801269, 0.010067744255065918, 0.010121376037597656, 0.010181440353393554, 0.010033151626586915, 0.010397695541381835, 0.009973759651184083, 0.010012127876281739, 0.010014528274536133, 0.010066847801208496, 0.009985856056213379, 0.010016672134399414, 0.010029215812683106, 0.010004511833190918, 0.010025152206420898, 0.010077407836914062, 0.00999014377593994, 0.00997811222076416, 0.010060832023620606, 0.010040032386779784, 0.009973471641540527, 0.010031167984008788, 0.009971936225891114, 0.010061280250549316, 0.010215007781982421, 0.010039615631103515, 0.010074751853942871, 0.010033087730407715, 0.010063712120056151, 0.009977215766906739, 0.009930784225463868, 0.009915200233459473, 0.009983424186706544, 0.009973823547363281, 0.010095135688781738, 0.00998192024230957, 0.009953280448913575, 0.009934847831726074, 0.010031455993652344, 0.009957023620605469, 0.010072352409362793, 0.009940320014953614, 0.009972448348999024, 0.009899680137634276, 0.010077695846557617, 0.009957247734069825, 0.009969311714172363, 0.009978560447692872, 0.010075519561767578, 0.00997283172607422, 0.010050880432128905, 0.010131967544555665, 0.010129216194152833, 0.01005615997314453, 0.010149503707885742, 0.010113120079040527, 0.009996288299560547, 0.009961471557617188, 0.010024959564208985, 0.009958815574645997, 
0.010046112060546874, 0.009981760025024414, 0.010281087875366211, 0.009984000205993653, 0.010051584243774414, 0.009936800003051758, 0.009896032333374024, 0.009928607940673828, 0.009949567794799805, 0.00992579174041748, 0.009960000038146972, 0.009922335624694825, 0.009986559867858886, 0.009882719993591308, 0.010039903640747071, 0.009917792320251464, 0.01088582420349121, 0.009995488166809082, 0.009978655815124512, 0.010388768196105957, 0.010091487884521484, 0.009969056129455567, 0.009969504356384277, 0.009972703933715821, 0.01012492847442627, 0.0107357759475708, 0.010024959564208985, 0.010000384330749512, 0.01013766384124756, 0.010038944244384765, 0.010061183929443359, 0.00996390438079834, 0.00999385643005371, 0.009986080169677735, 0.00997871971130371, 0.010040384292602539, 0.010044384002685546, 0.010020928382873535, 0.010071680068969727, 0.009944864273071289, 0.010028639793395995, 0.009942975997924805, 0.009963871955871582, 0.009929375648498535, 0.01044809627532959, 0.009967519760131835, 0.009957759857177735, 0.009953056335449219, 0.009978591918945312, 0.01004150390625, 0.010051039695739747, 0.009952735900878907, 0.009947648048400879, 0.010096735954284668, 0.009927167892456054, 0.00993222427368164, 0.009914175987243652, 0.009861503601074219, 0.009957247734069825, 0.00986348819732666, 0.009975808143615723, 0.009926655769348144, 0.009931008338928222, 0.010016287803649902, 0.009920736312866212, 0.009869152069091797, 0.009939104080200196, 0.009762816429138184, 0.009883392333984374, 0.009894495964050292, 0.00993017578125, 0.009898207664489745, 0.009979904174804688, 0.010231807708740234, 0.010201087951660156, 0.010425760269165038, 0.01040777587890625, 0.010033920288085937, 0.00999014377593994, 0.009942784309387206, 0.010053536415100098, 0.009961215972900391, 0.010162176132202149, 0.010028415679931641, 0.01015449619293213, 0.010082079887390136, 0.01011081600189209, 0.010142208099365235, 0.010115424156188964, 0.010049344062805176, 0.010077695846557617, 0.010057727813720703, 0.010262880325317383, 0.010105119705200196, 0.010146656036376954, 0.010034048080444335, 0.010962016105651855, 0.010491999626159668, 0.010020704269409179, 0.010036319732666015, 0.010016639709472657, 0.010117119789123535, 0.010100735664367675, 0.01019264030456543, 0.010218784332275391, 0.010041567802429199, 0.010033920288085937, 0.010037247657775878, 0.009959615707397462, 0.009999648094177246, 0.009953824043273926, 0.009971487998962402, 0.009912320137023926, 0.009957152366638184, 0.009943488121032715, 0.009891424179077148, 0.009933216094970703, 0.010217472076416016, 0.009850655555725098, 0.009901887893676757, 0.009992159843444825, 0.009971263885498047, 0.010804096221923827, 0.00999830436706543, 0.010704671859741211, 0.011363615989685058, 0.010138591766357423, 0.010112128257751464, 0.010101696014404297, 0.010153951644897461, 0.01015993595123291, 0.010092576026916504, 0.010076319694519043, 0.010106528282165527, 0.01003551959991455, 0.01006345558166504, 0.010547264099121094, 0.010184191703796386, 0.010191712379455566, 0.010370047569274902, 0.010287455558776856, 0.009998592376708984, 0.010023327827453613, 0.010027008056640625, 0.010217472076416016, 0.010015904426574707, 0.01003996753692627, 0.009907327651977538, 0.009960320472717285, 0.009910271644592286, 0.009970911979675293, 0.009878399848937989, 0.00998799991607666, 0.009897983551025391, 0.009994048118591309, 0.00987564754486084, 0.010032832145690918, 0.01001318359375, 0.009981599807739258, 0.00994320011138916, 0.010028160095214844, 0.009929439544677735, 0.009971232414245605, 
0.009943679809570312, 0.00996735954284668, 0.009912575721740723, 0.009993247985839844, 0.009975775718688965, 0.009976832389831543, 0.010002207756042481, 0.0101112003326416, 0.010013824462890624, 0.010042655944824218, 0.010014143943786621, 0.010026623725891113, 0.010125856399536132, 0.010012672424316407, 0.009951295852661133, 0.009940896034240723, 0.010027071952819825, 0.010067296028137208, 0.010041600227355956, 0.010082752227783202, 0.010090687751770019, 0.010233792304992675, 0.010153696060180665, 0.0101561279296875, 0.010079327583312989, 0.010116000175476075, 0.010032159805297852, 0.01005078411102295, 0.010077280044555664, 0.009931039810180665, 0.009963071823120118, 0.010002400398254395, 0.010072928428649902, 0.009998527526855469, 0.010325599670410156, 0.010276800155639648, 0.01004588794708252, 0.010190400123596191, 0.010073535919189453, 0.009984864234924316, 0.009973759651184083, 0.009971551895141602, 0.010009920120239258, 0.00993065643310547, 0.009923232078552247, 0.009963711738586426, 0.010039520263671876, 0.009981375694274902, 0.009996159553527832, 0.010037952423095704, 0.00997935962677002, 0.01031164836883545, 0.009951807975769044, 0.009991488456726073, 0.009922271728515624, 0.010007519721984864, 0.010045439720153808, 0.009954463958740235, 0.00986832046508789, 0.010155808448791505, 0.009941023826599121, 0.010065600395202637, 0.010195584297180175, 0.009957056045532226, 0.009932767868041993, 0.009913503646850586, 0.010164128303527833, 0.009930015563964844, 0.01005945587158203, 0.010005824089050293, 0.010312383651733398, 0.010337599754333496, 0.01001542377471924, 0.010156031608581542, 0.009947135925292968, 0.009904128074645996, 0.01002195167541504, 0.009986111640930176, 0.010068863868713378, 0.009973471641540527, 0.009944831848144531, 0.009996064186096191, 0.009890527725219726, 0.009960576057434082, 0.010148127555847168, 0.01001302433013916, 0.01020751953125, 0.00994918441772461, 0.01009171199798584, 0.00997049617767334, 0.009989888191223144, 0.010010880470275879, 0.010034912109375, 0.010064064025878906, 0.01009059238433838, 0.010081376075744629, 0.010260928153991699, 0.009970144271850586, 0.009979647636413574, 0.009938976287841798, 0.010422495841979981, 0.009987615585327148, 0.00998243236541748, 0.009946911811828614, 0.009948384284973145, 0.009976287841796876, 0.01002131175994873, 0.009973695755004883, 0.010036160469055175, 0.01002297592163086, 0.010003552436828614, 0.009898816108703613, 0.00994211196899414, 0.009949503898620605, 0.009945631980895996, 0.009950528144836426, 0.00997862434387207, 0.00994262409210205, 0.009959263801574707, 0.009937472343444824, 0.00997760009765625, 0.009988351821899414, 0.009968704223632812, 0.010005375862121583, 0.010073408126831055, 0.010031904220581055, 0.010045408248901367, 0.010005696296691895, 0.010152768135070801, 0.010227711677551269, 0.010043392181396485, 0.009963520050048828, 0.010045439720153808, 0.01001478385925293, 0.01004047966003418, 0.01063811206817627, 0.010142848014831543, 0.009988991737365723, 0.010065919876098632, 0.009861120223999023, 0.009873408317565918, 0.010079936027526855, 0.009865632057189941, 0.010037280082702637, 0.009916288375854492, 0.010100735664367675, 0.009934271812438965, 0.009968192100524902, 0.009924448013305665, 0.010090368270874023, 0.009965855598449707, 0.009942367553710937, 0.009921183586120605, 0.009960576057434082, 0.009941472053527831, 0.010010111808776855, 0.009994720458984375, 0.01010483169555664, 0.010020511627197266, 0.010066368103027344, 0.010026816368103028, 0.010039839744567871, 0.009991711616516113, 
0.009992735862731934, 0.010043328285217284, 0.01016204833984375, 0.010082752227783202, 0.010082240104675294, 0.010195775985717774, 0.010172863960266114, 0.010243935585021973, 0.009994239807128906, 0.00994934368133545, 0.009987168312072754, 0.00993177604675293, 0.009983903884887695, 0.009971712112426758, 0.009894975662231445, 0.009872063636779786, 0.009883904457092285, 0.00992255973815918, 0.009830400466918946, 0.009822208404541016, 0.009836640357971192, 0.009872960090637208, 0.009902432441711425, 0.009945088386535645, 0.009817664146423339, 0.009761504173278809, 0.009874272346496581, 0.009923456192016601, 0.010098079681396484, 0.010019424438476563, 0.010088447570800782, 0.010010623931884765, 0.010063743591308594, 0.009932928085327149, 0.010174400329589843, 0.010009696006774902, 0.010039360046386718, 0.01002940845489502, 0.009999103546142578, 0.010015680313110351, 0.010034048080444335, 0.00991641616821289, 0.010033151626586915, 0.009930751800537109, 0.009967616081237793, 0.009881440162658692, 0.00999020767211914, 0.009944864273071289, 0.010117631912231445, 0.009971520423889161, 0.01001471996307373, 0.009981951713562011, 0.009973055839538574, 0.009964223861694335, 0.009997568130493163, 0.009949952125549317, 0.009959551811218261, 0.00993062400817871, 0.010055423736572266, 0.010010368347167968, 0.010009183883666992, 0.009985823631286622, 0.010030847549438476, 0.009940447807312012, 0.009996576309204102, 0.009986687660217285, 0.010166272163391114, 0.009943039894104003, 0.009969663619995118, 0.009906304359436036, 0.010002304077148437, 0.010103167533874512, 0.01006156826019287, 0.010104512214660645, 0.010009183883666992, 0.009942015647888184, 0.009925344467163085, 0.00984505558013916, 0.010026111602783203, 0.010039872169494628, 0.009992128372192382, 0.01002735996246338, 0.010075167655944824, 0.01009545612335205, 0.010280320167541504, 0.010119839668273926, 0.010053440093994141, 0.010160127639770507, 0.01014134407043457, 0.01001308822631836, 0.009975744247436523, 0.010032992362976074, 0.009994751930236816, 0.010031840324401856, 0.01002947235107422, 0.009986240386962891, 0.010057567596435547, 0.009968128204345703, 0.01005350399017334, 0.010026752471923828, 0.010038911819458008, 0.010035072326660156, 0.009997280120849609, 0.010198495864868163, 0.010084511756896973, 0.010324511528015137, 0.010237343788146972, 0.010080927848815919, 0.010016448020935058, 0.010022496223449707, 0.0100763521194458, 0.009992416381835938, 0.01002729606628418, 0.009965279579162597, 0.009897983551025391, 0.01000160026550293, 0.009961919784545898, 0.009924032211303712, 0.010001343727111816, 0.009928159713745117, 0.010045408248901367, 0.009955743789672851, 0.010006591796875, 0.00999456024169922, 0.010012063980102539, 0.010055295944213867, 0.010087167739868164, 0.009945088386535645, 0.01000483226776123, 0.009989791870117188, 0.009995936393737793, 0.009963808059692382, 0.009982015609741211, 0.009946528434753419, 0.009978303909301758, 0.00991049575805664, 0.00998076820373535, 0.009935711860656738, 0.009977696418762207, 0.009968000411987305, 0.010034175872802734, 0.010059935569763184, 0.010045087814331055, 0.010028127670288087, 0.009989343643188476, 0.009996928215026856, 0.010046496391296386, 0.01006486415863037, 0.010028415679931641, 0.010846847534179688, 0.01026598358154297, 0.010695199966430664, 0.010376992225646973, 0.01004371166229248, 0.01003321647644043, 0.01005356788635254, 0.010080256462097169, 0.010036704063415527, 0.010222111701965332, 0.010050975799560546, 0.01002064037322998, 0.010039775848388673, 0.01032636833190918, 
0.010078207969665527, 0.010004799842834472, 0.010021984100341797, 0.010033760070800781, 0.010006272315979003, 0.010115679740905761, 0.010014592170715332, 0.010008352279663086, 0.009995840072631835, 0.009974368095397949, 0.010028960227966309, 0.01001260757446289, 0.010067968368530274, 0.010042880058288574, 0.009986111640930176, 0.010016192436218261, 0.010210304260253907, 0.01002905559539795, 0.010029312133789062, 0.010309439659118652, 0.010041279792785645, 0.010245311737060546, 0.010175135612487793, 0.010059904098510743, 0.009950336456298827, 0.00997868824005127, 0.010008671760559081, 0.01002086353302002, 0.010079680442810058, 0.010224191665649415, 0.010054752349853516, 0.010021439552307129, 0.010060128211975097, 0.010079327583312989, 0.010355392456054687, 0.01003228759765625, 0.01010159969329834, 0.010022303581237794, 0.010035455703735352, 0.01010927963256836, 0.010483712196350097, 0.010182656288146973, 0.010124320030212403, 0.010240192413330079, 0.010119199752807617, 0.010058496475219727, 0.01008249568939209, 0.010073920249938965, 0.010106176376342773, 0.010014623641967773, 0.010002559661865234, 0.010005151748657227, 0.009969663619995118, 0.010117119789123535, 0.010184639930725098, 0.009947199821472168, 0.010011712074279784, 0.009950143814086914, 0.01003929615020752, 0.010008895874023437, 0.00998367977142334, 0.009973183631896972, 0.01004963207244873, 0.010002528190612793, 0.01018051242828369, 0.010019200325012207, 0.010055007934570313, 0.010027071952819825, 0.010056672096252441, 0.009998047828674316, 0.010051839828491211, 0.010033920288085937, 0.009987263679504395, 0.010045248031616211, 0.010117119789123535, 0.010012800216674805, 0.010004096031188965, 0.010051072120666504, 0.010062591552734375, 0.010049152374267577, 0.01010643196105957, 0.010076191902160645, 0.010017024040222169, 0.010080063819885254, 0.010181344032287598, 0.010194944381713868, 0.010133503913879394, 0.010147232055664063, 0.010216032028198242, 0.010190848350524903, 0.010223615646362304, 0.01031116771697998, 0.01033785629272461, 0.01013856029510498, 0.010230048179626465, 0.010174176216125489, 0.010123583793640136, 0.010527392387390137, 0.009928192138671875, 0.010109439849853515, 0.010034912109375, 0.010055968284606934, 0.01012940788269043, 0.01002620792388916, 0.009893856048583984, 0.010000608444213866, 0.010023520469665528, 0.00996339225769043, 0.009951359748840332, 0.00999014377593994, 0.009928383827209473, 0.010048992156982422, 0.009942879676818847, 0.010118144035339355, 0.009989983558654785, 0.009996607780456543, 0.010164287567138672, 0.01005292797088623, 0.010065631866455078, 0.01009712028503418, 0.010016256332397461, 0.010029855728149415, 0.010143744468688964, 0.010186079978942872, 0.010082207679748535, 0.010056351661682129, 0.010106975555419923, 0.009985983848571777, 0.010120320320129394, 0.010018912315368653, 0.010069952011108399, 0.010095199584960938, 0.010172032356262206, 0.01015379238128662, 0.010133888244628906, 0.010035103797912597, 0.010191455841064453, 0.010051584243774414, 0.010122624397277832, 0.010416511535644532, 0.01023203182220459, 0.010033184051513672, 0.010090496063232422, 0.010160415649414063, 0.01011616039276123, 0.010209664344787598, 0.010174752235412598, 0.010128640174865722, 0.010099455833435059, 0.010125632286071778, 0.0099999361038208, 0.010042559623718262, 0.009911231994628907, 0.010011903762817383, 0.009921279907226563, 0.01002905559539795, 0.010064031600952148, 0.010034784317016602, 0.009982208251953124, 0.00998528003692627, 0.009955936431884766, 0.009906623840332031, 0.009952608108520508, 
0.009917119979858398, 0.009972736358642579, 0.009852928161621094, 0.009888704299926759, 0.0098405122756958, 0.009862239837646485, 0.009911264419555664, 0.00994707202911377, 0.00999619197845459, 0.009973983764648437, 0.009992128372192382, 0.009981696128845214, 0.010051136016845702, 0.009999008178710937, 0.010153280258178712, 0.009998911857604981, 0.010057696342468262, 0.010016608238220214, 0.010153471946716308, 0.010100640296936036, 0.010015680313110351, 0.010069536209106446, 0.010011103630065918, 0.010104384422302246, 0.009994688034057618, 0.010106880187988282, 0.010007935523986817, 0.010048192024230957, 0.010110912322998046, 0.010067968368530274, 0.011309056282043458, 0.010070015907287597, 0.01004537582397461, 0.00998851203918457, 0.01005123233795166, 0.01004047966003418, 0.01012617588043213, 0.010128576278686524, 0.010193504333496094, 0.010010848045349121, 0.010043199539184571, 0.010125151634216309, 0.010019071578979492, 0.00997100830078125, 0.010070816040039062, 0.009989312171936035, 0.009963711738586426, 0.00990886402130127, 0.009926848411560059, 0.009840703964233399, 0.009862912178039552, 0.010123264312744141, 0.00998566436767578, 0.009941375732421875, 0.009975808143615723, 0.009930751800537109, 0.010059935569763184, 0.009982848167419433, 0.010136287689208984, 0.009953248023986816, 0.010096544265747071, 0.010003647804260253, 0.009958304405212403, 0.01002291202545166, 0.009955679893493653, 0.009985376358032226, 0.010051456451416016, 0.010141887664794923, 0.010014752388000488, 0.010047712326049806, 0.010134688377380371, 0.010056544303894042, 0.010078207969665527, 0.009991583824157714, 0.010320063591003417, 0.010178879737854004, 0.009965663909912109, 0.010120448112487792, 0.010245887756347657, 0.010159104347229005, 0.010109184265136718, 0.009997695922851562, 0.009984640121459961, 0.010018560409545898, 0.010073823928833008, 0.010084639549255371, 0.00994092845916748, 0.010018336296081542, 0.009988639831542969, 0.010086239814758301, 0.010076319694519043, 0.010077183723449706, 0.010150912284851075, 0.010126784324645996, 0.010179136276245117, 0.010120287895202636, 0.010009183883666992, 0.009928607940673828, 0.010049951553344727, 0.01006710433959961, 0.01010364818572998, 0.010023263931274414, 0.009954879760742188, 0.009846879959106445, 0.009904128074645996, 0.009946816444396973, 0.010084671974182129, 0.010080351829528808, 0.010025983810424804, 0.010062623977661132, 0.010006655693054199, 0.0100164155960083, 0.010115424156188964, 0.010250240325927735, 0.010010751724243164, 0.009980095863342284, 0.009989215850830077, 0.01005241584777832, 0.010112735748291015, 0.009948287963867188, 0.010039263725280762, 0.009937888145446777, 0.010053312301635742, 0.010089632034301757, 0.010132160186767578, 0.01011680030822754, 0.009981823921203612, 0.010037792205810548, 0.010039199829101562, 0.01005568027496338, 0.009953056335449219, 0.010023200035095214, 0.009908160209655762, 0.010036383628845215, 0.009964384078979493, 0.00998140811920166, 0.009945055961608888, 0.009968192100524902, 0.00995680046081543, 0.01001529598236084, 0.00994707202911377, 0.010065919876098632, 0.010040639877319336, 0.010058367729187011, 0.010064288139343262, 0.01003433609008789, 0.0101212797164917, 0.010105183601379394, 0.010104031562805175, 0.010088895797729492, 0.01001699161529541, 0.009986623764038086, 0.010045023918151855, 0.009945216178894043, 0.00999014377593994, 0.010057888031005859, 0.009989919662475586, 0.010064288139343262, 0.010005472183227539, 0.010085056304931641, 0.009932064056396484, 0.009891648292541504, 
0.009973055839538574, 0.0100098237991333, 0.010073856353759765, 0.009986687660217285, 0.010029088020324707, 0.010014687538146973, 0.010035039901733399, 0.010034943580627441, 0.0100797119140625, 0.009981023788452148, 0.00995894432067871, 0.010339936256408692, 0.010226752281188965, 0.009983776092529296, 0.010042495727539062, 0.010005248069763184, 0.010038463592529297, 0.01008518409729004, 0.010026975631713867, 0.009981951713562011, 0.009946880340576171, 0.010042816162109376, 0.010007391929626465, 0.010006303787231446]",tokens/s,99.58543846969587,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,910.45888,2631.794688,0.0,2246.049792,2230.657024,s,1,9.4035830078125,9.4035830078125,0.0,9.4035830078125,9.4035830078125,9.4035830078125,9.4035830078125,[9.4035830078125],,kWh,7.738652238332786e-05,8.52603533086044e-06,2.5719465020002108e-05,0.0001116320227341904,,MB,1417.207808,3244.163072,0.0,2826.960896,2571.087872,s,10,2.913348236083984,0.29133482360839846,0.0007813960967849343,0.2916429595947266,0.29223916015625,0.29230079345703125,0.2923501000976562,"[0.29067422485351563, 0.29047738647460936, 0.28993109130859374, 0.2916012878417969, 0.29168463134765626, 0.29071881103515623, 0.29169784545898436, 0.2919750671386719, 0.2922254638671875, 0.2923624267578125]",tokens/s,878.7140405298948,kWh,8.735644790317342e-06,9.630639536467272e-07,5.819620668764795e-06,1.5518329412728865e-05,tokens/kWh,16496621.07249874,MB,1472.917504,3244.163072,0.0,2826.960896,2571.090432,s,10,23.91964453125,2.391964453125,0.006041429819287744,2.3928662109375,2.398121313476562,2.4007634155273436,2.402877097167969,"[2.38767236328125, 2.391570068359375, 2.381670166015625, 2.390513427734375, 2.384261474609375, 2.394666015625, 2.39418896484375, 2.394162353515625, 2.403405517578125, 2.3975341796875]",tokens/s,26.338184046879615,kWh,6.972878158801776e-05,7.691416231974285e-06,4.14922227362337e-05,0.00011891242055622572,tokens/kWh,529801.6784563856,,s,630,23.917694896697988,0.0379645950741238,0.00045990279258632555,0.03787476921081543,0.03829424209594727,0.03869937973022461,0.03957243930816651,"[0.0388935661315918, 0.03823795318603516, 0.03804390335083008, 0.037841182708740234, 0.03777711868286133, 0.03778355026245117, 0.03787776184082031, 0.038019073486328124, 0.03785014343261719, 0.03786441421508789, 0.03779142379760742, 0.03800243377685547, 0.0378631362915039, 0.03790643310546875, 0.03833123016357422, 0.038150142669677735, 0.03805184173583984, 0.03797148895263672, 0.03807075119018555, 0.03801241683959961, 0.03799296188354492, 0.03791462326049805, 0.03809212875366211, 0.037945121765136716, 0.038962047576904295, 0.037968959808349606, 0.03795859146118164, 0.037773311614990236, 0.0377913932800293, 0.03772383880615234, 0.03771017456054687, 0.03776339340209961, 0.03790137481689453, 0.0378930549621582, 0.03785932922363281, 0.03784435272216797, 0.0377760009765625, 0.03770499038696289, 0.03774537658691406, 
0.03769753646850586, 0.03794739151000977, 0.03783216094970703, 0.038010784149169925, 0.03790707015991211, 0.037786911010742184, 0.03763209533691406, 0.03777190399169922, 0.03770163345336914, 0.037795841217041014, 0.03772998428344727, 0.037822784423828124, 0.03777740859985351, 0.037778751373291015, 0.037591617584228514, 0.037580928802490234, 0.03767910385131836, 0.037689342498779296, 0.0377446403503418, 0.037950687408447266, 0.037800350189208985, 0.037878143310546876, 0.03785932922363281, 0.03785116958618164, 0.038992385864257816, 0.03817881774902344, 0.037838241577148435, 0.03766057586669922, 0.03786108779907227, 0.037696094512939454, 0.03777280044555664, 0.03775481414794922, 0.03762886428833008, 0.0376995849609375, 0.03766678237915039, 0.037721759796142576, 0.03780441665649414, 0.03787980651855469, 0.03786751937866211, 0.03788390350341797, 0.03783679962158203, 0.03778559875488281, 0.037804031372070314, 0.03819513702392578, 0.04011123275756836, 0.03800979232788086, 0.03806339263916016, 0.03846121597290039, 0.037792064666748046, 0.03800947189331055, 0.03803459167480469, 0.03800559997558594, 0.03800678253173828, 0.03781836700439453, 0.03787107086181641, 0.037732448577880856, 0.038121921539306644, 0.037664287567138674, 0.03784675216674805, 0.03785599899291992, 0.039593727111816406, 0.037703712463378905, 0.03795711898803711, 0.03787196731567383, 0.037660385131835936, 0.03788159942626953, 0.03782748794555664, 0.03776921463012695, 0.03790233612060547, 0.03781532669067383, 0.037729248046875, 0.0378081283569336, 0.037725921630859374, 0.038098400115966796, 0.037954368591308595, 0.03785318374633789, 0.03782559967041016, 0.03795596694946289, 0.038023582458496095, 0.03792278289794922, 0.03780422210693359, 0.037899391174316406, 0.03856582260131836, 0.0379986572265625, 0.03778035354614258, 0.03774054336547852, 0.037781246185302736, 0.03876704025268555, 0.03801900863647461, 0.03783126449584961, 0.0376995849609375, 0.03800643157958984, 0.03857993698120117, 0.037885982513427736, 0.03760598373413086, 0.03762995147705078, 0.03777740859985351, 0.037684574127197265, 0.03784502410888672, 0.037805824279785155, 0.03771273422241211, 0.0377938232421875, 0.03788800048828125, 0.03772111892700195, 0.037622753143310546, 0.037443584442138675, 0.03749068832397461, 0.03765404891967773, 0.037693248748779294, 0.03745654296875, 0.037564414978027344, 0.037485950469970705, 0.03749951934814453, 0.037553665161132815, 0.03748294448852539, 0.037428737640380856, 0.03752812957763672, 0.03746627044677735, 0.03787152099609375, 0.03753977584838867, 0.037515262603759765, 0.03769900894165039, 0.03762438583374023, 0.03764131164550781, 0.03778780746459961, 0.037878528594970706, 0.03778355026245117, 0.03805996704101562, 0.03781228637695312, 0.03781017684936523, 0.03779593658447265, 0.0378160629272461, 0.037814369201660154, 0.039069759368896485, 0.03795558547973633, 0.03800796890258789, 0.03777212905883789, 0.03773392105102539, 0.03777788925170898, 0.037781505584716796, 0.037703678131103514, 0.03796329498291016, 0.03783462524414063, 0.03777391815185547, 0.038569984436035154, 0.038623199462890626, 0.037765216827392575, 0.03780307388305664, 0.03766361618041992, 0.03760947036743164, 0.03898601531982422, 0.038317790985107424, 0.03805344009399414, 0.03801123046875, 0.03805142211914062, 0.03892070388793945, 0.038981536865234374, 0.03817299270629883, 0.03792060852050781, 0.03797164916992188, 0.037811519622802735, 0.03773235321044922, 0.03771897506713867, 0.03806032180786133, 0.037709537506103515, 0.03765862274169922, 0.03757660675048828, 
0.03754966354370117, 0.03758729553222656, 0.037521568298339844, 0.03766825485229492, 0.03823471832275391, 0.03770751953125, 0.03760947036743164, 0.03765478515625, 0.03768729782104492, 0.037574657440185545, 0.037591041564941405, 0.03765980911254883, 0.037614433288574216, 0.03762176132202148, 0.03912704086303711, 0.038168575286865236, 0.038125568389892575, 0.03787756729125977, 0.03778579330444336, 0.03784499359130859, 0.03767014312744141, 0.03766758346557617, 0.03805753707885742, 0.03846166229248047, 0.03770111846923828, 0.03778579330444336, 0.03777328109741211, 0.03771449661254883, 0.03773177719116211, 0.03778003311157226, 0.03786342239379883, 0.03802422332763672, 0.03808150482177734, 0.03798428726196289, 0.03793916702270508, 0.03814604949951172, 0.037846656799316404, 0.037846782684326175, 0.037889984130859374, 0.037997249603271485, 0.038293342590332034, 0.0380335693359375, 0.038084606170654296, 0.03816447830200195, 0.03787776184082031, 0.03803353500366211, 0.03904665756225586, 0.03844796752929688, 0.038160255432128906, 0.03836415863037109, 0.038161407470703124, 0.042984798431396486, 0.038253215789794924, 0.03813510513305664, 0.03793580627441406, 0.03784089660644531, 0.03783996963500977, 0.03773532867431641, 0.0378875846862793, 0.037885726928710936, 0.037911041259765625, 0.03777132797241211, 0.037838016510009766, 0.03759183883666992, 0.03773593521118164, 0.03770588684082031, 0.037641857147216795, 0.0376734733581543, 0.03808633422851562, 0.037876350402832035, 0.03781017684936523, 0.038178176879882814, 0.03768143844604492, 0.037707263946533204, 0.03760639953613281, 0.037512767791748045, 0.03754422378540039, 0.0375041618347168, 0.03757494354248047, 0.03773891067504883, 0.03767311859130859, 0.03758451080322266, 0.03752793502807617, 0.03757056045532227, 0.038172351837158204, 0.0376129264831543, 0.037982719421386715, 0.03766316986083985, 0.03770163345336914, 0.03754396820068359, 0.037626049041748044, 0.037606239318847656, 0.03754671859741211, 0.03751139068603516, 0.03760918426513672, 0.03758457565307617, 0.03782511901855469, 0.03748454284667969, 0.037491809844970705, 0.03753257751464844, 0.03750899124145508, 0.037482528686523436, 0.03744918441772461, 0.037528095245361326, 0.037531742095947264, 0.03748771286010742, 0.037595329284667967, 0.037595870971679685, 0.03766886520385742, 0.03875020980834961, 0.04199814224243164, 0.03843910217285156, 0.0381357421875, 0.03859872055053711, 0.03774857711791992, 0.03804585647583008, 0.037850334167480466, 0.037929534912109375, 0.03790460968017578, 0.03785728073120117, 0.037727550506591795, 0.03767359924316406, 0.037761089324951175, 0.03792060852050781, 0.03784908676147461, 0.03774294281005859, 0.03800864028930664, 0.037778976440429685, 0.03773235321044922, 0.03772822570800781, 0.03770214462280273, 0.037738494873046875, 0.037684223175048825, 0.03763302230834961, 0.037824127197265626, 0.03780646514892578, 0.0377446403503418, 0.037763072967529294, 0.0378240966796875, 0.037759391784667966, 0.03811123275756836, 0.037797534942626956, 0.03780771255493164, 0.037792510986328125, 0.041299968719482424, 0.03795881652832031, 0.03786163330078125, 0.03795606231689453, 0.03807040023803711, 0.038184959411621096, 0.03785478210449219, 0.03791507339477539, 0.03788214492797851, 0.038085887908935544, 0.03813206481933594, 0.038034622192382815, 0.03783292770385742, 0.03793379211425781, 0.037967872619628903, 0.037824222564697266, 0.03788828659057617, 0.03785318374633789, 0.03768854522705078, 0.03772086334228516, 0.0377977294921875, 0.03803104019165039, 0.03795558547973633, 
0.037953983306884764, 0.03760131072998047, 0.037649791717529295, 0.03768384170532227, 0.03767705535888672, 0.039212703704833984, 0.03832032012939453, 0.03825680160522461, 0.03791257476806641, 0.03778950500488281, 0.03778169631958008, 0.03772979354858398, 0.037712383270263675, 0.03767465591430664, 0.03771945571899414, 0.03791558456420899, 0.03775068664550781, 0.037699680328369144, 0.03781740951538086, 0.03793392181396484, 0.03779107284545898, 0.037693984985351564, 0.03763017654418945, 0.03769548797607422, 0.03773440170288086, 0.03771507263183594, 0.03777830505371094, 0.0376995849609375, 0.03772387313842773, 0.03772649765014648, 0.037705726623535156, 0.037718017578125, 0.03771187210083008, 0.03791257476806641, 0.038029312133789066, 0.03784524917602539, 0.0379343376159668, 0.03792284774780273, 0.03790217590332031, 0.03791964721679687, 0.037924575805664065, 0.037945343017578126, 0.03792486572265625, 0.03789823913574219, 0.038268417358398435, 0.037951168060302735, 0.03775775909423828, 0.03776883316040039, 0.03780956649780273, 0.03785238265991211, 0.038731521606445315, 0.03915724945068359, 0.038085121154785156, 0.038019073486328124, 0.03794464111328125, 0.037961505889892576, 0.03800060653686523, 0.037970848083496093, 0.03795916748046875, 0.03789433670043945, 0.03837936019897461, 0.03811331176757812, 0.03821718215942383, 0.04090560150146484, 0.038079872131347656, 0.03813625717163086, 0.03818118286132813, 0.038135009765625, 0.03931907272338867, 0.0384040641784668, 0.038894142150878906, 0.03816864013671875, 0.03807164764404297, 0.037978206634521484, 0.037988929748535155, 0.037819454193115234, 0.03793196868896485, 0.03793878555297851, 0.03783926391601562, 0.0390714225769043, 0.03917855834960938, 0.03886025619506836, 0.03825923156738281, 0.0382033920288086, 0.03814310455322266, 0.038155136108398435, 0.03806617736816406, 0.038211742401123044, 0.038499969482421875, 0.03839814376831055, 0.03800271987915039, 0.03795296096801758, 0.03804940795898438, 0.03789686584472656, 0.037908767700195314, 0.03779993438720703, 0.037789695739746096, 0.037771263122558595, 0.037748737335205076, 0.03772825622558594, 0.03765663909912109, 0.037672000885009764, 0.03773324966430664, 0.037653728485107424, 0.03769014358520508, 0.0376995849609375, 0.03826480102539063, 0.03786697769165039, 0.03782092666625977, 0.037711936950683596, 0.037788959503173826, 0.0377371826171875, 0.03769699096679688, 0.03774723052978515, 0.037848190307617186, 0.03786636734008789, 0.03766681671142578, 0.037730079650878906, 0.03774281692504883, 0.03782835388183594, 0.03815641784667969, 0.037699329376220704, 0.037902721405029295, 0.03785302352905273, 0.03851689529418945, 0.03787776184082031, 0.03785923385620117, 0.03776265716552735, 0.03764476776123047, 0.037598529815673826, 0.03762643051147461, 0.03952032089233398, 0.038528705596923826, 0.03911507034301758, 0.03866009521484375, 0.03850048065185547, 0.03817254257202148, 0.03839084625244141, 0.03818102264404297, 0.03820828628540039, 0.0381399040222168, 0.03808448028564453, 0.038174846649169925, 0.0381165771484375, 0.03801279830932617, 0.03809167861938476, 0.03801702499389648, 0.03797808074951172, 0.03793923187255859, 0.03784019088745117, 0.03876870346069336, 0.03788864135742188, 0.03789004898071289, 0.03809689712524414, 0.03846665573120117, 0.03817932891845703, 0.038089118957519534, 0.037969921112060545, 0.03808639907836914, 0.03804595184326172, 0.03825392150878906, 0.0380340805053711, 0.038002113342285156, 0.03797414398193359, 0.037902496337890626, 0.03795587158203125, 0.03787891387939453, 
0.037843841552734375, 0.03781840133666992, 0.03784086227416992, 0.03784406280517578, 0.03787664031982422, 0.03787571334838867, 0.03804726409912109, 0.03796783828735351, 0.037859840393066405, 0.03784294509887695, 0.03788553619384766, 0.037865886688232424, 0.03784294509887695, 0.03794243240356445, 0.038084865570068356, 0.03807497787475586, 0.03991142272949219, 0.038305343627929686, 0.038264289855957034, 0.03860550308227539, 0.03843100738525391, 0.038047679901123045, 0.038154209136962894, 0.03791881561279297, 0.038032737731933594, 0.03787174224853516, 0.03799407958984375, 0.03899596786499023, 0.038581760406494144, 0.03820703887939453, 0.03800364685058594, 0.037983551025390624, 0.03830233764648437, 0.0383488655090332, 0.038042686462402345, 0.03792483139038086, 0.03802006530761719, 0.037974174499511716, 0.037975711822509764, 0.038013118743896485, 0.037892127990722654, 0.0380489616394043, 0.03789078521728516, 0.0379536018371582, 0.037844032287597654, 0.037832897186279295, 0.0379411506652832, 0.03808752059936524, 0.03790028762817383, 0.03790028762817383, 0.03796377563476563, 0.03791987228393555, 0.037950336456298826, 0.03819708633422852, 0.03787408065795898, 0.03787545776367188, 0.03791452789306641, 0.03787356948852539, 0.037807487487792966, 0.03789292907714844, 0.037995903015136716, 0.038152416229248046, 0.03804572677612305, 0.03831232070922851, 0.038520832061767575, 0.038217727661132815, 0.037961761474609376, 0.037887966156005856, 0.03796582412719727, 0.03783865737915039, 0.03806022262573242, 0.037894142150878905, 0.037910526275634765, 0.037951488494873044, 0.0378675537109375, 0.03773846435546875, 0.03889891052246094, 0.03891004943847656, 0.03814675140380859, 0.03794124984741211, 0.038029312133789066, 0.037988353729248046, 0.03821158218383789, 0.03782364654541016, 0.03791049575805664, 0.038052734375, 0.0379183349609375, 0.03793753433227539, 0.038227935791015626, 0.03807644653320313]",tokens/s,26.34033098595032,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 128.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 20.12 MiB is free. Process 205541 has 14.72 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 67.18 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,902.107136,6589.120512,0.0,6186.5984,6098.951168,s,1,14.927158203125,14.927158203125,0.0,14.927158203125,14.927158203125,14.927158203125,14.927158203125,[14.927158203125],,kWh,0.00023453259544164667,2.585945365850905e-05,7.786450673599732e-05,0.00033825655583615303,,MB,1400.89344,7306.346496,0.0,6891.241472,6575.183872,s,10,11.447591552734373,1.1447591552734377,0.0066483095537777275,1.1478670043945312,1.1498494506835937,1.1500173767089843,1.1501517175292968,"[1.12945166015625, 1.135996337890625, 1.1472989501953126, 1.1492005615234375, 1.1413291015625, 1.150185302734375, 1.1464898681640625, 1.14843505859375, 1.1498121337890626, 1.149392578125]",tokens/s,223.62782496275534,kWh,3.3365034042085424e-05,3.6795308681339335e-06,2.2279823379398044e-05,5.932438828961741e-05,tokens/kWh,4315257.306155883,MB,1456.68096,7310.5408,0.0,6893.338624,6575.186432,s,10,57.65196728515625,5.765196728515625,0.02080366906057327,5.76491748046875,5.7923897460937495,5.793774462890625,5.794882236328125,"[5.72457958984375, 5.74243505859375, 5.7578486328125, 5.75404638671875, 5.7656162109375, 5.76421875, 5.7739482421875, 5.782033203125, 5.79208203125, 5.7951591796875]",tokens/s,10.927640975093095,kWh,0.00016985143455291036,1.8736066947479215e-05,0.00011250575667120233,0.00030109325817159196,tokens/kWh,209237.49798508117,,s,630,57.64940335845947,0.09150698945787218,0.0016950634204783265,0.09123726654052734,0.09286253204345704,0.09322157020568847,0.101445630569458,"[0.10121647644042969, 0.08966780853271485, 0.08922537231445313, 0.08915487670898438, 0.09003692626953125, 0.08983961486816407, 0.08947917175292969, 0.09059532928466797, 0.08961023712158203, 0.09085660552978515, 0.09208303833007812, 0.09220438385009766, 0.09039740753173828, 0.08951769256591798, 0.0895450210571289, 0.09141001892089844, 0.08947958374023438, 0.0914698257446289, 0.08953788757324219, 0.0909208984375, 0.08975775909423828, 0.09286835479736329, 0.09045699310302735, 0.0915241928100586, 0.08966851043701173, 0.09152102661132812, 0.08971017456054688, 0.09150498962402344, 0.08960556793212891, 0.09152780914306641, 0.08961228942871094, 0.09190809631347656, 0.09010336303710938, 0.09192214202880859, 0.09018646240234375, 0.09156198120117187, 0.08967782592773438, 0.09199929809570312, 0.08970950317382813, 0.09160054779052734, 0.08963107299804687, 0.09187657928466797, 0.09007315063476562, 0.09160492706298828, 0.09037257385253906, 0.09103330993652343, 0.09028668975830079, 0.09203481292724609, 0.09042537689208985, 0.09145571136474609, 0.09033113861083984, 0.09146572875976562, 0.09111750030517578, 0.09093497467041016, 0.09116915130615234, 0.09134454345703125, 0.09070217895507812, 
0.09110291290283203, 0.09078982543945313, 0.09091487884521485, 0.09079952239990234, 0.09054009246826172, 0.09168284606933594, 0.10139017486572266, 0.08969174194335937, 0.09037001800537109, 0.08935485076904297, 0.090640380859375, 0.08945664215087891, 0.09170086669921874, 0.08948365020751953, 0.09100262451171875, 0.09016140747070313, 0.09541232299804688, 0.09139600372314453, 0.09091891479492188, 0.09050630187988282, 0.08950675201416015, 0.09040076446533203, 0.09044985961914062, 0.09014265441894531, 0.0903578872680664, 0.08976179504394531, 0.09309184265136719, 0.09180364990234376, 0.09114796447753906, 0.0908639373779297, 0.09072230529785157, 0.09059123229980469, 0.09023078155517578, 0.08986809539794922, 0.09058528137207031, 0.09040220642089844, 0.09055702209472656, 0.0919019546508789, 0.09110733032226563, 0.09238118743896484, 0.09095513916015625, 0.09222412872314453, 0.08983248138427734, 0.09218883514404297, 0.08979948425292969, 0.09198579406738282, 0.0897100830078125, 0.09271974182128906, 0.09098854064941406, 0.09213712310791015, 0.09093564605712891, 0.09181292724609375, 0.09072278594970704, 0.09037667083740235, 0.09083673858642578, 0.09036998748779297, 0.09073827362060546, 0.09103433227539062, 0.09114419555664062, 0.09107004547119141, 0.09293567657470703, 0.0913130874633789, 0.09229043579101562, 0.09051197052001952, 0.09167769622802735, 0.09055744171142578, 0.09177702331542968, 0.09058665466308594, 0.09160243225097656, 0.10264575958251954, 0.09019187164306641, 0.09170329284667969, 0.08954879760742188, 0.09166438293457031, 0.09041919708251953, 0.0918363494873047, 0.08955430603027344, 0.09165484619140625, 0.09019596862792968, 0.09459916687011719, 0.09141452789306641, 0.09145279693603516, 0.09027238464355469, 0.09097984313964844, 0.09035008239746094, 0.09144319915771484, 0.09037004852294922, 0.0912212142944336, 0.09038313293457031, 0.09215580749511719, 0.09109718322753907, 0.09172480010986328, 0.09140038299560548, 0.09038726043701172, 0.09060294342041016, 0.09033990478515624, 0.09088124847412109, 0.09023158264160157, 0.09143004608154297, 0.09100319671630859, 0.09175421142578125, 0.09121692657470704, 0.09231747436523438, 0.09041468811035157, 0.09281986999511718, 0.08968192291259766, 0.09274956512451171, 0.08981529235839844, 0.0923156509399414, 0.09091417694091797, 0.0923592987060547, 0.09069686126708984, 0.09299849700927734, 0.09096141052246094, 0.09121616363525391, 0.09057091522216797, 0.09105241394042969, 0.09052310180664062, 0.09085155487060546, 0.09131593322753906, 0.09124073791503906, 0.09198387145996094, 0.09140016174316407, 0.0930570526123047, 0.09055010986328126, 0.09247350311279297, 0.0906055679321289, 0.09169058990478515, 0.09055785369873047, 0.09122303771972656, 0.09117286682128906, 0.09196297454833985, 0.10156230163574219, 0.09029017639160156, 0.09007984161376953, 0.09041731262207031, 0.09024716949462891, 0.09015090942382813, 0.09080825805664063, 0.08971180725097656, 0.0909648666381836, 0.09030802917480468, 0.09574774169921875, 0.09189263916015625, 0.09077760314941406, 0.09129984283447265, 0.09010176086425781, 0.09058509063720703, 0.08998502349853515, 0.08987612915039063, 0.09058544158935547, 0.09011814117431641, 0.09172809600830079, 0.09310950469970704, 0.09163565063476563, 0.09323785400390625, 0.09032825469970703, 0.09164268493652344, 0.09031849670410157, 0.09128569793701172, 0.09035298919677734, 0.090947998046875, 0.09105206298828125, 0.09196173095703125, 0.09110844421386718, 0.0919766082763672, 0.09117820739746094, 0.09121257781982423, 0.09103769683837891, 0.09020620727539062, 
0.09142998504638672, 0.09011510467529296, 0.09125446319580079, 0.09093897247314453, 0.09284690856933593, 0.0911951675415039, 0.09304473876953125, 0.09109503936767578, 0.09125682830810547, 0.09090866851806641, 0.09049088287353516, 0.09079190063476562, 0.09110470581054687, 0.09105862426757813, 0.09128771209716798, 0.09121711730957031, 0.09197872161865234, 0.09255302429199219, 0.0912691192626953, 0.09279283142089843, 0.0905502700805664, 0.09164383697509766, 0.09047456359863282, 0.09191171264648437, 0.09076719665527344, 0.10215833282470703, 0.09035065460205079, 0.09016995239257812, 0.08977238464355469, 0.08957513427734375, 0.0908864288330078, 0.09039052581787109, 0.0906688003540039, 0.09020960235595703, 0.09092995452880859, 0.09434915161132812, 0.09378438568115234, 0.09082669067382812, 0.091840576171875, 0.09077750396728515, 0.09144086456298828, 0.09026358032226563, 0.09150089263916016, 0.08957286071777344, 0.09143551635742188, 0.09098239898681641, 0.0939391326904297, 0.09181603240966797, 0.0911200942993164, 0.09139417266845704, 0.09033920288085938, 0.09097740936279297, 0.09051840209960937, 0.0910355224609375, 0.08971046447753907, 0.09105795288085937, 0.09225059509277343, 0.09352191925048828, 0.09122406768798828, 0.09279692840576172, 0.09046985626220704, 0.0915297622680664, 0.0909656982421875, 0.0915766372680664, 0.09101312255859376, 0.0904599380493164, 0.09120956420898438, 0.09112204742431641, 0.09225830078125, 0.09166175842285157, 0.09293385314941406, 0.09054908752441407, 0.09258092498779297, 0.09091168212890625, 0.09167871856689454, 0.09093318176269531, 0.0916808319091797, 0.09107977294921875, 0.09135945892333984, 0.09204601287841797, 0.09132441711425782, 0.0921743392944336, 0.09144115447998047, 0.09288089752197265, 0.09043312072753906, 0.0924224624633789, 0.09062963104248047, 0.0924534683227539, 0.10141020965576172, 0.09019248199462891, 0.09008451080322266, 0.09047030639648437, 0.09102022552490234, 0.09014476776123047, 0.08947731018066406, 0.09133856201171875, 0.09017139434814453, 0.09084928131103516, 0.09460518646240235, 0.09316365051269532, 0.09069977569580077, 0.09201010894775391, 0.0901349105834961, 0.09245491027832031, 0.0903024673461914, 0.09112576293945313, 0.0901242904663086, 0.09138585662841797, 0.09148947143554688, 0.09286943817138672, 0.09161433410644532, 0.09100291442871093, 0.09156301116943359, 0.09045565032958984, 0.09191817474365234, 0.09026719665527344, 0.09181475067138672, 0.09029631805419921, 0.09318195343017578, 0.09116835021972657, 0.09304220581054687, 0.09118790435791016, 0.09089984130859376, 0.09105680084228515, 0.09109315490722657, 0.0912056655883789, 0.09095574188232422, 0.09136911773681641, 0.09099858856201172, 0.09231209564208985, 0.09117036437988281, 0.09326432037353516, 0.09088956451416015, 0.0918736343383789, 0.09122191619873046, 0.09069551849365234, 0.09149225616455078, 0.09056534576416016, 0.09120146942138672, 0.09111254119873047, 0.09277327728271484, 0.09197567749023437, 0.09308319854736329, 0.09136582183837891, 0.09119033813476562, 0.09163785552978515, 0.09048355102539063, 0.09140428924560547, 0.09122815704345703, 0.0912523193359375, 0.09115471649169922, 0.10269324493408204, 0.09141862487792969, 0.09014697265625, 0.09181577301025391, 0.09017139434814453, 0.09228278350830078, 0.0902053451538086, 0.09170630645751954, 0.09024307250976563, 0.09203305816650391, 0.09287881469726562, 0.09213951873779297, 0.09153065490722656, 0.09098262023925781, 0.091432861328125, 0.09021692657470703, 0.09136089324951172, 0.09021887969970703, 0.09215122985839844, 
0.09051155090332032, 0.09339737701416016, 0.09118659210205078, 0.09306758117675781, 0.09121974182128906, 0.09139401245117187, 0.09074127960205078, 0.0909271011352539, 0.09044172668457032, 0.09096924591064454, 0.09097711944580078, 0.09110118103027344, 0.09378585815429688, 0.09133286285400391, 0.09300502777099609, 0.09118595123291015, 0.09097974395751954, 0.09155763244628906, 0.09106031799316407, 0.09041177368164062, 0.09095362854003906, 0.09096403503417969, 0.09157756805419921, 0.09304966735839844, 0.09125254058837891, 0.09286188507080079, 0.09126579284667968, 0.09176678466796875, 0.09115647888183594, 0.09054726409912109, 0.09125910186767579, 0.09106854248046875, 0.09166108703613281, 0.09130496215820312, 0.09306768035888671, 0.09153372955322266, 0.09287184143066406, 0.09142972564697266, 0.0911911392211914, 0.0912088623046875, 0.09096089935302734, 0.09147142028808594, 0.09125523376464843, 0.09310569763183593, 0.10366973114013672, 0.09028937530517578, 0.0913620834350586, 0.09063795471191406, 0.09198963165283203, 0.09009011077880859, 0.09199014282226563, 0.09014886474609375, 0.09206169891357421, 0.09041059112548828, 0.09555804443359375, 0.09187052917480469, 0.090908447265625, 0.09234114837646484, 0.09024259185791016, 0.09269705963134765, 0.09040275573730469, 0.0922542724609375, 0.09056230163574219, 0.0913466567993164, 0.09153145599365234, 0.09285257720947265, 0.09170697784423829, 0.09103404998779296, 0.09189542388916015, 0.09099292755126953, 0.09288854217529297, 0.09037910461425781, 0.09227852630615234, 0.09052159881591797, 0.0916883544921875, 0.09114450836181641, 0.09198210906982422, 0.09136029052734375, 0.09187161254882813, 0.09194489288330078, 0.09117558288574219, 0.09296470642089844, 0.09048489379882813, 0.09230038452148437, 0.09058544158935547, 0.09129427337646484, 0.0921190414428711, 0.09133897399902344, 0.09106604766845704, 0.09249187469482421, 0.09364198303222657, 0.09128947448730469, 0.09166118621826172, 0.0912691192626953, 0.09107046508789063, 0.09123379516601562, 0.09105059051513673, 0.09198560333251952, 0.09134246063232422, 0.09273404693603515, 0.09158793640136718, 0.09339561462402343, 0.0913563232421875, 0.09112012481689453, 0.09172415924072265, 0.09143292999267578, 0.0911475830078125, 0.10435788726806641, 0.09028755187988281, 0.0912266845703125, 0.09029017639160156, 0.0921144027709961, 0.09020470428466797, 0.09199411010742188, 0.09095967864990234, 0.09219296264648437, 0.09089024353027343, 0.09495120239257812, 0.092253662109375, 0.09111833953857422, 0.09260147094726562, 0.09063308715820312, 0.09234226989746094, 0.09036956787109375, 0.09147440338134766, 0.0912547836303711, 0.09158656311035156, 0.09188524627685547, 0.0926104965209961, 0.09202291107177735, 0.09128937530517578, 0.09292642974853516, 0.09048831939697266, 0.09298297882080078, 0.09065760040283204, 0.09161231994628906, 0.09045247650146485, 0.09225865936279297, 0.092065185546875, 0.09184111785888673, 0.091580322265625, 0.09169673919677734, 0.09371273803710937, 0.09116687774658203, 0.09214771270751954, 0.0909097900390625, 0.09163459014892578, 0.09096742248535156, 0.09152780914306641, 0.09228902435302734, 0.09166643524169922, 0.0932715835571289, 0.0917418212890625, 0.09217865753173828, 0.09193126678466797, 0.09094758605957032, 0.09138380432128906, 0.09111734771728516, 0.09273065948486328, 0.09158707427978516, 0.09291926574707031, 0.09136022186279297, 0.0912702407836914, 0.09252249908447266, 0.09195315551757813, 0.0931583023071289, 0.09145958709716796, 0.09208627319335938, 0.09133257293701172, 0.09134697723388673, 
0.10146009826660156, 0.09062973022460938, 0.09108723449707032, 0.090833984375, 0.090251708984375, 0.09109490966796875, 0.0908909149169922, 0.09223065948486328, 0.0908338851928711, 0.09241603088378907, 0.09315225219726563, 0.09313775634765625, 0.09122988891601562, 0.09091053009033204, 0.09173011016845703, 0.09083740997314453, 0.09170130920410156, 0.09057689666748046, 0.09371033477783203, 0.09081231689453124, 0.09320166778564454, 0.09199702453613282, 0.09194493103027344, 0.09177295684814453, 0.09119676971435547, 0.09367619323730468, 0.0908431396484375, 0.09206121826171874, 0.09158499145507812, 0.09200230407714843, 0.09126834869384766, 0.09211119842529297, 0.092170654296875, 0.09201805114746094, 0.09293875122070312, 0.09115046691894531, 0.09165542602539062, 0.09118386840820313, 0.09130802917480468, 0.09106022644042969, 0.0920821762084961, 0.09167839813232422, 0.09141305541992187, 0.09445756530761719, 0.09153708648681641, 0.09223203277587891, 0.09159423828125, 0.09156835174560547, 0.09279427337646484, 0.0910831069946289, 0.09275590515136718, 0.09085398101806641, 0.09253017425537109, 0.0918075180053711, 0.09201123046875, 0.09254297637939453, 0.09201590728759766, 0.09356502532958984, 0.09128409576416016, 0.0928047332763672, 0.09166639709472656, 0.09145999908447265, 0.09243430328369141]",tokens/s,10.928126976140748,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,818.302976,543.031296,0.0,140.509184,133.641728,s,1,7.93151171875,7.93151171875,0.0,7.93151171875,7.93151171875,7.93151171875,7.93151171875,[7.93151171875],,kWh,1.779743300417446e-05,1.954670319338106e-06,5.504171069994901e-06,2.5256274393507465e-05,,MB,1260.531712,620.62592,0.0,205.520896,173.492224,s,10,0.25205052757263186,0.025205052757263185,0.0001848238910571806,0.025174415588378907,0.025339369010925292,0.02549709234237671,0.02562327100753784,"[0.025654815673828126, 0.02525788879394531, 0.025054943084716796, 0.025115840911865233, 0.025073888778686524, 0.025129535675048827, 0.02494883155822754, 0.025291168212890625, 0.025304319381713868, 0.025219295501708986]",tokens/s,10156.693678263779,kWh,7.407411223244105e-07,8.168971985955282e-08,4.841402632284253e-07,1.3065711054123886e-06,tokens/kWh,195932696.61294064,MB,1293.385728,631.11168,0.0,216.006656,173.494784,s,10,11.386755737304687,1.1386755737304688,0.0034856672807422025,1.1382359619140625,1.1425543701171874,1.1431753173828123,1.1436720751953124,"[1.1424163818359374, 1.137698486328125, 1.133499755859375, 1.1366456298828125, 1.1437962646484374, 1.133226318359375, 1.1374332275390624, 1.1422152099609375, 1.141051025390625, 1.1387734375]",tokens/s,55.32743606118003,kWh,3.308174633517634e-05,3.648430849238522e-06,1.211159203557092e-05,4.884176921998579e-05,tokens/kWh,1289879.564277142,,s,630,11.380668195724484,0.01806455269162617,0.00033418859248189305,0.017995391845703125,0.018252387046813964,0.01851439199447632,0.01969503170013428,"[0.018423776626586914, 
0.01856384086608887, 0.01987583923339844, 0.018100351333618165, 0.018136831283569337, 0.018093759536743165, 0.018092063903808592, 0.018157567977905274, 0.0180863037109375, 0.018036735534667968, 0.017948352813720703, 0.01815497589111328, 0.017996639251708985, 0.018062847137451172, 0.018010656356811525, 0.017973215103149413, 0.017999872207641602, 0.018146495819091796, 0.01799616050720215, 0.018112319946289063, 0.01868377685546875, 0.018018047332763673, 0.017927072525024415, 0.018001087188720705, 0.01967401695251465, 0.01828220748901367, 0.018009408950805664, 0.01850979232788086, 0.01797427177429199, 0.017954816818237306, 0.01795686340332031, 0.01807360076904297, 0.01787487983703613, 0.01790771293640137, 0.017950527191162108, 0.017973503112792968, 0.018062496185302736, 0.017862911224365233, 0.017877599716186524, 0.01787273597717285, 0.01788444709777832, 0.017937631607055665, 0.01815657615661621, 0.017903232574462892, 0.01804128074645996, 0.017983392715454103, 0.018069503784179687, 0.017927871704101563, 0.017928863525390627, 0.017979711532592774, 0.01812380790710449, 0.018017248153686525, 0.018652191162109377, 0.01810736083984375, 0.017970848083496093, 0.01832943916320801, 0.01805539131164551, 0.01812713623046875, 0.017915903091430666, 0.018190336227416993, 0.01808380889892578, 0.01807142448425293, 0.0179703369140625, 0.01793075180053711, 0.019703615188598634, 0.01792223930358887, 0.018018304824829103, 0.017917919158935546, 0.018026016235351563, 0.018045440673828125, 0.018056383132934572, 0.018015039443969726, 0.018046016693115233, 0.017984256744384766, 0.018449983596801757, 0.018068096160888673, 0.018060895919799806, 0.018122783660888674, 0.018049407958984375, 0.018059263229370116, 0.017991680145263672, 0.017991296768188475, 0.018166143417358397, 0.017934335708618163, 0.01785817527770996, 0.018087968826293946, 0.01801046371459961, 0.01804287910461426, 0.018011615753173827, 0.01803264045715332, 0.017895967483520507, 0.01792006492614746, 0.01795270347595215, 0.01812396812438965, 0.018025279998779297, 0.017872032165527345, 0.01828646469116211, 0.017990623474121094, 0.017882240295410155, 0.018341920852661134, 0.018076511383056642, 0.018095808029174806, 0.017883455276489258, 0.017901567459106444, 0.017869855880737303, 0.01802684783935547, 0.017981632232666016, 0.018014944076538086, 0.018002687454223634, 0.018193376541137694, 0.018077695846557617, 0.018500799179077147, 0.017973600387573244, 0.018061376571655272, 0.01799740791320801, 0.017956832885742187, 0.018107040405273438, 0.018004512786865233, 0.01787664031982422, 0.01803878402709961, 0.01785683250427246, 0.017866432189941408, 0.018050752639770507, 0.017895744323730468, 0.01786675262451172, 0.018010112762451173, 0.017723007202148436, 0.018086784362792967, 0.018089984893798827, 0.01801366424560547, 0.01813327980041504, 0.018134559631347656, 0.018285280227661134, 0.018025503158569336, 0.017998815536499024, 0.01790924835205078, 0.018054752349853515, 0.01800284767150879, 0.017934335708618163, 0.01802073669433594, 0.018062976837158202, 0.01795686340332031, 0.017971359252929687, 0.018020191192626954, 0.017923871994018556, 0.017912031173706055, 0.01801420783996582, 0.017901567459106444, 0.01787392044067383, 0.017853439331054686, 0.018331392288208008, 0.01782707214355469, 0.017813888549804688, 0.017864864349365236, 0.017935007095336915, 0.017999616622924805, 0.017865983963012696, 0.01794358444213867, 0.017956352233886717, 0.017965024948120117, 0.018524831771850585, 0.01812575912475586, 0.017890207290649413, 0.018267648696899414, 0.01805500793457031, 
0.01795529556274414, 0.018048704147338866, 0.017946943283081055, 0.017876991271972655, 0.01794156837463379, 0.018139583587646484, 0.017999935150146484, 0.01800966453552246, 0.018104223251342772, 0.018063711166381835, 0.01809056091308594, 0.017921472549438478, 0.01789606475830078, 0.01789132881164551, 0.017893247604370117, 0.017907520294189454, 0.01781171226501465, 0.017845855712890626, 0.017869279861450194, 0.017819232940673828, 0.017768863677978516, 0.017836032867431642, 0.018025503158569336, 0.0179683837890625, 0.0178853759765625, 0.01798384094238281, 0.0180916805267334, 0.01817763137817383, 0.01796131134033203, 0.018026079177856445, 0.018030464172363283, 0.01796156883239746, 0.017938432693481447, 0.018036735534667968, 0.017881088256835938, 0.0178668155670166, 0.017946687698364258, 0.01792959976196289, 0.017857023239135742, 0.018010112762451173, 0.018251775741577148, 0.017872896194458008, 0.01799724769592285, 0.017966880798339843, 0.017998655319213866, 0.018015647888183595, 0.017970048904418945, 0.01793404769897461, 0.017787103652954103, 0.017960319519042967, 0.017860992431640625, 0.018455615997314455, 0.01792508888244629, 0.017919008255004882, 0.017914304733276366, 0.01797088050842285, 0.017990720748901366, 0.017803199768066408, 0.017876256942749025, 0.018042623519897463, 0.018069568634033202, 0.01781407928466797, 0.01782956886291504, 0.01799622344970703, 0.017850143432617187, 0.017911840438842773, 0.018381023406982423, 0.020151391983032226, 0.018241600036621095, 0.018010976791381837, 0.01800396728515625, 0.017961183547973634, 0.018250879287719728, 0.018117471694946288, 0.017980703353881834, 0.01786729621887207, 0.018021568298339844, 0.017883968353271485, 0.017913375854492188, 0.017875423431396486, 0.017835744857788084, 0.017825279235839844, 0.01884649658203125, 0.018314783096313476, 0.01801215934753418, 0.01801468849182129, 0.017969152450561524, 0.01794892883300781, 0.01790540885925293, 0.01793619155883789, 0.01794256019592285, 0.0179182071685791, 0.017846176147460938, 0.017874847412109374, 0.019107551574707032, 0.01793471908569336, 0.017926015853881837, 0.017924224853515625, 0.01804060745239258, 0.018143455505371095, 0.017980928421020507, 0.017974815368652343, 0.017926944732666015, 0.018097728729248048, 0.017958784103393556, 0.01790883255004883, 0.018085535049438477, 0.017985536575317384, 0.018002143859863283, 0.018480480194091795, 0.018103967666625975, 0.017941375732421876, 0.017925247192382813, 0.01803446388244629, 0.018338560104370117, 0.01858892822265625, 0.01793715286254883, 0.01797657585144043, 0.01788751983642578, 0.01793849563598633, 0.017885471343994142, 0.018157951354980467, 0.01798963165283203, 0.017955167770385742, 0.01805619239807129, 0.017928831100463866, 0.017960992813110352, 0.018003807067871094, 0.01798303985595703, 0.018020959854125978, 0.01827395248413086, 0.01801865577697754, 0.017895423889160156, 0.01803264045715332, 0.01789923286437988, 0.01794895935058594, 0.021154815673828126, 0.01819340705871582, 0.01816307258605957, 0.01790224075317383, 0.017919967651367188, 0.018029920578002928, 0.02151897621154785, 0.01803696060180664, 0.018132863998413087, 0.01793609619140625, 0.017968639373779297, 0.018524608612060546, 0.017970783233642578, 0.018221376419067382, 0.01778518486022949, 0.017970720291137696, 0.017866111755371093, 0.018699424743652344, 0.018043807983398438, 0.01795568084716797, 0.017977344512939454, 0.018051168441772462, 0.017932191848754882, 0.01792848014831543, 0.01810345649719238, 0.017891199111938476, 0.01839993667602539, 0.017876991271972655, 
0.018008064270019532, 0.01795430374145508, 0.018076032638549806, 0.018069120407104493, 0.017992191314697266, 0.017840192794799804, 0.01791414451599121, 0.01796879959106445, 0.017934335708618163, 0.01804697608947754, 0.01806924819946289, 0.01802470397949219, 0.017987648010253907, 0.0180795841217041, 0.017981536865234377, 0.018098175048828127, 0.018708351135253907, 0.018067583084106446, 0.01790127944946289, 0.017760480880737305, 0.017961023330688476, 0.017893600463867187, 0.017825567245483398, 0.018098175048828127, 0.01784364891052246, 0.017900096893310548, 0.017995264053344725, 0.01786931228637695, 0.01793235206604004, 0.01782707214355469, 0.017881792068481447, 0.01782579231262207, 0.017946399688720704, 0.017863168716430664, 0.017919200897216797, 0.018039295196533203, 0.01788688087463379, 0.017903583526611328, 0.018338176727294923, 0.018104320526123048, 0.017890655517578125, 0.017998048782348633, 0.017885248184204103, 0.01790604782104492, 0.017731584548950196, 0.017907039642333984, 0.017780704498291014, 0.017809120178222657, 0.017895872116088868, 0.017824575424194335, 0.017995519638061522, 0.017981056213378907, 0.017986335754394532, 0.01851254463195801, 0.018015359878540037, 0.01791484832763672, 0.017987680435180665, 0.018182655334472657, 0.018188543319702148, 0.018165760040283203, 0.017897472381591797, 0.017942752838134766, 0.017984928131103514, 0.01797158432006836, 0.01805308723449707, 0.0181309757232666, 0.017968544006347655, 0.018020351409912108, 0.01798761558532715, 0.018010688781738282, 0.017932031631469728, 0.01796441650390625, 0.01802764892578125, 0.018083839416503905, 0.018167232513427733, 0.017960384368896486, 0.018010591506958006, 0.018122463226318358, 0.018127552032470705, 0.018075647354125975, 0.01806048011779785, 0.018203456878662108, 0.018107999801635744, 0.017967519760131837, 0.01801625633239746, 0.018051231384277344, 0.01806729507446289, 0.017946624755859376, 0.017959199905395507, 0.0179770565032959, 0.01806070327758789, 0.018024799346923828, 0.018032800674438475, 0.018257888793945312, 0.018097824096679687, 0.018016895294189452, 0.018022239685058592, 0.01796656036376953, 0.017981983184814452, 0.018189760208129884, 0.018067903518676758, 0.017958303451538087, 0.018120832443237304, 0.01799440002441406, 0.01800726318359375, 0.017971359252929687, 0.01809286308288574, 0.018034431457519533, 0.01851590347290039, 0.017933759689331055, 0.018018911361694336, 0.017885215759277345, 0.01802579116821289, 0.018065343856811522, 0.018141952514648438, 0.018034624099731444, 0.017995168685913086, 0.01790764808654785, 0.017737855911254884, 0.017744640350341796, 0.017671615600585937, 0.017727903366088867, 0.017917951583862304, 0.017901567459106444, 0.01843404769897461, 0.01813692855834961, 0.01809174346923828, 0.0180229434967041, 0.018302879333496093, 0.018218143463134766, 0.017916288375854492, 0.017957183837890626, 0.01786422348022461, 0.017928831100463866, 0.01799273681640625, 0.018351072311401366, 0.01803878402709961, 0.01791155242919922, 0.01878451156616211, 0.019398656845092774, 0.0191712646484375, 0.018351455688476563, 0.018002687454223634, 0.01803865623474121, 0.01801126480102539, 0.017964000701904296, 0.017921087265014648, 0.018146240234375, 0.01794598388671875, 0.018020383834838866, 0.017915712356567384, 0.018123552322387694, 0.017936384201049805, 0.017958303451538087, 0.018233951568603517, 0.017976640701293945, 0.017924800872802734, 0.01804083251953125, 0.01930847930908203, 0.017997472763061524, 0.01800783920288086, 0.017922271728515626, 0.0180567684173584, 0.018154304504394533, 
0.018314464569091797, 0.017906496047973633, 0.01799782371520996, 0.01805721664428711, 0.018259424209594727, 0.01809391975402832, 0.018275007247924805, 0.018313440322875976, 0.018149152755737304, 0.018197919845581053, 0.018651744842529298, 0.01801081657409668, 0.018035903930664062, 0.018023231506347655, 0.018067455291748045, 0.017936384201049805, 0.018232799530029296, 0.017948383331298827, 0.017992031097412108, 0.01795840072631836, 0.017943552017211914, 0.018005823135375975, 0.017964799880981444, 0.018057504653930665, 0.018095455169677734, 0.01801091194152832, 0.01817558479309082, 0.018010047912597655, 0.01798601531982422, 0.018116607666015624, 0.018180416107177733, 0.01824483108520508, 0.017955583572387697, 0.01799344062805176, 0.017946624755859376, 0.017913728713989257, 0.017886560440063478, 0.01817884826660156, 0.0180118408203125, 0.017958976745605468, 0.01788153648376465, 0.017839935302734374, 0.017958911895751953, 0.018108448028564452, 0.017954784393310545, 0.017989376068115234, 0.017930912017822265, 0.01795840072631836, 0.017934431076049806, 0.017960960388183594, 0.018882688522338868, 0.01867884826660156, 0.019063648223876954, 0.018167007446289064, 0.01803539276123047, 0.01790883255004883, 0.017939456939697264, 0.017959999084472655, 0.018060192108154297, 0.018054176330566406, 0.01806399917602539, 0.018025983810424806, 0.018043584823608398, 0.019472543716430663, 0.018141536712646483, 0.017948320388793945, 0.017885183334350584, 0.017938432693481447, 0.017949983596801757, 0.01976304054260254, 0.01807142448425293, 0.01798246383666992, 0.01793040084838867, 0.018093599319458007, 0.018042367935180666, 0.01857244873046875, 0.018023551940917967, 0.01805708885192871, 0.017957759857177735, 0.018136064529418947, 0.017945472717285155, 0.017964895248413087, 0.01797292709350586, 0.01803926467895508, 0.018941152572631837, 0.01812944030761719, 0.018024799346923828, 0.018001823425292968, 0.01792812728881836, 0.018829376220703124, 0.018120447158813478, 0.01801785659790039, 0.017910463333129883, 0.01788649559020996, 0.017934335708618163, 0.018035135269165038, 0.018001855850219725, 0.017942943572998048, 0.017929824829101562, 0.01800543975830078, 0.01793484878540039, 0.017995552062988283, 0.017840320587158204, 0.01783852767944336, 0.017897472381591797, 0.01784012794494629, 0.018301120758056642, 0.01812646484375, 0.017901952743530274, 0.017945695877075195, 0.01787788772583008, 0.017971200942993162, 0.01794595146179199, 0.01807356834411621, 0.020451871871948243, 0.01797324752807617, 0.017932479858398437, 0.018052928924560546, 0.01805120086669922, 0.018089279174804688, 0.01791644859313965, 0.017883167266845704, 0.017932159423828125, 0.01836454391479492, 0.017868896484375, 0.01787718391418457, 0.017880800247192383, 0.01790771293640137, 0.017913471221923827, 0.017912191390991213, 0.017936639785766602, 0.017866655349731444, 0.017897087097167967, 0.01796329689025879, 0.01864406394958496, 0.01808060836791992, 0.017928192138671875]",tokens/s,55.357030814471834,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.52 GiB is free. Process 113745 has 13.22 GiB memory in use. Of the allocated memory 13.10 GiB is allocated by PyTorch, and 6.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,819.63008,652.0832,0.0,249.561088,236.183552,s,1,8.46774609375,8.46774609375,0.0,8.46774609375,8.46774609375,8.46774609375,8.46774609375,[8.46774609375],,kWh,2.948941470418352e-05,3.2457430116192323e-06,9.203062917989557e-06,4.1938220633792314e-05,,MB,1289.990144,756.9408,0.0,341.835776,295.187456,s,10,0.5692166366577148,0.056921663665771484,0.00019196185068508464,0.05693083190917969,0.057161652755737306,0.05721489009857177,0.05725747997283935,"[0.05726812744140625, 0.056790206909179686, 0.05685891342163086, 0.05668412780761719, 0.05702627182006836, 0.05691455841064453, 0.056947105407714846, 0.05714982223510742, 0.056599296569824216, 0.05697820663452149]",tokens/s,4497.408956687604,kWh,1.7516466204091658e-06,1.9305352877369698e-07,1.1612284738922726e-06,3.105928623075135e-06,tokens/kWh,82423014.52070656,MB,1322.528768,794.689536,0.0,379.584512,300.017664,s,10,22.01041723632812,2.201041723632813,0.006431360996584057,2.2009888916015625,2.2076276123046874,2.211374719238281,2.214372404785156,"[2.201778076171875, 2.19366552734375, 2.198657470703125, 2.206794921875, 2.192263671875, 2.2041005859375, 2.215121826171875, 2.20248828125, 2.19534716796875, 2.20019970703125]",tokens/s,28.622810428154306,kWh,6.389133519250488e-05,7.047098314760946e-06,2.3644680259707176e-05,9.4583113766973e-05,tokens/kWh,666080.8413985483,,s,630,22.003552829742453,0.034926274332924494,0.0005763521804872355,0.03481252861022949,0.03523347625732422,0.035550552749633785,0.03739608402252199,"[0.03458649444580078, 0.03475263977050781, 0.03467468643188477, 0.034772991180419925, 0.03588323211669922, 0.03486697769165039, 0.0348600959777832, 0.03484153747558594, 0.03538556671142578, 0.03483606338500977, 0.03547155380249024, 0.03490560150146484, 0.03508889770507812, 0.03470547103881836, 0.03510470581054687, 0.03498416137695313, 0.034993953704833984, 0.034960960388183596, 0.034939327239990235, 0.03522969436645508, 0.03476492691040039, 0.03482611083984375, 0.03505155181884766, 0.034799583435058595, 0.03487526321411133, 0.03465836715698242, 0.03500038528442383, 0.035227840423583984, 0.034778526306152344, 0.03479776000976562, 0.03471987152099609, 0.03475868988037109, 0.03479344177246094, 0.03482444763183594, 0.03561888122558594, 0.03530115127563477, 0.03655680084228516, 0.03482419204711914, 0.03509417724609375, 0.034689342498779294, 0.034782463073730466, 0.0348454704284668, 0.0347589111328125, 0.034774784088134766, 0.034727745056152344, 0.03496777725219727, 0.0349159049987793, 0.03478073501586914, 0.035255390167236327, 0.034862846374511716, 0.03474249649047852, 0.03453740692138672, 0.034518878936767576, 0.034846942901611326, 0.03459612655639648, 
0.03492095947265625, 0.03570687866210937, 0.03468479919433594, 0.03482575988769531, 0.03521356964111328, 0.03472828674316406, 0.03470252990722656, 0.03460742568969727, 0.03450281524658203, 0.03496780776977539, 0.03501702499389649, 0.03484902572631836, 0.03482406234741211, 0.03473436737060547, 0.03458428955078125, 0.034840576171875, 0.03465011215209961, 0.03466854476928711, 0.03460121536254883, 0.03483126449584961, 0.03518960189819336, 0.034748416900634765, 0.034649120330810544, 0.034695457458496094, 0.03455408096313477, 0.03463756942749024, 0.0345972785949707, 0.0349136962890625, 0.03467516708374024, 0.03471404647827148, 0.034646305084228515, 0.03459033584594726, 0.034865249633789064, 0.034619102478027346, 0.03678031921386719, 0.03500249481201172, 0.03476601409912109, 0.035678241729736326, 0.03491088104248047, 0.03470336151123047, 0.03478348922729492, 0.03480140686035156, 0.03456723022460938, 0.034864063262939456, 0.03466009521484375, 0.03471747207641602, 0.03483884811401367, 0.03457219314575195, 0.034684513092041014, 0.035506847381591794, 0.03483427047729492, 0.03475487899780273, 0.03472777557373047, 0.03471974563598633, 0.03474563217163086, 0.034777984619140626, 0.03478307342529297, 0.03470892715454101, 0.03475750350952148, 0.03478051376342774, 0.03466864013671875, 0.03472969436645508, 0.034681472778320316, 0.03456195068359375, 0.035108863830566404, 0.03496755218505859, 0.034705406188964845, 0.03476684951782227, 0.03472339248657227, 0.0346190414428711, 0.034863903045654294, 0.03452489471435547, 0.03501289749145508, 0.0349648323059082, 0.03496435165405273, 0.03543427276611328, 0.035194847106933595, 0.035340320587158205, 0.034993377685546875, 0.03470655822753906, 0.034868896484375, 0.0349349136352539, 0.03489769744873047, 0.03465020751953125, 0.035027328491210936, 0.034873184204101564, 0.034998046875, 0.0349200325012207, 0.034595230102539065, 0.03467164611816406, 0.034789985656738284, 0.03491187286376953, 0.0347371826171875, 0.035902175903320316, 0.034729248046875, 0.03478255844116211, 0.03482799911499023, 0.03475088119506836, 0.034730239868164064, 0.03817385482788086, 0.03470003128051758, 0.03495043182373047, 0.03475724792480469, 0.03467695999145508, 0.035019840240478516, 0.034958240509033206, 0.03537424087524414, 0.03491721725463867, 0.03482172775268555, 0.034557567596435544, 0.034694049835205076, 0.03487321472167969, 0.034692832946777344, 0.03466009521484375, 0.034628128051757814, 0.03495110321044922, 0.03456726455688477, 0.03462470245361328, 0.034678558349609374, 0.03481625747680664, 0.03491984176635742, 0.03454524612426758, 0.0347061767578125, 0.034680831909179685, 0.03480899047851563, 0.034820606231689456, 0.034550113677978514, 0.03459664154052734, 0.034916416168212894, 0.03490012741088867, 0.034823646545410154, 0.03464246368408203, 0.03458867263793945, 0.03464191818237305, 0.03482611083984375, 0.035175838470458985, 0.03515055847167969, 0.03470950317382813, 0.03471571350097656, 0.03476678466796875, 0.03492454528808594, 0.03506758499145508, 0.03493900680541992, 0.03488358306884766, 0.034606624603271484, 0.034616222381591795, 0.034608062744140626, 0.034929473876953124, 0.03502905654907226, 0.0351591682434082, 0.03469305419921875, 0.034723968505859376, 0.03472256088256836, 0.0348359375, 0.03466870498657226, 0.0354574089050293, 0.040374305725097655, 0.03490403366088867, 0.03481190490722656, 0.03482419204711914, 0.034885215759277347, 0.03493497467041016, 0.03541219329833984, 0.034985183715820316, 0.035174560546875, 0.034961536407470704, 0.0349285774230957, 0.035037151336669924, 
0.034677631378173826, 0.03558355331420898, 0.03474198532104492, 0.03474419021606445, 0.034772991180419925, 0.034759231567382816, 0.03477231979370117, 0.034530303955078126, 0.034649761199951175, 0.0344637451171875, 0.034782367706298827, 0.034648929595947266, 0.03475247955322266, 0.0346399040222168, 0.034672767639160156, 0.034912128448486325, 0.034815841674804685, 0.03509027099609375, 0.034972129821777345, 0.03485606384277344, 0.034809921264648436, 0.034703296661376955, 0.034872318267822264, 0.03558371353149414, 0.03491823959350586, 0.034885345458984376, 0.034810497283935544, 0.034727073669433596, 0.03848777770996094, 0.03473788833618164, 0.03496956634521484, 0.034896705627441404, 0.034890846252441404, 0.03506867218017578, 0.03551420974731445, 0.03495670318603516, 0.03534716796875, 0.03459423828125, 0.03487705612182617, 0.03477561569213867, 0.034672382354736325, 0.03478799819946289, 0.03472959899902344, 0.034613632202148435, 0.034781150817871094, 0.034608577728271486, 0.034654815673828124, 0.03455590438842773, 0.03472505569458008, 0.03461775970458984, 0.03500070571899414, 0.035906913757324216, 0.034957759857177736, 0.034762622833251954, 0.03490832138061523, 0.03468729782104492, 0.03459040069580078, 0.034637409210205077, 0.03466700744628906, 0.034871425628662106, 0.03479667282104492, 0.03570163345336914, 0.03459107208251953, 0.03504707336425781, 0.03459052658081055, 0.03523334503173828, 0.034769664764404296, 0.034627166748046875, 0.034465473175048826, 0.034726593017578126, 0.03474422454833984, 0.03451289749145508, 0.034598911285400394, 0.03457843017578125, 0.034715648651123046, 0.034582527160644534, 0.034539039611816404, 0.034724319458007816, 0.03461702346801758, 0.03454937744140625, 0.034936641693115236, 0.03466976165771484, 0.0347022705078125, 0.03479014587402344, 0.03472588729858399, 0.03450217437744141, 0.03453180694580078, 0.034662654876708984, 0.03469465637207031, 0.03467001724243164, 0.034668479919433594, 0.03493308639526367, 0.03582969665527344, 0.03489388656616211, 0.0347685432434082, 0.034853214263916014, 0.03470764923095703, 0.03474003219604492, 0.034764801025390625, 0.034816001892089846, 0.03469881439208984, 0.034855358123779295, 0.034648063659667966, 0.03471516799926758, 0.03479167938232422, 0.03487772750854492, 0.03467900848388672, 0.034816001892089846, 0.03480137634277344, 0.034932319641113284, 0.03492291259765625, 0.03505500793457031, 0.03512790298461914, 0.034971519470214846, 0.03474444961547852, 0.03480575942993164, 0.035059711456298825, 0.03494086456298828, 0.034949184417724606, 0.035160064697265625, 0.03502284622192383, 0.03496992111206055, 0.03528262329101563, 0.03484201431274414, 0.035011390686035156, 0.035274528503417966, 0.03477004623413086, 0.03496217727661133, 0.034968894958496095, 0.03495151901245117, 0.034964160919189455, 0.0348034553527832, 0.034875423431396484, 0.03491839981079101, 0.034805023193359375, 0.03506454467773437, 0.03499808120727539, 0.03500870513916016, 0.034879615783691406, 0.03505548858642578, 0.034821182250976565, 0.03483334350585938, 0.03486294555664062, 0.03485295867919922, 0.03500185775756836, 0.034781150817871094, 0.03766947174072265, 0.034933761596679686, 0.03533750534057617, 0.0355970573425293, 0.03500572967529297, 0.03474003219604492, 0.03486240005493164, 0.03479779052734375, 0.034957889556884766, 0.0346940803527832, 0.03495766448974609, 0.03493798446655273, 0.03493904113769531, 0.034831775665283206, 0.034894817352294924, 0.03501433563232422, 0.034894302368164064, 0.03515606307983398, 0.035095584869384765, 0.03503606414794922, 
0.03501251220703125, 0.03492172622680664, 0.03511715316772461, 0.03506604766845703, 0.03494847869873047, 0.0349071044921875, 0.034949054718017576, 0.03491644668579102, 0.034740222930908206, 0.03504947280883789, 0.03469107055664063, 0.03475046539306641, 0.03478726577758789, 0.0346646728515625, 0.034940769195556644, 0.036071136474609376, 0.04329081726074219, 0.03529264068603516, 0.0352856330871582, 0.03504035186767578, 0.034939327239990235, 0.03558028793334961, 0.035215614318847656, 0.03495004653930664, 0.03496371078491211, 0.0352446403503418, 0.035348575592041014, 0.03513753509521484, 0.03520102310180664, 0.03495964813232422, 0.03469190216064453, 0.03496790313720703, 0.035060287475585934, 0.035175487518310546, 0.03484175872802735, 0.03498688125610352, 0.03479235076904297, 0.0352369270324707, 0.03542931365966797, 0.03488166427612305, 0.03473571014404297, 0.03476044845581055, 0.0349087028503418, 0.03473408126831055, 0.03474198532104492, 0.03753398513793945, 0.03495731353759766, 0.03477913665771484, 0.03479532623291016, 0.034711326599121094, 0.034605472564697266, 0.03467782211303711, 0.03475711822509766, 0.03523583984375, 0.035043327331542966, 0.0353177604675293, 0.03515801620483398, 0.03504537582397461, 0.035001953125, 0.034912094116210934, 0.03495292663574219, 0.03523465728759766, 0.03496918487548828, 0.03473683166503906, 0.03497264099121094, 0.03510095977783203, 0.03479177474975586, 0.03465536117553711, 0.03488240051269531, 0.03492265701293945, 0.03501260757446289, 0.03473408126831055, 0.03474227142333984, 0.0356824951171875, 0.034857887268066406, 0.03497478485107422, 0.034753982543945315, 0.0347259521484375, 0.034912609100341795, 0.03508639907836914, 0.03478214263916016, 0.03517139053344726, 0.034947006225585935, 0.03481315231323242, 0.03477705764770508, 0.0346550407409668, 0.034944671630859375, 0.03484502410888672, 0.0346255989074707, 0.03469686508178711, 0.03479609680175781, 0.034964256286621094, 0.03496166229248047, 0.0351137924194336, 0.03539980697631836, 0.0349666862487793, 0.03522361755371094, 0.03468947219848633, 0.03466473770141602, 0.03469043350219726, 0.03465571212768555, 0.03643695831298828, 0.03580928039550781, 0.03477423858642578, 0.03478607940673828, 0.03462758255004883, 0.034661823272705075, 0.03460563278198242, 0.034805313110351566, 0.03471001434326172, 0.03549177551269531, 0.034977790832519534, 0.03500236892700195, 0.03476483154296875, 0.03478524780273438, 0.03425068664550781, 0.03468489456176758, 0.03462815856933594, 0.03494902420043945, 0.0348304328918457, 0.03494854354858398, 0.034828544616699215, 0.034609470367431644, 0.0346847038269043, 0.03464995193481445, 0.03463520050048828, 0.034589630126953125, 0.034632991790771485, 0.03448704147338867, 0.03473430252075195, 0.03535638427734375, 0.034406688690185545, 0.034528289794921875, 0.03484316635131836, 0.03470764923095703, 0.03459686279296875, 0.03462963104248047, 0.03456409454345703, 0.03531980895996094, 0.03495743942260742, 0.036923263549804686, 0.034731166839599606, 0.03526332855224609, 0.034625537872314455, 0.03458832168579101, 0.03452755355834961, 0.03462790298461914, 0.034777854919433596, 0.034791553497314456, 0.034861438751220705, 0.035178462982177736, 0.034701023101806644, 0.034826751708984374, 0.0348502082824707, 0.03516915130615234, 0.034669921875, 0.03461356735229492, 0.035186622619628904, 0.03507235336303711, 0.03480710220336914, 0.03454140853881836, 0.03453839874267578, 0.03445113754272461, 0.034619712829589845, 0.034797569274902344, 0.03474156951904297, 0.03459347152709961, 0.03536076736450195, 
0.03502899169921875, 0.03500032043457031, 0.03455926513671875, 0.03472048187255859, 0.034625537872314455, 0.03541721725463867, 0.03600032043457031, 0.03488729476928711, 0.03476924896240234, 0.03515849685668945, 0.034496097564697265, 0.034777503967285156, 0.03479347229003906, 0.03473625564575195, 0.035675838470458986, 0.03492019271850586, 0.03470175933837891, 0.03465359878540039, 0.03476115036010742, 0.034752670288085936, 0.034680992126464846, 0.03477897644042969, 0.03461759948730469, 0.03485260772705078, 0.03469916915893555, 0.03469321441650391, 0.035198974609375, 0.0349672966003418, 0.034583839416503906, 0.03455209732055664, 0.03457542419433594, 0.0346077766418457, 0.03452617645263672, 0.0344453125, 0.034753982543945315, 0.03516902542114258, 0.035039390563964844, 0.03560208129882812, 0.03489996719360351, 0.0347710075378418, 0.03495481491088867, 0.03492707061767578, 0.034934688568115234, 0.0349306869506836, 0.034598209381103515, 0.034619937896728514, 0.03508563232421875, 0.03453833770751953, 0.034998302459716794, 0.034543582916259766, 0.03462758255004883, 0.034633438110351564, 0.034646305084228515, 0.03468697738647461, 0.034770942687988284, 0.03570211029052734, 0.03810985565185547, 0.034844833374023436, 0.03465760040283203, 0.03626364898681641, 0.03465078353881836, 0.03478953552246094, 0.034599040985107424, 0.03472723388671875, 0.03473823928833008, 0.034824382781982424, 0.03466921615600586, 0.03705846405029297, 0.0348873291015625, 0.034847999572753904, 0.03487353515625, 0.034726558685302736, 0.03477916717529297]",tokens/s,28.631739831961248,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,857.296896,8891.793408,0.0,8489.271296,8353.731072,s,1,19.882775390625,19.882775390625,0.0,19.882775390625,19.882775390625,19.882775390625,19.882775390625,[19.882775390625],,kWh,0.00037010331245422394,4.0818011972862095e-05,0.00011675259340199196,0.000527673917829078,,MB,1330.946048,9671.933952,0.0,9256.828928,8872.305152,s,10,15.396127197265624,1.5396127197265623,0.007153630207981161,1.5420650024414062,1.5466023681640626,1.5469328369140625,1.5471972119140627,"[1.5238726806640626, 1.530095458984375, 1.53588623046875, 1.5414049072265625, 1.54272509765625, 1.5403482666015624, 1.543223876953125, 1.5447784423828126, 1.5472633056640626, 1.5465289306640626]",tokens/s,166.27558133286013,kWh,4.501947981999971e-05,4.9652507064351795e-06,2.99457739565967e-05,7.993050448303158e-05,tokens/kWh,3202782.237591734,MB,1367.986176,9705.488384,0.0,9288.286208,8872.307712,s,10,81.9812939453125,8.19812939453125,0.06302884145701425,8.18798828125,8.2905529296875,8.296851171875,8.301889765624999,"[8.1947236328125, 8.17040380859375, 8.11117041015625, 8.16355419921875, 8.1812529296875, 8.10695947265625, 8.2401181640625, 8.3031494140625, 8.22080859375, 
8.2891533203125]",tokens/s,7.684679878561759,kWh,0.0002369066769795851,2.6132625056087262e-05,0.0001551377074434035,0.0004181770094790759,tokens/kWh,150653.90629312512,,s,630,81.9787055969239,0.13012492951892674,0.0018980226179538722,0.12984225463867188,0.13232100830078125,0.13341757888793945,0.13602055755615236,"[0.13548675537109375, 0.1330941162109375, 0.13038546752929686, 0.13520637512207032, 0.1348487091064453, 0.130783935546875, 0.12952584838867187, 0.1293516845703125, 0.12951286315917968, 0.12904917907714844, 0.1298140869140625, 0.1340615692138672, 0.12984194946289063, 0.1322190399169922, 0.12970803833007813, 0.13071495056152344, 0.12884982299804687, 0.12876019287109375, 0.12906332397460937, 0.12941862487792968, 0.13116879272460938, 0.13050070190429688, 0.13053033447265625, 0.13040328979492188, 0.1293885498046875, 0.13017292785644533, 0.1297012176513672, 0.1293687286376953, 0.12911410522460937, 0.12987187194824218, 0.1290425262451172, 0.1305166473388672, 0.12848973083496093, 0.1290484161376953, 0.1283892822265625, 0.13220249938964843, 0.131051513671875, 0.1314693145751953, 0.1310836181640625, 0.13154150390625, 0.1302959747314453, 0.13016685485839843, 0.12934329223632812, 0.12834214782714845, 0.1311272888183594, 0.12999270629882811, 0.1307115478515625, 0.12980242919921875, 0.1296771240234375, 0.12898057556152343, 0.12826870727539064, 0.12754329681396484, 0.1279283218383789, 0.12905882263183593, 0.12953599548339845, 0.12881260681152343, 0.12927392578125, 0.1284872283935547, 0.128252197265625, 0.12769308471679688, 0.12867788696289062, 0.12968754577636718, 0.13004742431640626, 0.1348470153808594, 0.13118397521972655, 0.1311297302246094, 0.13271023559570314, 0.13038847351074218, 0.13170681762695313, 0.136163330078125, 0.13002957153320313, 0.12878643798828124, 0.12949020385742188, 0.13121165466308593, 0.1297571563720703, 0.12897932434082032, 0.12983206176757814, 0.12874163818359374, 0.1295878448486328, 0.12942445373535155, 0.12967213439941405, 0.1316003875732422, 0.12911398315429687, 0.12910169982910155, 0.12804873657226562, 0.13056002807617187, 0.1301829833984375, 0.12917433166503905, 0.12973670959472655, 0.13036749267578124, 0.1296506805419922, 0.12909135437011718, 0.12873750305175782, 0.12812261962890625, 0.12912460327148437, 0.12799600219726562, 0.1302486114501953, 0.12989439392089844, 0.12926156616210938, 0.12894805908203125, 0.13334133911132812, 0.1302302703857422, 0.13040435791015625, 0.12980018615722655, 0.13075045776367186, 0.13038099670410155, 0.1298337860107422, 0.12949888610839844, 0.12852864074707032, 0.1289852752685547, 0.1277683868408203, 0.12723104095458984, 0.12878944396972655, 0.12875369262695313, 0.1283171844482422, 0.13131155395507813, 0.12838327026367188, 0.12743475341796875, 0.1270497283935547, 0.12745523071289064, 0.12935955810546876, 0.12982508850097657, 0.1284505615234375, 0.12860847473144532, 0.12887986755371095, 0.128176513671875, 0.13580393981933594, 0.13144776916503906, 0.1320569305419922, 0.12964662170410157, 0.1289457550048828, 0.1286907196044922, 0.13139353942871093, 0.12991017150878906, 0.13147605895996095, 0.13387161254882812, 0.12899754333496094, 0.12912623596191405, 0.12870588684082032, 0.12917379760742187, 0.1290991668701172, 0.1281402587890625, 0.12788121795654298, 0.12697750091552734, 0.12860470581054687, 0.12707430267333986, 0.12748390197753906, 0.1273089599609375, 0.128314208984375, 0.12860989379882812, 0.12837315368652344, 0.12985881042480468, 0.129446533203125, 0.12916482543945312, 0.1285157165527344, 0.1283976593017578, 0.12829338073730467, 
0.12871693420410157, 0.12793446350097656, 0.12765798187255858, 0.12686524963378906, 0.12756527709960938, 0.12718953704833985, 0.12791407775878907, 0.12776863861083984, 0.12971827697753907, 0.12899737548828125, 0.12991693115234376, 0.12975103759765624, 0.1295319061279297, 0.12867315673828125, 0.12981663513183594, 0.12899696350097656, 0.12944834899902344, 0.12770156860351561, 0.12698345947265624, 0.12701974487304687, 0.12749756622314454, 0.12695414733886717, 0.12863385009765624, 0.12758518218994142, 0.12940406799316406, 0.12790060424804686, 0.12731568145751954, 0.12709097290039062, 0.12714393615722655, 0.1266339874267578, 0.12850930786132814, 0.1272828826904297, 0.13577609252929687, 0.1297744903564453, 0.1301565399169922, 0.12939059448242188, 0.1318882598876953, 0.130642822265625, 0.12962406921386718, 0.12881304931640625, 0.12928614807128908, 0.12908572387695313, 0.13072940063476562, 0.13088368225097657, 0.13241094970703124, 0.12923078918457032, 0.12782457733154298, 0.12748185729980469, 0.1272995834350586, 0.12759040069580077, 0.1310392303466797, 0.12861447143554688, 0.129552001953125, 0.12881919860839844, 0.12741180419921874, 0.12862947082519532, 0.12773990631103516, 0.1321943054199219, 0.12946022033691407, 0.13082418823242187, 0.13034701538085938, 0.12954141235351563, 0.1291046142578125, 0.12872499084472655, 0.12889219665527343, 0.12962069702148438, 0.12825762939453125, 0.129208740234375, 0.12919398498535156, 0.12824166870117187, 0.12888064575195313, 0.12850790405273438, 0.1282720947265625, 0.12827020263671876, 0.13374505615234375, 0.13103913879394533, 0.13114320373535157, 0.1297782440185547, 0.12921002197265624, 0.12940322875976562, 0.129931396484375, 0.12932659912109376, 0.1291433868408203, 0.13049014282226562, 0.13120643615722657, 0.131070556640625, 0.1309509735107422, 0.12972682189941406, 0.12956259155273436, 0.1290301513671875, 0.12859762573242187, 0.1286429748535156, 0.1281847381591797, 0.1280635528564453, 0.1278238754272461, 0.13492201232910156, 0.12995698547363282, 0.12965890502929686, 0.12917350769042968, 0.128093017578125, 0.12809829711914061, 0.13041664123535157, 0.13180464172363282, 0.12913104248046875, 0.13039590454101563, 0.12869247436523437, 0.1301944580078125, 0.12863587951660158, 0.1321943054199219, 0.13586773681640624, 0.12943417358398437, 0.12871597290039063, 0.12865164184570313, 0.12862313842773437, 0.1279836196899414, 0.1280307159423828, 0.1276723175048828, 0.12841165161132811, 0.12881910705566407, 0.12885539245605468, 0.1301143035888672, 0.1282150421142578, 0.12786598205566407, 0.12775308990478515, 0.12878028869628907, 0.13026255798339845, 0.12927804565429687, 0.12910838317871093, 0.12948466491699218, 0.12946237182617187, 0.12938777160644532, 0.12946675109863282, 0.12994802856445312, 0.13030400085449217, 0.12970803833007813, 0.13002137756347656, 0.12980003356933595, 0.12976092529296876, 0.12995826721191406, 0.13258157348632812, 0.13065589904785158, 0.13063401794433593, 0.130082275390625, 0.13009359741210938, 0.13004733276367186, 0.130468505859375, 0.12994764709472656, 0.13020506286621095, 0.13073207092285155, 0.1300015106201172, 0.13071974182128906, 0.13174981689453125, 0.13022758483886718, 0.1292172088623047, 0.1327406005859375, 0.1320616912841797, 0.1293824005126953, 0.1283440704345703, 0.1379567413330078, 0.13006903076171875, 0.12920419311523437, 0.12927743530273436, 0.12868269348144531, 0.1285867462158203, 0.12868319702148437, 0.12763033294677734, 0.12767314910888672, 0.13118070983886718, 0.12939657592773438, 0.12801747131347657, 0.12788412475585936, 
0.12988131713867188, 0.12932591247558595, 0.1302235565185547, 0.12915158081054687, 0.132347900390625, 0.12927590942382813, 0.12898057556152343, 0.12852015686035156, 0.1283527069091797, 0.1282347869873047, 0.1280064697265625, 0.1283321990966797, 0.12832972717285157, 0.12722716522216798, 0.12822802734375, 0.1267630386352539, 0.12726409912109374, 0.12802525329589845, 0.12793405151367188, 0.13074671936035157, 0.1292935028076172, 0.1282361602783203, 0.1281292724609375, 0.12787439727783204, 0.12733328247070314, 0.12825779724121095, 0.12761055755615233, 0.12975955200195313, 0.1280696258544922, 0.12712754821777345, 0.12720448303222656, 0.12661222076416015, 0.12710486602783203, 0.1278896942138672, 0.131112548828125, 0.13031056213378905, 0.12937216186523437, 0.12942745971679687, 0.1287977294921875, 0.12879312133789061, 0.1283892822265625, 0.12873043823242186, 0.1290413818359375, 0.12873893737792969, 0.12810646057128905, 0.12740444946289062, 0.12687155151367188, 0.12725865936279296, 0.12679926300048827, 0.1276563491821289, 0.13532322692871093, 0.13003826904296875, 0.12859596252441408, 0.12974800109863283, 0.1288527069091797, 0.1286719970703125, 0.1279238739013672, 0.12814691162109376, 0.1293402862548828, 0.12946022033691407, 0.12880210876464843, 0.13096333312988281, 0.13025363159179687, 0.13068284606933595, 0.13029705810546874, 0.1293135986328125, 0.12874960327148438, 0.12938441467285156, 0.13952000427246095, 0.13199932861328126, 0.1293336639404297, 0.1283431396484375, 0.12837171936035155, 0.12776844787597658, 0.12768873596191407, 0.12994764709472656, 0.12833177185058595, 0.1320482177734375, 0.13257589721679688, 0.13165286254882813, 0.1316441650390625, 0.13056935119628907, 0.12996493530273437, 0.12974490356445312, 0.13232643127441407, 0.13257778930664063, 0.13263650512695313, 0.1321246795654297, 0.13096829223632814, 0.13158799743652344, 0.13141737365722655, 0.1304317169189453, 0.13059811401367188, 0.13189199829101564, 0.13177151489257813, 0.1313391418457031, 0.13038914489746095, 0.13016102600097657, 0.12942518615722656, 0.1339288330078125, 0.1329344940185547, 0.13459408569335937, 0.13214938354492187, 0.13113151550292967, 0.1313805694580078, 0.13125885009765625, 0.13105599975585938, 0.13136659240722656, 0.1311865234375, 0.13192623901367187, 0.13177606201171874, 0.13105836486816405, 0.13041789245605467, 0.1360829772949219, 0.13232099914550782, 0.13103170776367187, 0.1310905303955078, 0.12985139465332032, 0.12960905456542968, 0.13087715148925783, 0.1312142791748047, 0.1321492462158203, 0.13107743835449218, 0.13059494018554688, 0.1301468505859375, 0.12910214233398437, 0.1345635528564453, 0.13266265869140625, 0.13329676818847655, 0.13120512390136718, 0.13172735595703125, 0.13110064697265625, 0.1341010284423828, 0.13086451721191406, 0.13905552673339844, 0.13224163818359375, 0.13287014770507813, 0.13217088317871092, 0.13212969970703126, 0.13196476745605468, 0.13195706176757813, 0.1323210906982422, 0.13232234191894532, 0.13172787475585937, 0.13386419677734376, 0.13178851318359375, 0.13237965393066406, 0.13176730346679688, 0.13192601013183594, 0.13208781433105468, 0.13196287536621093, 0.1324277801513672, 0.13155311584472656, 0.13177667236328125, 0.13196697998046875, 0.1310392303466797, 0.13185638427734375, 0.131689697265625, 0.1325289306640625, 0.13284352111816405, 0.13197894287109374, 0.13184442138671876, 0.13043093872070313, 0.13328694152832032, 0.1320396728515625, 0.1324582977294922, 0.13106985473632812, 0.13195033264160155, 0.1314207000732422, 0.13095631408691405, 0.13033305358886718, 0.128999267578125, 
0.12971495056152343, 0.12910386657714842, 0.12995928955078126, 0.13044184875488282, 0.1362623748779297, 0.13155532836914063, 0.1314017333984375, 0.1307991943359375, 0.13090066528320313, 0.1297622985839844, 0.1287359619140625, 0.12884553527832032, 0.12895289611816407, 0.12954595947265626, 0.12955020141601561, 0.13021372985839844, 0.12990467834472658, 0.1291297607421875, 0.1328954620361328, 0.13184999084472657, 0.12995616149902345, 0.12991030883789062, 0.13040296936035156, 0.13078848266601562, 0.13340147399902344, 0.13041664123535157, 0.13025059509277342, 0.13363011169433595, 0.13099427795410157, 0.13022979736328125, 0.12962034606933595, 0.1303203887939453, 0.12925704956054687, 0.12817449951171875, 0.12900965881347656, 0.12876800537109376, 0.12922880554199218, 0.12985958862304686, 0.12887376403808593, 0.1309703369140625, 0.12935328674316407, 0.13159468078613282, 0.13129933166503907, 0.13164749145507812, 0.13082748413085937, 0.13045225524902343, 0.1314508819580078, 0.13113754272460937, 0.13031948852539063, 0.12960969543457032, 0.12927456665039064, 0.13002569580078124, 0.12965887451171876, 0.12925747680664063, 0.12922026062011718, 0.13109698486328125, 0.13159007263183595, 0.13161062622070313, 0.1316741180419922, 0.13191885375976561, 0.13172224426269533, 0.13074021911621095, 0.13072589111328126, 0.13087129211425783, 0.13083570861816407, 0.12998495483398437, 0.1282890625, 0.13749200439453124, 0.13096406555175782, 0.12988185119628906, 0.13041932678222656, 0.1298206787109375, 0.13117759704589843, 0.13019197082519532, 0.13043331909179687, 0.13039820861816406, 0.13066035461425782, 0.12918496704101562, 0.12991775512695314, 0.12953135681152345, 0.1308934326171875, 0.13089065551757811, 0.13150531005859376, 0.13346493530273437, 0.13100044250488282, 0.13034451293945312, 0.12994309997558592, 0.13142501831054687, 0.13098941040039064, 0.1356249542236328, 0.13094749450683593, 0.13343075561523438, 0.1306219787597656, 0.1293148193359375, 0.12956378173828126, 0.12940284729003906, 0.13095516967773438, 0.13416070556640625, 0.13188739013671874, 0.13245481872558593, 0.13187184143066405, 0.13209078979492186, 0.13174169921875, 0.13386137390136718, 0.13205007934570312, 0.1321021728515625, 0.13255763244628907, 0.1320766143798828, 0.13243629455566405, 0.13171366882324217, 0.132421630859375, 0.13216886901855468, 0.1317056884765625, 0.13158406066894532, 0.13471685791015625, 0.13193788146972657, 0.13241436767578124, 0.1315770263671875, 0.13325804138183595, 0.1319710693359375, 0.13374053955078125, 0.13201802062988283, 0.13151475524902342, 0.1298425598144531, 0.13112118530273437, 0.13127626037597656, 0.13150851440429687, 0.13120361328125, 0.13012387084960939, 0.13135836791992186]",tokens/s,7.684922510213921,, 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1231.44192,8448.311296,0.0,8053.06368,7930.605568,s,1,20.147826171875,20.147826171875,0.0,20.147826171875,20.147826171875,20.147826171875,20.147826171875,[20.147826171875],,kWh,0.0003734974021333187,4.1192099596527255e-05,0.00013842372184999507,0.0005531132235798411,,MB,1280.770048,10214.11328,0.0,9806.282752,9135.716352,s,10,18.62685852050781,1.8626858520507814,0.008198181861214697,1.8660625,1.8697125122070313,1.8699464660644531,1.8701336291503907,"[1.8432120361328126, 1.8550084228515624, 1.8570380859375, 1.864203125, 1.8626883544921875, 1.867921875, 1.8689832763671874, 1.86796240234375, 1.870180419921875, 1.8696605224609375]",tokens/s,137.43595019962646,kWh,5.421532625541544e-05,5.979594668753173e-06,3.600202880159956e-05,9.619694972576816e-05,tokens/kWh,2661207.0416971403,MB,1302.986752,10214.11328,0.0,9806.282752,9135.718912,s,10,94.5289873046875,9.45289873046875,0.03300749615594201,9.455719238281251,9.49055146484375,9.493977880859376,9.496719013671875,"[9.4092529296875, 9.4173203125, 9.40789453125, 9.438798828125, 9.4357744140625, 9.4804248046875, 9.4726396484375, 9.4796875, 9.4897900390625, 9.497404296875]",tokens/s,6.664622333987065,kWh,0.00027711843717833517,3.056830458298787e-05,0.00018413728619859972,0.0004918240279599228,tokens/kWh,128094.5956652888,,s,630,94.52455953979486,0.15003898339649988,0.001875406770582973,0.1499233856201172,0.15152191619873048,0.15219839096069337,0.15889560440063477,"[0.15854421997070312, 0.14693299865722656, 0.14741305541992186, 0.14709324645996094, 0.14787271118164064, 0.14706072998046876, 0.15467926025390624, 0.15004266357421875, 0.1489264678955078, 0.14833622741699218, 0.1476426544189453, 0.1476343078613281, 0.1486991424560547, 0.15006460571289063, 0.14923216247558593, 0.14806956481933595, 0.1482065887451172, 0.1475479736328125, 0.14836872863769532, 0.1491155548095703, 0.1513369598388672, 0.14916986083984374, 0.14808265686035157, 0.1494879608154297, 0.14759117126464844, 0.1497845764160156, 0.14995436096191406, 0.14902259826660155, 0.14852085876464843, 0.14935606384277345, 0.14895126342773438, 0.1481359405517578, 0.15096038818359375, 0.1497481231689453, 0.1502617645263672, 0.14804173278808594, 0.14938316345214844, 0.148231201171875, 0.14990182495117188, 0.1498867492675781, 0.1490885772705078, 0.14964553833007813, 0.1488426513671875, 0.14895907592773439, 0.14939053344726563, 0.15035423278808593, 0.15032386779785156, 0.14997885131835936, 0.14938922119140624, 0.14937464904785155, 0.14891484069824218, 0.14893670654296876, 0.15039900207519533, 0.1494896697998047, 0.15020442199707032, 0.15000941467285156, 0.14883680725097656, 0.1492229461669922, 0.1498505859375, 0.15049728393554687, 0.14987673950195313, 0.15002418518066407, 0.14994610595703126, 0.1584496612548828, 0.1466931915283203, 0.14688351440429687, 0.1472368621826172, 0.147654052734375, 0.14796246337890626, 0.15514755249023438, 0.14969091796875, 0.14807420349121095, 0.14830230712890624, 0.14861024475097656, 0.14789482116699218, 0.15093174743652343, 0.15089584350585938, 0.1492109375, 0.1481815948486328, 0.14759916687011718, 0.14816464233398438, 0.14881808471679686, 0.15072653198242186, 0.14980531311035156, 0.14910861206054687, 0.1488407745361328, 0.14732293701171875, 0.14835098266601562, 0.14897695922851562, 0.15124755859375, 0.15087615966796875, 0.1487477722167969, 0.14819378662109375, 0.15014093017578126, 0.14895893859863282, 0.14878749084472656, 0.15112602233886718, 0.14977996826171874, 0.1495716552734375, 0.14782643127441406, 
0.14939360046386718, 0.14954652404785157, 0.14861529541015625, 0.150591552734375, 0.14975669860839844, 0.14852301025390624, 0.1493053436279297, 0.14988870239257812, 0.14947975158691407, 0.15011750793457032, 0.14948031616210938, 0.15034066772460938, 0.14879750061035157, 0.14923405456542968, 0.1499632568359375, 0.14945074462890626, 0.14956544494628907, 0.15033273315429688, 0.14984877014160156, 0.14942413330078125, 0.1496494140625, 0.1498419189453125, 0.15033139038085938, 0.15029656982421874, 0.14947943115234374, 0.15083724975585938, 0.15883468627929687, 0.14712348937988282, 0.1473582763671875, 0.14713871765136718, 0.1475625, 0.1480203857421875, 0.15394288635253905, 0.14968377685546874, 0.14885113525390625, 0.1478082275390625, 0.14778781127929688, 0.14760345458984375, 0.15012185668945313, 0.15080624389648437, 0.14987767028808593, 0.1475392608642578, 0.14777413940429687, 0.14762979125976564, 0.14804960632324218, 0.15000172424316408, 0.14989506530761718, 0.1495926971435547, 0.14783692932128906, 0.14928810119628907, 0.14761660766601561, 0.14889727783203124, 0.15015887451171875, 0.1493268127441406, 0.14894601440429686, 0.14766581726074218, 0.14871113586425783, 0.14815875244140625, 0.14938316345214844, 0.14897113037109375, 0.14930776977539062, 0.1483644104003906, 0.14922831726074218, 0.14824024963378907, 0.14879142761230468, 0.14986656188964845, 0.14854147338867188, 0.14899366760253907, 0.148991455078125, 0.14974864196777343, 0.1484962921142578, 0.14908364868164062, 0.1510467529296875, 0.15059353637695314, 0.15056486511230469, 0.14870527648925783, 0.14996470642089843, 0.14984725952148437, 0.15076797485351562, 0.14946357727050782, 0.14999705505371094, 0.14979327392578126, 0.149749755859375, 0.14939340209960938, 0.14963302612304688, 0.1512626190185547, 0.15061871337890625, 0.15026789855957032, 0.1501511688232422, 0.16048147583007813, 0.1478137969970703, 0.14684364318847656, 0.14750076293945313, 0.1481182098388672, 0.147625244140625, 0.15558543395996094, 0.15031295776367187, 0.15010202026367186, 0.1473228759765625, 0.14809840393066406, 0.14891690063476562, 0.15066709899902345, 0.1523625030517578, 0.1495528564453125, 0.15025657653808594, 0.14717951965332032, 0.1477015380859375, 0.14854576110839843, 0.15081053161621094, 0.14991574096679688, 0.14980709838867187, 0.14936863708496093, 0.14869113159179687, 0.14843289184570313, 0.1505497589111328, 0.1508167724609375, 0.14928973388671876, 0.14846726989746092, 0.14798419189453124, 0.14760362243652345, 0.14824208068847655, 0.1499185333251953, 0.15192268371582032, 0.14972518920898437, 0.15008335876464843, 0.1490160675048828, 0.1493173828125, 0.150193115234375, 0.14994003295898437, 0.1505341491699219, 0.15095826721191405, 0.1500897216796875, 0.14980300903320312, 0.14960179138183594, 0.14882662963867188, 0.15093875122070313, 0.15183961486816405, 0.14995864868164063, 0.14967808532714844, 0.1492111358642578, 0.1504192352294922, 0.15062448120117186, 0.15064678955078126, 0.14975999450683594, 0.14992723083496093, 0.15009767150878905, 0.15005177307128906, 0.14919270324707032, 0.15051065063476562, 0.15042655944824218, 0.14982322692871095, 0.1503541717529297, 0.15892048645019533, 0.14766339111328125, 0.147649658203125, 0.14745455932617188, 0.14792066955566407, 0.14772685241699218, 0.1545789489746094, 0.14918861389160157, 0.14881587219238282, 0.14828239440917967, 0.147767578125, 0.14780812072753907, 0.15152621459960938, 0.1516663360595703, 0.14877462768554686, 0.14945458984375, 0.14783168029785157, 0.14850457763671876, 0.1491631317138672, 0.15030157470703126, 
0.1511710662841797, 0.14916403198242187, 0.14796131896972656, 0.14798233032226563, 0.14890652465820312, 0.1506727294921875, 0.15133334350585936, 0.15100947570800782, 0.14982733154296876, 0.14889369201660158, 0.14841587829589845, 0.1503773498535156, 0.15023670959472657, 0.150972900390625, 0.15039488220214844, 0.14894639587402345, 0.1489188232421875, 0.14809703063964844, 0.15040476989746093, 0.14948902893066407, 0.15060887145996094, 0.15013069152832031, 0.15026380920410157, 0.14938038635253906, 0.14961328125, 0.15043942260742188, 0.1501641845703125, 0.15000349426269532, 0.14944050598144532, 0.1500282897949219, 0.14859805297851564, 0.1498075866699219, 0.14956144714355468, 0.14955126953125, 0.1506856384277344, 0.1492419128417969, 0.1497716827392578, 0.14878781127929688, 0.15277027893066406, 0.1512610626220703, 0.15175926208496093, 0.15017327880859374, 0.149114501953125, 0.16288624572753907, 0.1477181396484375, 0.14761546325683594, 0.14845980834960937, 0.14872508239746093, 0.15000437927246094, 0.15573606872558593, 0.15141606140136718, 0.14867266845703125, 0.14755081176757812, 0.14773043823242188, 0.149042724609375, 0.15223001098632813, 0.15227264404296875, 0.14947142028808594, 0.148680419921875, 0.14868450927734375, 0.14776419067382812, 0.1512816619873047, 0.15114854431152344, 0.15097637939453126, 0.15034303283691405, 0.14924822998046874, 0.1480994873046875, 0.15076710510253907, 0.1502828826904297, 0.1508658905029297, 0.15182847595214843, 0.14930908203125, 0.14926681518554688, 0.14834402465820312, 0.15024003601074218, 0.15103999328613282, 0.15138316345214844, 0.14999005126953124, 0.14873008728027343, 0.14909849548339843, 0.1499279327392578, 0.1502471923828125, 0.15143548583984376, 0.15081446838378906, 0.14958003234863282, 0.1488170623779297, 0.14954173278808594, 0.15076069641113282, 0.15150355529785156, 0.15223603820800782, 0.15231800842285156, 0.15049728393554687, 0.15057510375976563, 0.1513492431640625, 0.1512489013671875, 0.15151922607421875, 0.1519964141845703, 0.15107667541503905, 0.14943251037597657, 0.15107891845703125, 0.15040835571289063, 0.15121405029296875, 0.15189286804199217, 0.15186067199707032, 0.1505996551513672, 0.151111328125, 0.16059103393554688, 0.14852359008789062, 0.14810488891601561, 0.1477718048095703, 0.1496065673828125, 0.14961048889160156, 0.15542066955566405, 0.14933811950683593, 0.14978770446777342, 0.1478082275390625, 0.14786204528808594, 0.14902473449707032, 0.151837158203125, 0.15117721557617186, 0.15020236206054688, 0.1484206085205078, 0.14789018249511718, 0.14916371154785157, 0.1492541046142578, 0.150806884765625, 0.15053184509277343, 0.150144775390625, 0.14877850341796875, 0.15022563171386719, 0.15060202026367187, 0.15170970153808594, 0.152453125, 0.15130604553222657, 0.14972128295898438, 0.14921302795410157, 0.150010009765625, 0.15065603637695313, 0.15075141906738282, 0.15193574523925782, 0.1505526123046875, 0.1509005126953125, 0.14931088256835937, 0.15039776611328126, 0.15007334899902344, 0.15094374084472656, 0.1516062774658203, 0.15031398010253907, 0.1499279327392578, 0.1499495086669922, 0.14997190856933593, 0.15113731384277343, 0.15154275512695312, 0.15045826721191408, 0.14896514892578125, 0.14913363647460937, 0.1492084503173828, 0.15082351684570314, 0.151297607421875, 0.1510236511230469, 0.15065907287597657, 0.15087251281738281, 0.1492991943359375, 0.14947065734863282, 0.15091722106933594, 0.15070870971679687, 0.15082701110839844, 0.15091670227050782, 0.15072662353515626, 0.15959231567382812, 0.14886105346679687, 0.14778778076171875, 0.14983538818359374, 
0.1491922607421875, 0.14987551879882813, 0.1556634216308594, 0.1501907501220703, 0.1481136932373047, 0.14931695556640626, 0.14812838745117188, 0.14816668701171876, 0.15233842468261719, 0.15146563720703124, 0.14887699890136719, 0.14908892822265624, 0.1483120574951172, 0.14859878540039062, 0.15039077758789063, 0.15120335388183595, 0.15097850036621094, 0.14946754455566405, 0.14944790649414064, 0.14820649719238282, 0.1496678466796875, 0.1519964141845703, 0.15047599792480468, 0.15099481201171874, 0.15066986083984374, 0.15047103881835938, 0.15035968017578125, 0.1511182098388672, 0.15068130493164061, 0.1521093444824219, 0.15045222473144532, 0.14927462768554686, 0.1498862762451172, 0.15068006896972655, 0.15019847106933593, 0.15072419738769532, 0.15124111938476562, 0.15026789855957032, 0.15040614318847656, 0.1500877685546875, 0.15090985107421875, 0.15075328063964843, 0.15084748840332032, 0.15059666442871095, 0.14992041015625, 0.1497638397216797, 0.15109788513183595, 0.1503354949951172, 0.15112594604492188, 0.15078611755371094, 0.15057664489746095, 0.15072102355957032, 0.15090428161621094, 0.1504150390625, 0.15069987487792968, 0.1518204803466797, 0.1514442901611328, 0.15071408081054688, 0.15095794677734375, 0.1594798126220703, 0.14768946838378907, 0.1474086151123047, 0.14796194458007814, 0.14871980285644532, 0.1511751708984375, 0.1552052459716797, 0.14981773376464844, 0.14821128845214843, 0.14820323181152345, 0.14916677856445312, 0.14896742248535155, 0.15080825805664064, 0.15189616394042968, 0.14882736206054686, 0.14892665100097657, 0.14898463439941406, 0.15083059692382814, 0.15108352661132812, 0.15213772583007812, 0.1524715576171875, 0.15017575073242187, 0.14958181762695313, 0.15004057312011718, 0.15037164306640624, 0.1508236083984375, 0.1513135986328125, 0.15114118957519532, 0.14954229736328126, 0.149695068359375, 0.14996070861816407, 0.15055258178710937, 0.15043174743652343, 0.1514352569580078, 0.15094989013671875, 0.15007318115234375, 0.14993629455566407, 0.15067861938476562, 0.15130438232421875, 0.1506393280029297, 0.15247769165039063, 0.1504047088623047, 0.1506246795654297, 0.15124678039550782, 0.15129379272460938, 0.14995639038085937, 0.15152143859863282, 0.15201309204101562, 0.150329345703125, 0.15109939575195314, 0.14998074340820314, 0.15117327880859374, 0.15051394653320313, 0.15118861389160157, 0.15060012817382812, 0.15107868957519532, 0.15096450805664063, 0.1512484130859375, 0.1513419189453125, 0.1513861083984375, 0.150540283203125, 0.15215974426269532, 0.14958642578125, 0.16037628173828125, 0.14842933654785156, 0.14838374328613282, 0.1491242218017578, 0.14892326354980467, 0.1501709747314453, 0.1543555908203125, 0.1496727294921875, 0.14922137451171874, 0.14989312744140626, 0.1485779571533203, 0.150301025390625, 0.15237939453125, 0.15199392700195313, 0.14958956909179688, 0.14875474548339843, 0.14964384460449218, 0.1494644775390625, 0.15013743591308593, 0.1515335693359375, 0.15089596557617188, 0.14982826232910157, 0.14886679077148438, 0.14936093139648438, 0.14969651794433594, 0.15076870727539063, 0.15124166870117187, 0.14939546203613283, 0.1491958465576172, 0.14992636108398438, 0.1488585968017578, 0.1508871612548828, 0.1511299591064453, 0.1514286346435547, 0.14960263061523438, 0.1506829376220703, 0.14990847778320313, 0.15144514465332032, 0.1513882293701172, 0.15225180053710938, 0.1513173828125, 0.1515478973388672, 0.1519615936279297, 0.15199757385253906, 0.1519764862060547, 0.1515175323486328, 0.1520762939453125, 0.1513038787841797, 0.15061383056640626, 0.1510794219970703, 
0.15177317810058594, 0.15131852722167968, 0.15134883117675782, 0.15165817260742187, 0.15170751953125, 0.14908502197265625, 0.15149215698242188, 0.15132716369628907, 0.15106185913085937, 0.15040988159179688, 0.1508311004638672, 0.15138406372070312, 0.15049462890625]",tokens/s,6.664934521432702,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,836.52608,555.614208,0.0,153.092096,140.32384,s,1,8.1435224609375,8.1435224609375,0.0,8.1435224609375,8.1435224609375,8.1435224609375,8.1435224609375,[8.1435224609375],,kWh,2.255117459584047e-05,2.480476665455659e-06,6.169171601982937e-06,3.120082286327906e-05,,MB,1287.671808,670.957568,0.0,255.852544,216.246784,s,10,0.22732559967041013,0.022732559967041013,0.0001311785683490533,0.022712575912475587,0.02285100517272949,0.02293278293609619,0.02299820514678955,"[0.02257004737854004, 0.02301456069946289, 0.022745471954345703, 0.022604543685913085, 0.022585792541503905, 0.02281340789794922, 0.02267420768737793, 0.022805055618286132, 0.02283283233642578, 0.022679679870605467]",tokens/s,11261.380168848722,kWh,6.57824053068593e-07,7.254525029560947e-08,4.3281628719365477e-07,1.1631855905578573e-06,tokens/kWh,220085257.31239828,MB,1320.579072,691.929088,0.0,276.824064,216.249344,s,10,10.612690307617187,1.0612690307617187,0.0051073585866504324,1.05996728515625,1.0682578979492188,1.0700089538574218,1.0714097985839843,"[1.071760009765625, 1.056057861328125, 1.05815283203125, 1.0607420654296875, 1.061920166015625, 1.0678687744140625, 1.0585333251953124, 1.0642095947265624, 1.054253173828125, 1.0591925048828126]",tokens/s,59.362893077905206,kWh,3.0701486169845053e-05,3.3854067172364966e-06,1.079694269660864e-05,4.488383558369018e-05,tokens/kWh,1403623.3575120938,,s,630,10.607889820098878,0.016837920349363295,0.00039163108499207656,0.01674025535583496,0.01710268211364746,0.017293227672576905,0.017834338531494145,"[0.01647964859008789, 0.016748512268066406, 0.016792320251464845, 0.016813440322875975, 0.017192895889282227, 0.017064191818237304, 0.016949535369873047, 0.01694927978515625, 0.01701478385925293, 0.016921920776367186, 0.016990911483764647, 0.016919807434082033, 0.016933631896972657, 0.017865760803222656, 0.019956159591674804, 0.01702112007141113, 0.016929119110107423, 0.016783615112304688, 0.016832256317138673, 0.01683564758300781, 0.016751712799072265, 0.016762720108032227, 0.01693270492553711, 0.01710246467590332, 0.017158048629760742, 0.01705228805541992, 0.01718272018432617, 0.017410175323486328, 0.017297279357910155, 0.01724415969848633, 0.017246463775634765, 0.017104639053344726, 0.01701193618774414, 0.017318815231323243, 0.017332096099853516, 0.0172258243560791, 0.01723814392089844, 0.017095808029174805, 0.016816160202026368, 0.016780160903930665, 0.016780927658081056, 0.016709760665893556, 0.016774784088134764, 0.0167589111328125, 0.01674060821533203, 0.016813728332519533, 0.0167380485534668, 0.01686822319030762, 
0.016772127151489256, 0.01682703971862793, 0.016851232528686522, 0.016823104858398438, 0.016928895950317383, 0.017000415802001952, 0.016867456436157228, 0.01687855911254883, 0.01690959930419922, 0.01697225570678711, 0.016855039596557618, 0.01681161689758301, 0.016851360321044923, 0.01680899238586426, 0.01688470458984375, 0.016587711334228514, 0.016831167221069337, 0.017118688583374023, 0.01684556770324707, 0.016680959701538087, 0.016797727584838867, 0.01689743995666504, 0.016730720520019532, 0.016873472213745116, 0.016803552627563476, 0.01689833641052246, 0.01704140853881836, 0.016660480499267577, 0.016726015090942382, 0.016842464447021484, 0.016704992294311525, 0.016841791152954103, 0.017217279434204102, 0.016817472457885743, 0.016700096130371093, 0.01671513557434082, 0.016733919143676758, 0.01675587272644043, 0.017097856521606444, 0.016699071884155273, 0.01675155258178711, 0.016713504791259767, 0.016639583587646483, 0.01680240058898926, 0.016615007400512697, 0.016679359436035157, 0.016705535888671876, 0.01676892852783203, 0.01663190460205078, 0.01670140838623047, 0.016631839752197265, 0.016578624725341797, 0.016617408752441408, 0.01692572784423828, 0.01689481544494629, 0.016674943923950195, 0.016731327056884765, 0.016876352310180663, 0.016681087493896483, 0.016674400329589844, 0.016678592681884766, 0.016638559341430666, 0.016658079147338866, 0.016725984573364258, 0.016656959533691406, 0.016807743072509766, 0.016734207153320312, 0.016715103149414063, 0.01686137580871582, 0.016722400665283202, 0.016697376251220704, 0.01670140838623047, 0.01663385581970215, 0.01665772819519043, 0.016655040740966798, 0.016688159942626953, 0.016714080810546875, 0.01671664047241211, 0.016723039627075196, 0.016983999252319335, 0.01688265609741211, 0.016778495788574217, 0.01698908805847168, 0.017135103225708007, 0.017353055953979492, 0.01726464080810547, 0.017120607376098634, 0.01694713592529297, 0.016855775833129884, 0.016959487915039064, 0.016945152282714843, 0.01692416000366211, 0.016871936798095705, 0.016736255645751954, 0.016824447631835937, 0.017049472808837892, 0.017182432174682617, 0.01726025581359863, 0.017089088439941405, 0.01687055969238281, 0.016734495162963867, 0.016792224884033202, 0.0166582088470459, 0.016695455551147462, 0.01662886428833008, 0.0166759033203125, 0.016654367446899413, 0.016647327423095704, 0.016630399703979493, 0.016691167831420897, 0.01664352035522461, 0.016660383224487305, 0.016627424240112303, 0.016620479583740234, 0.016627391815185546, 0.016879615783691407, 0.01666899108886719, 0.016639999389648438, 0.0166910400390625, 0.016587232589721678, 0.01663155174255371, 0.016678592681884766, 0.016730367660522463, 0.01661359977722168, 0.016813184738159178, 0.01699843215942383, 0.01680624008178711, 0.01665046310424805, 0.016707231521606445, 0.016697759628295897, 0.016706911087036133, 0.016650911331176757, 0.016704896926879882, 0.0166396484375, 0.016648607254028322, 0.016621824264526366, 0.01664396858215332, 0.01658310317993164, 0.016723968505859374, 0.016615423202514648, 0.01660927963256836, 0.016492544174194337, 0.016676607131958007, 0.016726015090942382, 0.01664204788208008, 0.01663795280456543, 0.016665727615356445, 0.01671571159362793, 0.01670444869995117, 0.016731744766235353, 0.016780927658081056, 0.01678620719909668, 0.016858591079711913, 0.01704604721069336, 0.017299104690551757, 0.017426271438598633, 0.017060352325439454, 0.01694054412841797, 0.0169169921875, 0.0174370231628418, 0.017579679489135743, 0.01761484718322754, 0.017215456008911133, 0.01696499252319336, 
0.01678607940673828, 0.016752864837646483, 0.016688064575195314, 0.016672895431518554, 0.016602079391479493, 0.016676607131958007, 0.01701478385925293, 0.01680335998535156, 0.01669183921813965, 0.016703327178955077, 0.016855039596557618, 0.01668115234375, 0.016760639190673828, 0.01668739128112793, 0.016688863754272462, 0.016687263488769533, 0.016887487411499022, 0.016688543319702147, 0.016751359939575196, 0.01676620864868164, 0.0167935676574707, 0.017062463760375977, 0.01692083168029785, 0.016749792098999024, 0.016853759765625, 0.01683046340942383, 0.016914432525634765, 0.01680588722229004, 0.016654144287109374, 0.016682464599609374, 0.016665088653564454, 0.01668118476867676, 0.016666431427001954, 0.016722015380859375, 0.016814367294311523, 0.016664384841918945, 0.016732383728027343, 0.016699167251586915, 0.01678335952758789, 0.016797760009765624, 0.01638675117492676, 0.016698720932006837, 0.016718143463134764, 0.016685279846191406, 0.016754816055297852, 0.016726015090942382, 0.0166748161315918, 0.01672547149658203, 0.016765535354614256, 0.016706815719604494, 0.016652896881103517, 0.016711776733398437, 0.016648063659667967, 0.01680396842956543, 0.01665433692932129, 0.016672544479370117, 0.016939519882202148, 0.016720800399780272, 0.016672832489013672, 0.01704319953918457, 0.016929344177246095, 0.017109439849853515, 0.01720902442932129, 0.017127904891967773, 0.016764768600463865, 0.017130847930908202, 0.01727952003479004, 0.017309823989868165, 0.017445152282714843, 0.017047168731689454, 0.016773311614990235, 0.016947103500366212, 0.016748607635498045, 0.016807743072509766, 0.0167957763671875, 0.016670719146728515, 0.016700672149658202, 0.01669811248779297, 0.016705535888671876, 0.016786880493164062, 0.016678592681884766, 0.01678220748901367, 0.0167589111328125, 0.016874528884887694, 0.016933536529541014, 0.01679977607727051, 0.016683168411254883, 0.016653888702392577, 0.016689599990844725, 0.016678176879882812, 0.01666441535949707, 0.01687641525268555, 0.01709823989868164, 0.016683040618896486, 0.016631967544555665, 0.016781440734863283, 0.016649791717529297, 0.01663670349121094, 0.016684896469116212, 0.017641471862792968, 0.01813692855834961, 0.017266847610473632, 0.01679052734375, 0.01644553565979004, 0.01675859260559082, 0.016808063507080077, 0.01689993667602539, 0.016750591278076172, 0.016746015548706056, 0.016710111618041992, 0.016953344345092772, 0.01668828773498535, 0.01769772720336914, 0.016689056396484374, 0.016770431518554688, 0.016643999099731445, 0.016620479583740234, 0.016723648071289062, 0.01668511962890625, 0.016709503173828126, 0.016695455551147462, 0.016654048919677734, 0.016986400604248046, 0.016656383514404297, 0.01663148880004883, 0.01692608070373535, 0.016649152755737303, 0.016702560424804686, 0.016634784698486327, 0.01670947265625, 0.016611648559570313, 0.01665542411804199, 0.016607168197631837, 0.016598175048828125, 0.016748064041137694, 0.016669023513793946, 0.01663983917236328, 0.01666864013671875, 0.01670457649230957, 0.01680508804321289, 0.01692860794067383, 0.01708576011657715, 0.017170528411865234, 0.016903999328613282, 0.016747455596923828, 0.01674403190612793, 0.01805708885192871, 0.0220849609375, 0.02168057632446289, 0.017371456146240236, 0.016906208038330078, 0.01673414421081543, 0.01672412872314453, 0.016682336807250977, 0.01679641532897949, 0.016660480499267577, 0.016604991912841798, 0.01668320083618164, 0.01673423957824707, 0.016703359603881834, 0.016740383148193358, 0.016637184143066405, 0.016633760452270507, 0.01675766372680664, 0.016662527084350585, 
0.016691200256347655, 0.016500991821289064, 0.016854400634765624, 0.016796031951904298, 0.016838176727294922, 0.016781312942504883, 0.016748544692993163, 0.016781120300292968, 0.016751264572143553, 0.016993568420410155, 0.016932928085327148, 0.016939935684204103, 0.017028863906860352, 0.01685840034484863, 0.01687215995788574, 0.016799808502197266, 0.017010080337524415, 0.017428672790527344, 0.017218175888061522, 0.017141471862792967, 0.017260671615600586, 0.016917728424072267, 0.01677120018005371, 0.016735904693603514, 0.016936063766479492, 0.01672150421142578, 0.016734304428100585, 0.016734399795532227, 0.01674012756347656, 0.016679296493530272, 0.016684576034545897, 0.01668524742126465, 0.0166748161315918, 0.016676416397094728, 0.01666828727722168, 0.016847679138183594, 0.01677948760986328, 0.016801536560058592, 0.016658464431762696, 0.016850944519042968, 0.016663711547851564, 0.016919391632080078, 0.01664419174194336, 0.01665654373168945, 0.01664588737487793, 0.016701215744018554, 0.016666208267211914, 0.016745088577270507, 0.0167076473236084, 0.016740383148193358, 0.016699264526367188, 0.016738336563110353, 0.016662208557128907, 0.016630079269409178, 0.01665017509460449, 0.016693311691284178, 0.016635200500488282, 0.016667423248291017, 0.016716896057128908, 0.016919008255004885, 0.016864927291870117, 0.01675699234008789, 0.01677350425720215, 0.01672403144836426, 0.016390464782714845, 0.016701152801513672, 0.016743967056274414, 0.016703968048095704, 0.01666361618041992, 0.016966592788696288, 0.016735904693603514, 0.016679264068603514, 0.016686912536621093, 0.01726483154296875, 0.016712736129760743, 0.016688095092773438, 0.017116256713867187, 0.016848928451538087, 0.01672243118286133, 0.01670390319824219, 0.016739616394042967, 0.016808704376220705, 0.016678848266601563, 0.016642112731933594, 0.01674339294433594, 0.016683231353759764, 0.016771743774414063, 0.01669868850708008, 0.01671174430847168, 0.016691232681274416, 0.01670137596130371, 0.016666751861572265, 0.016716415405273438, 0.016968000411987306, 0.016707263946533202, 0.01673209571838379, 0.016773183822631835, 0.016899616241455077, 0.016875999450683594, 0.017026111602783202, 0.01677142333984375, 0.016691808700561524, 0.01675609588623047, 0.016775808334350585, 0.017053823471069336, 0.017376991271972658, 0.017293472290039063, 0.01721343994140625, 0.016977312088012696, 0.016806047439575197, 0.016705856323242188, 0.017084543228149413, 0.01741360092163086, 0.01732252883911133, 0.017229215621948242, 0.0170600643157959, 0.016785791397094726, 0.01672719955444336, 0.016763967514038088, 0.01669049644470215, 0.016824640274047852, 0.0167807674407959, 0.01674118423461914, 0.01679302406311035, 0.016693151473999024, 0.01933942413330078, 0.017490304946899415, 0.016515327453613282, 0.01672208023071289, 0.016728992462158202, 0.01672902488708496, 0.016701440811157226, 0.01675071907043457, 0.01676198387145996, 0.01691929626464844, 0.016852127075195313, 0.016767839431762695, 0.016707712173461915, 0.016723039627075196, 0.016699359893798827, 0.016698175430297852, 0.01678745651245117, 0.016964832305908204, 0.01679644775390625, 0.016721920013427736, 0.016776351928710936, 0.016692319869995118, 0.01671776008605957, 0.016689983367919922, 0.016733184814453125, 0.016742399215698242, 0.01764352035522461, 0.01671507263183594, 0.016642208099365233, 0.016669216156005858, 0.016736255645751954, 0.016769023895263673, 0.01678726387023926, 0.016704832077026367, 0.01682236862182617, 0.016706335067749024, 0.016679008483886718, 0.016639808654785156, 0.01665964889526367, 
0.016645023345947266, 0.01666646385192871, 0.016634143829345704, 0.016639871597290037, 0.016680959701538087, 0.016615232467651366, 0.016721792221069336, 0.016653951644897462, 0.016650976181030272, 0.016746463775634764, 0.016718975067138674, 0.016925567626953124, 0.01664556884765625, 0.01666044807434082, 0.016677471160888673, 0.016702816009521483, 0.01664681625366211, 0.01671491241455078, 0.01666912078857422, 0.01661955261230469, 0.016630239486694336, 0.01672159957885742, 0.016720352172851564, 0.016703008651733398, 0.01665603256225586, 0.016636159896850584, 0.01648031997680664, 0.016703424453735353, 0.016961536407470702, 0.01668739128112793, 0.016739967346191407, 0.016707679748535157, 0.016742399215698242, 0.016696544647216798, 0.01662441635131836, 0.016660480499267577, 0.016695295333862305, 0.016742399215698242, 0.01665996742248535, 0.01670195198059082, 0.016693248748779296, 0.016766752243041992, 0.01675619125366211, 0.016867488861083985, 0.01691094398498535, 0.016947200775146484, 0.01729292869567871, 0.016859519958496095, 0.017096927642822266, 0.016963104248046874, 0.01699046325683594, 0.016764736175537108, 0.01691222381591797, 0.01745136070251465, 0.017757408142089842, 0.01737094306945801, 0.016765920639038086, 0.016762943267822267, 0.01675872039794922, 0.016676864624023437, 0.016865280151367186, 0.016646080017089844, 0.016857152938842775, 0.01681817626953125, 0.01699612808227539, 0.01686502456665039, 0.01691270446777344, 0.01668931198120117, 0.016652288436889647, 0.016649599075317382, 0.016680992126464844, 0.016737056732177735, 0.016733791351318358, 0.01661359977722168, 0.016690176010131837, 0.016732383728027343, 0.017031167984008787, 0.016689983367919922, 0.016685216903686524, 0.01677907180786133, 0.01669430351257324, 0.016667552947998047, 0.01667078399658203, 0.01665433692932129, 0.01668921661376953, 0.016617408752441408, 0.016726015090942382, 0.01683452796936035, 0.016672767639160157]",tokens/s,59.38975712269679,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,1183.182848,5163.057152,0.0,4760.53504,4751.079424,s,1,14.4248896484375,14.4248896484375,0.0,14.4248896484375,14.4248896484375,14.4248896484375,14.4248896484375,[14.4248896484375],,kWh,0.00020817933887916904,2.2955898240043184e-05,6.860144377000138e-05,0.0002997366808892136,,MB,1418.391552,6037.569536,0.0,5622.464512,5351.551488,s,10,10.116026123046876,1.0116026123046875,0.0054247634399796395,1.01097216796875,1.018905828857422,1.0192733489990233,1.0195673651123045,"[1.0067706298828125, 1.0014197387695312, 1.0067474975585937, 1.015257080078125, 1.0109721069335937, 1.0149801025390626, 1.0109722290039063, 1.0104417114257813, 1.019640869140625, 
1.0188241577148438]",tokens/s,253.06379885355082,kWh,2.9582426314582335e-05,3.262422467378916e-06,1.976932137099999e-05,5.2614170152961234e-05,tokens/kWh,4865609.383475029,MB,1443.831808,6039.666688,0.0,5622.464512,5351.554048,s,10,52.816798828124995,5.281679882812499,0.008070446948162054,5.282215576171875,5.289451220703125,5.290354516601562,5.2910771533203125,"[5.27518994140625, 5.28322216796875, 5.27497119140625, 5.281208984375, 5.2803154296875, 5.2640048828125, 5.28925048828125, 5.28879443359375, 5.2912578125, 5.28858349609375]",tokens/s,11.928023166457496,kWh,0.00015466525261833373,1.706075069167466e-05,0.00010159658127719963,0.000273322584587208,tokens/kWh,230496.869093885,,s,630,52.81297869110111,0.08383012490650964,0.0013448224511361866,0.08364436721801757,0.08414946365356445,0.08444847145080565,0.09367265815734864,"[0.09509683227539062, 0.08430796813964844, 0.08365625762939453, 0.08336224365234375, 0.08292147064208985, 0.08299878692626952, 0.0831115493774414, 0.0831324462890625, 0.08320700836181641, 0.08314816284179688, 0.08305689239501954, 0.08338432312011719, 0.08316713714599609, 0.08308783721923828, 0.08305462646484375, 0.083107421875, 0.08334086608886719, 0.08314527893066406, 0.0833538589477539, 0.08338841247558594, 0.08331468963623047, 0.08337407684326172, 0.08331468963623047, 0.08377251434326172, 0.08353987121582031, 0.08335052490234375, 0.08324301147460937, 0.08334950256347656, 0.08338947296142578, 0.08336073303222656, 0.0833433609008789, 0.08334937286376953, 0.08332256317138671, 0.08328646087646484, 0.0833905258178711, 0.08342272186279297, 0.0835560302734375, 0.08340557098388672, 0.08342118072509766, 0.08358089447021484, 0.08342736053466797, 0.08459264373779297, 0.0843468780517578, 0.08425881958007812, 0.08396800231933593, 0.08338841247558594, 0.08353382110595703, 0.08373862457275391, 0.08358707427978515, 0.08347628784179688, 0.08384735870361328, 0.08368118286132813, 0.08363426971435547, 0.08364646148681641, 0.08353177642822265, 0.08377734375, 0.08441036987304687, 0.08411312103271484, 0.0839316177368164, 0.08412979125976562, 0.08432844543457031, 0.08407654571533203, 0.08415763092041016, 0.09328844451904297, 0.08464383697509766, 0.08447795104980468, 0.08408780670166016, 0.0834731216430664, 0.0838141098022461, 0.08344809722900391, 0.08342111968994141, 0.08345635223388671, 0.08355430603027343, 0.08394735717773437, 0.08380636596679687, 0.08387174224853515, 0.08353593444824219, 0.0833473892211914, 0.08329574584960937, 0.08338191986083984, 0.08340975952148437, 0.08449228668212891, 0.08400252532958985, 0.08367273712158203, 0.08347843170166015, 0.08352432250976563, 0.0833966064453125, 0.08338003540039063, 0.08354630279541016, 0.08345804595947266, 0.08348870086669923, 0.08362303924560546, 0.08347724914550782, 0.08359260559082031, 0.08370243072509766, 0.08382073974609375, 0.08388396453857422, 0.08393852996826172, 0.08339126586914063, 0.083451904296875, 0.08341680145263672, 0.0837511978149414, 0.08372956848144532, 0.08363839721679688, 0.08359295654296875, 0.0838927001953125, 0.08355878448486329, 0.08373785400390625, 0.08374070739746094, 0.08373235321044922, 0.08353072357177735, 0.0836157455444336, 0.08373442840576172, 0.08360265350341797, 0.08374758148193359, 0.08364659118652344, 0.08374444580078125, 0.08364883422851563, 0.08373235321044922, 0.08366502380371094, 0.08493840026855469, 0.08367139434814454, 0.08370790100097657, 0.08389234924316406, 0.08386547088623048, 0.08373193359375, 0.09353011322021484, 0.08441363525390624, 0.08382911682128906, 0.08345645141601563, 
0.08318685150146485, 0.08297273254394531, 0.08324137878417968, 0.08302146911621094, 0.08299795532226563, 0.08322799682617188, 0.0831261749267578, 0.0831209259033203, 0.08311376190185547, 0.08367330932617187, 0.08324095916748046, 0.08307670593261719, 0.08325545501708985, 0.08334566497802734, 0.08330239868164062, 0.08357068634033203, 0.08343545532226562, 0.08312429046630859, 0.0832531509399414, 0.08335574340820312, 0.08347853088378906, 0.08344780731201172, 0.08349696350097656, 0.08459059143066407, 0.08405811309814454, 0.08370381164550782, 0.08356658935546875, 0.08341709136962891, 0.08347974395751953, 0.0840195541381836, 0.08384969329833984, 0.08334950256347656, 0.08405967712402344, 0.08348105621337891, 0.08352973175048828, 0.08351516723632812, 0.08362614440917969, 0.08360959625244141, 0.08366291046142578, 0.08376525115966797, 0.08381158447265626, 0.08356102752685547, 0.08372857666015625, 0.08372764587402344, 0.08380461120605469, 0.08362041473388672, 0.08367206573486329, 0.08350588989257812, 0.08381830596923828, 0.08370166778564453, 0.08408943939208985, 0.08372191619873047, 0.08366079711914062, 0.0837449951171875, 0.08392066955566406, 0.08378691101074219, 0.08379027557373046, 0.08367340850830078, 0.08368547058105469, 0.09388047790527344, 0.08454962921142578, 0.08404169464111329, 0.08347446441650391, 0.08309926605224609, 0.08302627563476563, 0.08322409820556641, 0.08321033477783203, 0.08320038604736328, 0.08312393951416015, 0.0832371826171875, 0.08341948699951172, 0.08370102691650391, 0.08358131408691406, 0.0831990737915039, 0.08351654052734375, 0.08342915344238282, 0.08351673889160156, 0.08319455718994141, 0.08322662353515625, 0.08331059265136719, 0.08346009826660156, 0.08332249450683593, 0.08334374237060548, 0.0833160629272461, 0.0835731201171875, 0.08354780578613281, 0.08475440216064453, 0.0843967056274414, 0.08364777374267578, 0.08344054412841796, 0.08352748870849609, 0.08360755157470703, 0.08375091552734375, 0.083525634765625, 0.08355203247070313, 0.08354450988769531, 0.08375068664550782, 0.08355430603027343, 0.08350857543945313, 0.08368364715576172, 0.08356867218017579, 0.08357273864746094, 0.08364268493652344, 0.08378572845458984, 0.08368463897705078, 0.08363603210449219, 0.08371804809570313, 0.08381465911865234, 0.0842001953125, 0.08426080322265625, 0.08415430450439453, 0.08407052612304687, 0.08392294311523438, 0.0836728973388672, 0.08388832092285156, 0.08385072326660156, 0.08369641876220703, 0.08488050842285157, 0.08381302642822265, 0.08407997131347657, 0.08407820892333985, 0.08387891387939453, 0.09413906860351562, 0.08509983825683594, 0.08414892578125, 0.0835189437866211, 0.08314524841308593, 0.08352140808105468, 0.08321558380126953, 0.08352425384521485, 0.08320668792724609, 0.08335862731933594, 0.08326636505126953, 0.08345315551757812, 0.08381890869140625, 0.08443449401855468, 0.08349359893798829, 0.08353955078125, 0.08398445129394531, 0.08354576110839844, 0.0836124496459961, 0.08339456176757812, 0.08325529479980469, 0.08364031982421875, 0.0838287353515625, 0.0834128646850586, 0.08361532592773438, 0.08359113311767578, 0.08365846252441406, 0.08390946960449219, 0.08373168182373047, 0.08378205108642578, 0.08393904113769532, 0.08391747283935547, 0.08404354858398437, 0.08414435577392577, 0.08401676940917968, 0.08370009613037109, 0.0835541763305664, 0.0836053466796875, 0.0835412826538086, 0.08366182708740234, 0.08356845092773438, 0.0834721908569336, 0.08356492614746094, 0.08350514984130859, 0.08346828460693359, 0.08340425872802734, 0.0835149154663086, 0.08341334533691407, 
0.08336656188964844, 0.08341913604736328, 0.08344956970214844, 0.08349520111083984, 0.08365433502197266, 0.08349523162841797, 0.08354783630371093, 0.08356639862060547, 0.08391526031494141, 0.08369971466064453, 0.08352726745605468, 0.08362262725830077, 0.083932861328125, 0.08371766662597656, 0.08366537475585938, 0.09398985290527344, 0.08432640075683594, 0.08358911895751953, 0.08307711791992188, 0.08291737365722657, 0.08295827484130859, 0.08336300659179688, 0.08320089721679688, 0.08298457336425781, 0.08302835083007812, 0.08294937896728516, 0.08301372528076172, 0.0831875228881836, 0.08308822631835938, 0.08310169219970703, 0.08308493041992188, 0.08315261077880859, 0.08326211547851563, 0.08314675140380859, 0.08305254364013671, 0.08311808013916015, 0.08309964752197266, 0.08321842956542969, 0.083072509765625, 0.08326566314697266, 0.08321676635742188, 0.08322230529785156, 0.0839579849243164, 0.083334716796875, 0.08318179321289063, 0.08320022583007812, 0.08318156433105468, 0.08356249237060547, 0.0837465591430664, 0.08337014770507813, 0.08326358032226562, 0.08332614135742188, 0.08328070068359375, 0.08330374145507813, 0.08334201812744141, 0.08325529479980469, 0.08336316680908203, 0.08330716705322265, 0.08330035400390624, 0.0833290252685547, 0.08335683441162109, 0.0833419189453125, 0.083351806640625, 0.08340684509277344, 0.08344505310058593, 0.08390089416503907, 0.08351971435546875, 0.08384512329101562, 0.08368895721435547, 0.08374441528320313, 0.08344454193115235, 0.08347004699707031, 0.08469725036621094, 0.08409279632568359, 0.08365078735351562, 0.08405951690673828, 0.08352540588378907, 0.08378668975830078, 0.09445283508300781, 0.08480655670166015, 0.08428854370117188, 0.0844728012084961, 0.08405401611328125, 0.08388227081298828, 0.08367036437988282, 0.08349033355712891, 0.08345865631103516, 0.08336819458007813, 0.08342281341552735, 0.08340521240234375, 0.083388671875, 0.08333689880371094, 0.08336390686035157, 0.0835765151977539, 0.08341276550292968, 0.08332752227783204, 0.0834150390625, 0.0833875503540039, 0.08351593780517579, 0.0835525131225586, 0.08370390319824218, 0.08356797027587891, 0.08359728240966798, 0.08418367767333984, 0.08375004577636719, 0.08373283386230469, 0.08397058868408203, 0.08379392242431641, 0.08369356536865234, 0.08386150360107422, 0.08404531097412109, 0.08356861114501953, 0.0838702392578125, 0.08369561767578125, 0.08360076904296875, 0.08365734100341797, 0.08373769378662109, 0.08378189086914062, 0.0841751708984375, 0.08412960052490234, 0.08379193878173828, 0.08381609344482421, 0.08388486480712891, 0.0838287353515625, 0.0838757095336914, 0.0839415054321289, 0.08393113708496094, 0.08390451049804687, 0.08392412567138671, 0.08374912261962891, 0.08388259124755859, 0.08382176208496094, 0.08387872314453125, 0.08405766296386719, 0.08391110229492188, 0.08378479766845703, 0.08393001556396484, 0.08380006408691407, 0.08416226959228515, 0.08384130859375, 0.08398643493652344, 0.09336217498779296, 0.08446685028076172, 0.08591446685791015, 0.08349593353271484, 0.0832194595336914, 0.08339984130859375, 0.08335587310791015, 0.08327232360839844, 0.08328105926513672, 0.08332403564453125, 0.08341613006591797, 0.08331330871582031, 0.08340828704833984, 0.08342111968994141, 0.08353196716308593, 0.08344009399414062, 0.08332288360595703, 0.08352153778076171, 0.08347443389892578, 0.08426905822753906, 0.08345753479003906, 0.08354662322998047, 0.08356044769287109, 0.08360755157470703, 0.08357635498046875, 0.08383331298828126, 0.08370790100097657, 0.08404377746582031, 0.08360336303710937, 0.08399267578125, 
0.08469913482666015, 0.08398137664794922, 0.08366153717041015, 0.08364604949951172, 0.08359302520751953, 0.08392704010009766, 0.0837841567993164, 0.083892578125, 0.08362723541259766, 0.08375113677978516, 0.08364089965820312, 0.08376016235351562, 0.08396431732177734, 0.0838067855834961, 0.08367513275146485, 0.0837138900756836, 0.08384524536132812, 0.084891357421875, 0.08403568267822266, 0.08406771087646485, 0.08373887634277344, 0.08370979309082031, 0.08383769226074218, 0.08386557006835937, 0.0842195816040039, 0.08431651306152343, 0.08387993621826172, 0.08387583923339843, 0.08392806243896485, 0.08393177795410156, 0.08380226898193359, 0.08404227447509766, 0.0841704330444336, 0.09373088073730469, 0.08435008239746093, 0.08391334533691407, 0.08352787017822266, 0.08331417846679688, 0.08329472351074219, 0.08320626831054688, 0.08336115264892578, 0.08341094207763672, 0.08348076629638672, 0.08339692687988282, 0.08337935638427735, 0.08331922912597656, 0.0833288345336914, 0.08344969940185547, 0.08337894439697266, 0.08336908721923827, 0.08368780517578125, 0.08341519927978516, 0.08401251220703125, 0.08352416229248047, 0.08380857849121094, 0.08353743743896484, 0.08354278564453126, 0.08369084930419922, 0.08400518035888672, 0.08368323516845703, 0.0835354232788086, 0.0834504623413086, 0.08348057556152344, 0.08370995330810548, 0.08361952209472656, 0.08378195190429688, 0.08372428894042969, 0.08369872283935546, 0.0840040283203125, 0.08373455810546875, 0.08378495788574218, 0.08379977416992188, 0.08376809692382813, 0.08380726623535156, 0.08538006591796875, 0.08374259185791015, 0.08373404693603516, 0.08362659454345703, 0.08372838592529297, 0.08646361541748047, 0.08413455963134765, 0.08392105865478515, 0.08395744323730468, 0.08421737670898438, 0.08434774780273438, 0.08426668548583985, 0.08445990753173828, 0.08423417663574219, 0.08408678436279297, 0.0840110092163086, 0.0840110092163086, 0.08421580505371094, 0.08422809600830078, 0.08414002990722656, 0.08393424224853516, 0.08402223968505859, 0.09425714874267578, 0.08428953552246093, 0.0836537628173828, 0.08359315490722656, 0.08341600036621094, 0.0833986587524414, 0.08350646209716797, 0.08341986846923828, 0.08340275573730468, 0.08347830200195312, 0.08334358215332031, 0.0834448013305664, 0.08345081329345704, 0.08344118499755859, 0.08365676879882812, 0.08379334259033203, 0.08340895843505859, 0.08343644714355469, 0.08369538879394531, 0.08429590606689454, 0.08391468811035156, 0.08358726501464844, 0.08368080139160156, 0.08384547424316406, 0.08384451293945312, 0.08368508911132813, 0.08383987426757812, 0.08378572845458984, 0.08380210876464844, 0.08373766326904297, 0.08388294219970703, 0.0838635482788086, 0.08377362823486328, 0.08360300445556641, 0.08361763000488281, 0.08355174255371094, 0.08450550079345703, 0.08412108612060547, 0.08411980438232422, 0.0837627182006836, 0.08371683502197266, 0.08374809265136719, 0.08400153350830078, 0.083884033203125, 0.08387379455566406, 0.08390451049804687, 0.08369926452636718, 0.08459446716308594, 0.08402761840820312, 0.08384710693359375, 0.08366051483154296, 0.08373942565917969, 0.08382054138183594, 0.08377327728271484, 0.08375833892822265, 0.08374323272705078, 0.08365916442871094, 0.08383283233642579, 0.08396969604492187, 0.08400316619873047, 0.08400895690917969, 0.08418851470947265, 0.08385807800292969]",tokens/s,11.928885959733877,, 
4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,994.603008,896.466944,0.0,501.219328,495.906816,s,1,8.4486875,8.4486875,0.0,8.4486875,8.4486875,8.4486875,8.4486875,[8.4486875],,kWh,3.8634957929139084e-05,4.2523124273827515e-06,1.3758344339989681e-05,5.6645614696511517e-05,,MB,1280.659456,1060.0448,0.0,652.214272,602.88,s,10,0.6205229492187501,0.06205229492187501,0.00027595415758395323,0.06209208106994629,0.062417222595214844,0.06243381195068359,0.062447083435058594,"[0.06241353607177735, 0.06221529769897461, 0.0617476806640625, 0.06200137710571289, 0.06179667282104492, 0.06245040130615234, 0.062107616424560544, 0.06153657531738281, 0.06217724609375, 0.062076545715332034]",tokens/s,4125.552492817691,kWh,1.9457863596111968e-06,2.1457113460483855e-07,1.2915362184133065e-06,3.4518937126293417e-06,tokens/kWh,74162190.76021384,MB,1305.849856,1072.627712,0.0,664.797184,611.073536,s,10,22.119763916015625,2.2119763916015627,0.00939409823685884,2.211623046875,2.2202459716796876,2.2256539916992186,2.229980407714844,"[2.219044189453125, 2.212201416015625, 2.21731494140625, 2.19991943359375, 2.215291748046875, 2.195977294921875, 2.23106201171875, 2.211044677734375, 2.20692236328125, 2.21098583984375]",tokens/s,28.481316635746452,kWh,6.477474127580723e-05,7.144476916506505e-06,2.7728792553385804e-05,9.964801074569954e-05,tokens/kWh,632225.3653489903,,s,630,22.11338684844971,0.03510061404515827,0.0005865970056677282,0.03507145500183105,0.03545301055908203,0.03568929996490478,0.038359202995300296,"[0.0351126708984375, 0.03557414245605469, 0.03589945602416992, 0.03536259078979492, 0.035674175262451174, 0.035448833465576174, 0.03554924774169922, 0.035574081420898435, 0.03562662506103516, 0.03541100692749023, 0.03543145751953125, 0.035452831268310545, 0.03558758544921875, 0.035260128021240233, 0.035433246612548826, 0.03536041641235352, 0.03590339279174805, 0.03526726531982422, 0.03520223999023438, 0.03492534255981445, 0.03495948791503906, 0.03528476715087891, 0.03514761734008789, 0.03509475326538086, 0.035184417724609375, 0.03508652877807617, 0.035114334106445315, 0.03512985610961914, 0.034936511993408206, 0.034758975982666016, 0.03501670455932617, 0.034860607147216796, 0.03490822219848633, 0.034926239013671874, 0.03498262405395508, 0.03831635284423828, 0.03570156860351562, 0.03544521713256836, 0.03524240112304688, 0.035202465057373046, 0.03534864044189453, 0.035238048553466794, 0.03510025787353516, 0.0351157112121582, 0.03524790573120117, 0.035093055725097654, 0.034854560852050784, 0.034721790313720705, 0.03489708709716797, 0.03475270462036133, 0.03469724655151367, 0.03503779220581055, 0.035046527862548825, 0.03531158447265625, 0.035201152801513674, 0.03507238388061523, 0.03494134521484375, 0.03484595108032226, 0.03459968185424805, 0.03454576110839844, 0.03463270568847656, 0.03485580825805664, 0.034942657470703124, 0.03491551971435547, 0.03513631820678711, 
0.03532185745239258, 0.03533004760742187, 0.03557894515991211, 0.03553580856323242, 0.035280895233154294, 0.03533004760742187, 0.03533811187744141, 0.035356895446777344, 0.0352845458984375, 0.035283294677734375, 0.03526812744140625, 0.03526294326782226, 0.035264511108398434, 0.03535676956176758, 0.03535795211791992, 0.035447391510009765, 0.03543584060668945, 0.03534924697875977, 0.03537926483154297, 0.035393470764160155, 0.03552851104736328, 0.03523731231689453, 0.035364864349365234, 0.03544140625, 0.03542547225952149, 0.03530124664306641, 0.03548067092895508, 0.03467875289916992, 0.03591350555419922, 0.034665664672851565, 0.034560928344726564, 0.034730144500732425, 0.03486294555664062, 0.035142814636230466, 0.035093345642089845, 0.034975200653076174, 0.03497366333007813, 0.03481011199951172, 0.035618942260742185, 0.03539737701416015, 0.035146175384521486, 0.035182144165039064, 0.035130142211914066, 0.034631328582763674, 0.03448524856567383, 0.0349725456237793, 0.03505779266357422, 0.035116737365722656, 0.03496172714233398, 0.035009727478027344, 0.0348782730102539, 0.03472592163085937, 0.03425088119506836, 0.03435625457763672, 0.034087390899658204, 0.034748767852783205, 0.03445145416259766, 0.035674304962158204, 0.03441424179077148, 0.03482598495483399, 0.03504579162597656, 0.03451145553588867, 0.03463987350463867, 0.03446988677978516, 0.03454771041870117, 0.03462368011474609, 0.034899776458740234, 0.0344923210144043, 0.03473622512817383, 0.03471308898925781, 0.03475868988037109, 0.034950847625732424, 0.03530831909179687, 0.034971649169921876, 0.03509689712524414, 0.03894240188598633, 0.035402751922607424, 0.03493180847167969, 0.03510236740112305, 0.03494732666015625, 0.03525379180908203, 0.035011039733886716, 0.03528051376342774, 0.034959743499755856, 0.03500032043457031, 0.034936832427978515, 0.03508243179321289, 0.03505078506469726, 0.035050079345703124, 0.03512928009033203, 0.03934620666503906, 0.035218463897705075, 0.03531846237182617, 0.038376705169677734, 0.03860201644897461, 0.03553875350952149, 0.03518422317504883, 0.03522592163085937, 0.035053569793701174, 0.03519184112548828, 0.03528799819946289, 0.03520719909667969, 0.035098430633544925, 0.0349796142578125, 0.03542406463623047, 0.03494911956787109, 0.03481270217895508, 0.034587871551513674, 0.03449484634399414, 0.03468230438232422, 0.03518726348876953, 0.03486742401123047, 0.034609119415283204, 0.03445955276489258, 0.03461676788330078, 0.03499305725097656, 0.03498783874511719, 0.035041343688964846, 0.03525027084350586, 0.035006271362304685, 0.03481190490722656, 0.0345824966430664, 0.034512096405029294, 0.03441132736206055, 0.03445977783203125, 0.03475715255737305, 0.034645694732666016, 0.03471769714355469, 0.03506774520874024, 0.03495100784301758, 0.03495910263061523, 0.035109344482421874, 0.03479951858520508, 0.03456982421875, 0.034552417755126956, 0.03465574264526367, 0.03469372940063477, 0.03514767837524414, 0.03476275253295898, 0.0346512336730957, 0.03462236785888672, 0.034586814880371096, 0.034790431976318356, 0.034775489807128905, 0.034812255859375, 0.03512934494018555, 0.03502035140991211, 0.03484102249145508, 0.03490329742431641, 0.03491712188720703, 0.03474959945678711, 0.034628257751464844, 0.034568382263183595, 0.03497356796264649, 0.03511939239501953, 0.03506159973144531, 0.03589862442016602, 0.03515228652954101, 0.03505920028686523, 0.035070526123046876, 0.035327617645263674, 0.03505014419555664, 0.035021984100341796, 0.03507843017578125, 0.0350555191040039, 0.03513116836547851, 0.03546406555175781, 
0.03517200088500977, 0.0350211181640625, 0.03512944030761719, 0.035026016235351565, 0.0350126724243164, 0.03526531219482422, 0.03521305465698242, 0.035190078735351564, 0.035068641662597655, 0.035168479919433594, 0.03513350296020508, 0.03452627182006836, 0.034570175170898436, 0.03462665557861328, 0.034717536926269534, 0.03456998443603516, 0.03460742568969727, 0.03470739364624023, 0.03466649627685547, 0.034603073120117185, 0.034716350555419925, 0.03494400024414063, 0.03891279983520508, 0.03493478393554687, 0.03469209671020508, 0.03595110321044922, 0.03456460952758789, 0.034613536834716796, 0.03448393630981445, 0.03455590438842773, 0.03481808090209961, 0.03498137664794922, 0.03517817687988281, 0.03507689666748047, 0.03498937606811523, 0.03521782302856445, 0.035049758911132815, 0.03480947113037109, 0.0345615348815918, 0.03470630264282227, 0.03473231887817383, 0.03489555358886719, 0.03483820724487305, 0.03485935974121094, 0.03474774551391602, 0.03495951843261719, 0.03492505645751953, 0.03485696029663086, 0.03551174545288086, 0.03498995208740235, 0.034903934478759766, 0.03457228851318359, 0.03510150527954101, 0.03473180770874024, 0.035160289764404294, 0.035043582916259766, 0.034860256195068356, 0.03465097427368164, 0.034743999481201174, 0.03484262466430664, 0.03502460861206055, 0.03516969680786133, 0.035246177673339846, 0.03530217742919922, 0.03531283187866211, 0.035334014892578126, 0.0352239990234375, 0.03513600158691406, 0.035176448822021485, 0.035323680877685545, 0.03515846252441406, 0.035171329498291014, 0.0352694091796875, 0.03531897735595703, 0.035238304138183595, 0.03524240112304688, 0.035272705078125, 0.03530319976806641, 0.035215263366699216, 0.03525254440307617, 0.035176448822021485, 0.03514777755737305, 0.039908702850341794, 0.03512070465087891, 0.035621086120605466, 0.035105182647705076, 0.035385665893554685, 0.03503897476196289, 0.0352891845703125, 0.034912094116210934, 0.03475251388549805, 0.034680831909179685, 0.03449856185913086, 0.03427123260498047, 0.03423040008544922, 0.034242496490478516, 0.03425001525878906, 0.034401248931884766, 0.03459471893310547, 0.034770942687988284, 0.03512022399902344, 0.03484332656860351, 0.034988033294677735, 0.035339679718017575, 0.035270942687988284, 0.035227649688720705, 0.03452131271362305, 0.03538761520385742, 0.0344290885925293, 0.034864192962646486, 0.034149024963378904, 0.033836544036865236, 0.034018878936767576, 0.03563375854492187, 0.03482659149169922, 0.03513126373291016, 0.035118366241455076, 0.035028961181640624, 0.03463894271850586, 0.034204608917236326, 0.03512815856933594, 0.03405209732055664, 0.03398451232910156, 0.033941505432128906, 0.03383327865600586, 0.033780960083007815, 0.03409945678710938, 0.0343361930847168, 0.03452617645263672, 0.036985694885253904, 0.03516336059570312, 0.03522124862670899, 0.03542015838623047, 0.03493199920654297, 0.03482313537597656, 0.0347562255859375, 0.03516950225830078, 0.035353504180908206, 0.036014110565185546, 0.03511088180541992, 0.035005950927734376, 0.035023040771484375, 0.03509859085083008, 0.03525651168823242, 0.03530153656005859, 0.03522895812988281, 0.035417377471923826, 0.03597769546508789, 0.03557708740234375, 0.03543308639526367, 0.03523417663574219, 0.03529523086547852, 0.03521763229370117, 0.03518032073974609, 0.0352749137878418, 0.035273662567138674, 0.035289825439453124, 0.03546121597290039, 0.03527478408813477, 0.035501953125, 0.03529987335205078, 0.03545462417602539, 0.03551395034790039, 0.03515750503540039, 0.036289440155029294, 0.035757568359375, 0.03534819030761719, 
0.03638761520385742, 0.035377086639404295, 0.03550419235229492, 0.03552793502807617, 0.035506271362304685, 0.035407711029052734, 0.03537977600097656, 0.03527091217041016, 0.03586294555664062, 0.035288673400878906, 0.034944286346435545, 0.03502892684936523, 0.03506201553344727, 0.035164703369140626, 0.035224990844726564, 0.035289024353027346, 0.03541788864135742, 0.03537395095825195, 0.03600588989257812, 0.037115169525146485, 0.03641593551635742, 0.03547571182250977, 0.03584617614746094, 0.03475251388549805, 0.03469327926635742, 0.034996063232421874, 0.03527078247070312, 0.035211135864257816, 0.03514134216308594, 0.03482870483398438, 0.035030143737792965, 0.03531372833251953, 0.03510752105712891, 0.03518259048461914, 0.034960670471191405, 0.03517897415161133, 0.0349516487121582, 0.03689039993286133, 0.036117568969726566, 0.035199935913085935, 0.03480780792236328, 0.03471152114868164, 0.034812641143798825, 0.03526566314697266, 0.0352039680480957, 0.03511920166015625, 0.03495091247558594, 0.03519657516479492, 0.03501311874389648, 0.03514940643310547, 0.0351932487487793, 0.03530137634277344, 0.03512662506103516, 0.03570684814453125, 0.03523551940917969, 0.035953662872314454, 0.03540108871459961, 0.03538950347900391, 0.03565011215209961, 0.03521331024169922, 0.035186817169189456, 0.03515955352783203, 0.035153854370117185, 0.03523952102661133, 0.0352182731628418, 0.035211231231689455, 0.03513142395019531, 0.0352973747253418, 0.0353155517578125, 0.03511299133300781, 0.035284000396728514, 0.03498073577880859, 0.034983745574951174, 0.03483004760742187, 0.034754337310791014, 0.0354947509765625, 0.03483606338500977, 0.03491068649291992, 0.03502012634277344, 0.034820350646972656, 0.034831871032714845, 0.035324737548828124, 0.03503104019165039, 0.03504563140869141, 0.03499184036254883, 0.03493686294555664, 0.034783550262451175, 0.03476886367797852, 0.03474812698364258, 0.03490345764160156, 0.03504188919067383, 0.03487644958496094, 0.03490208053588867, 0.034764801025390625, 0.034828510284423825, 0.03478316879272461, 0.034921150207519534, 0.03489324951171875, 0.03473916625976563, 0.03475027084350586, 0.0350022087097168, 0.03523583984375, 0.03505900955200195, 0.03497644805908203, 0.035415103912353516, 0.03449555206298828, 0.034870208740234374, 0.035170143127441406, 0.03528086471557617, 0.03479692840576172, 0.03482259368896484, 0.034840545654296874, 0.03468124771118164, 0.03472771072387695, 0.03478345489501953, 0.03480166244506836, 0.03495913696289062, 0.035126945495605466, 0.03502748870849609, 0.0353485107421875, 0.03482771301269531, 0.03468860626220703, 0.034732257843017575, 0.03509913635253906, 0.035129791259765626, 0.03503494262695313, 0.0350797119140625, 0.0350684814453125, 0.03504710388183594, 0.03512937545776367, 0.03523356628417969, 0.03533635330200195, 0.035358814239501955, 0.03732863998413086, 0.03542422485351562, 0.035332542419433594, 0.03531980895996094, 0.035259807586669925, 0.03497654342651367, 0.03517987060546875, 0.035127777099609375, 0.03513958358764648, 0.035108062744140626, 0.03527350234985351, 0.034952255249023435, 0.035164447784423826, 0.0349211196899414, 0.03482009506225586, 0.03483238220214844, 0.034699264526367186, 0.03475225448608398, 0.03466880035400391, 0.034551807403564457, 0.035094528198242186, 0.03495734405517578, 0.03490963363647461, 0.034871871948242185, 0.03496239852905274, 0.034974624633789066, 0.035200191497802735, 0.034885921478271485, 0.03484735870361328, 0.03473574447631836, 0.034811550140380856, 0.03478790283203125, 0.035279006958007814, 0.03493478393554687, 
0.034686782836914065, 0.0349733772277832, 0.03492963027954102, 0.03680444717407227, 0.03563520050048828, 0.03585833740234375, 0.03510847854614258, 0.034700000762939456, 0.03443891143798828, 0.03441254425048828, 0.03423027038574219, 0.03421404647827148, 0.03423385620117188, 0.03423862457275391, 0.034516769409179686, 0.03477148818969727, 0.034797569274902344, 0.0349716796875, 0.034996192932128904, 0.03537452697753906, 0.03494166564941406, 0.03495292663574219, 0.034698528289794923, 0.03468352127075195, 0.03482588958740234, 0.03849667358398438, 0.03511004638671875, 0.03492678451538086, 0.03505984115600586, 0.03507868957519531, 0.035030399322509766, 0.03509868621826172, 0.03539820861816406, 0.035446815490722657, 0.03536281585693359, 0.035631103515625, 0.035155998229980466, 0.03510879898071289, 0.03529264068603516, 0.035146495819091794, 0.035123008728027344, 0.035213409423828126, 0.035081344604492186, 0.03515235137939453, 0.03511072158813477, 0.035101184844970705, 0.03513267135620117, 0.035262622833251954, 0.03550252914428711, 0.03518457412719726, 0.03514380645751953, 0.0351802864074707, 0.03521577453613281, 0.035447742462158205, 0.03523625564575195, 0.03482479858398437, 0.03480166244506836, 0.034549983978271484, 0.03442374420166015, 0.03471651077270508, 0.03474991989135742, 0.03504105758666992, 0.035176223754882815, 0.0353109130859375]",tokens/s,28.489530089515306,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in 
from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,1555.582976,6043.860992,0.0,5641.33888,5589.443072,s,1,14.7601630859375,14.7601630859375,0.0,14.7601630859375,14.7601630859375,14.7601630859375,14.7601630859375,[14.7601630859375],,kWh,0.00022245195347084064,2.453096839253638e-05,7.611839422801137e-05,0.00032310131609138837,,MB,1402.69568,7990.018048,0.0,7574.913024,6755.660288,s,10,9.9339736328125,0.99339736328125,0.006642274724698987,0.9950534973144531,1.0007639099121093,1.001932553100586,1.0028674676513674,"[0.9838090209960938, 0.9842240600585938, 0.9889259643554688, 0.999744873046875, 0.9966932373046875, 0.9943037719726563, 1.0005042114257812, 0.99580322265625, 0.9868640747070313, 1.0031011962890626]",tokens/s,257.7015094487637,kWh,2.889625624507613e-05,3.1868105561941813e-06,1.911895468909272e-05,5.1202021490363026e-05,tokens/kWh,4999802.5966256615,MB,1447.264256,7990.018048,0.0,7574.913024,6755.662848,s,10,48.47760400390625,4.847760400390625,0.011599150415356616,4.850068115234375,4.86076494140625,4.86307119140625,4.86491619140625,"[4.82986962890625, 4.83301708984375, 4.8356435546875, 4.8418310546875, 4.846787109375, 4.86025244140625, 4.86537744140625, 4.85334912109375, 4.855958984375, 4.855517578125]",tokens/s,12.995691782729931,kWh,0.00014202536521409128,1.566642321854463e-05,9.442824725970797e-05,0.000252120035692344,tokens/kWh,249880.97366794516,,s,630,48.475242553710906,0.07694482945033482,0.0013884307372864925,0.07673393630981445,0.07792576904296875,0.07833617095947265,0.08556069694519043,"[0.0872303695678711, 0.07582870483398438, 0.07514278411865234, 0.0764037094116211, 0.07526758575439453, 0.07486310577392578, 0.07588684844970703, 0.07627340698242188, 0.07552614593505859, 0.07603955078125, 0.07561510467529296, 0.07682972717285157, 0.07932796478271484, 0.07827801513671875, 0.07665238189697265, 0.07615296173095704, 0.0764722900390625, 0.07623139190673828, 0.07580668640136719, 
0.07569203186035156, 0.07632009887695312, 0.07545513916015625, 0.07609366607666015, 0.07588838195800782, 0.07756147003173829, 0.07809830474853516, 0.07786316680908204, 0.07670809936523437, 0.07626048278808593, 0.07675596618652344, 0.07618755340576172, 0.07544847869873048, 0.07637181091308594, 0.07540092468261719, 0.0753748779296875, 0.07671520233154297, 0.07639481353759765, 0.07828530883789063, 0.07755570983886718, 0.07740191650390625, 0.07683055877685546, 0.07637026977539063, 0.07675049591064453, 0.07612655639648437, 0.07592960357666016, 0.07540451049804688, 0.07575615692138672, 0.07746137237548828, 0.07593193817138671, 0.07748505401611328, 0.07735724639892579, 0.07749510192871094, 0.07684095764160156, 0.07714943695068359, 0.07672908782958984, 0.07609760284423828, 0.07604831695556641, 0.07646371459960938, 0.0763984603881836, 0.07610153961181641, 0.07625984191894532, 0.0773264617919922, 0.07768883514404297, 0.08539488220214844, 0.07627247619628906, 0.07569734191894531, 0.0752503662109375, 0.07522099304199219, 0.07649852752685547, 0.07587385559082031, 0.0754758071899414, 0.07607705688476563, 0.07605417633056641, 0.07674403381347657, 0.07662844848632812, 0.07883020782470704, 0.07830313873291016, 0.07659523010253906, 0.07628387451171875, 0.07571046447753907, 0.07603839874267578, 0.07602288055419922, 0.07565811157226562, 0.07593283081054687, 0.07631651306152344, 0.07647856140136719, 0.076897216796875, 0.07779510498046875, 0.0790621109008789, 0.07776911926269531, 0.07683331298828125, 0.07555840301513672, 0.07585533142089844, 0.07568038177490234, 0.07631497955322265, 0.07540118408203125, 0.07572860717773437, 0.07647846221923828, 0.07713788604736328, 0.076866943359375, 0.07755670166015625, 0.0773570556640625, 0.0777154541015625, 0.07734272003173828, 0.07630582427978516, 0.07549775695800781, 0.07607328033447265, 0.07555481719970703, 0.07548652648925781, 0.07738336181640625, 0.07601596832275391, 0.07679363250732422, 0.07737776184082032, 0.07741417694091797, 0.07730249786376953, 0.07892147064208985, 0.0771055679321289, 0.07643917083740234, 0.07597433471679688, 0.07662425231933594, 0.07632835388183594, 0.07589513397216797, 0.07622918701171875, 0.0765494384765625, 0.07768563079833984, 0.07708013153076172, 0.08521619415283203, 0.07615078735351563, 0.07542784118652343, 0.07595852661132813, 0.07552387237548829, 0.07678749084472657, 0.07534611511230468, 0.07576985931396485, 0.07590707397460937, 0.0765849609375, 0.07595212554931641, 0.07727859497070312, 0.07885478210449219, 0.07813120269775391, 0.07677529907226563, 0.07592153930664063, 0.07650927734375, 0.07589469146728516, 0.07529676818847657, 0.07524966430664062, 0.07689411163330079, 0.0753637466430664, 0.07598355102539063, 0.0769966049194336, 0.07766425323486328, 0.07827664184570313, 0.07790179443359375, 0.07681024169921875, 0.07623379516601562, 0.07574237060546875, 0.07689561462402343, 0.07632937622070313, 0.07595017242431641, 0.07583315277099609, 0.07633478546142577, 0.07650262451171876, 0.07814403533935547, 0.07746905517578125, 0.07772866821289062, 0.0775763168334961, 0.0774286117553711, 0.07633715057373047, 0.07632236480712891, 0.07581148529052735, 0.07614543914794922, 0.07609037017822265, 0.07668531036376953, 0.07715795135498046, 0.07675708770751953, 0.076951904296875, 0.07744921875, 0.07741203308105468, 0.07735942077636719, 0.077264892578125, 0.0771688003540039, 0.07594159698486327, 0.07564617919921875, 0.07612303924560547, 0.07607500457763672, 0.07690876770019531, 0.0770467529296875, 0.07666361236572265, 0.07744303894042968, 0.08588143920898438, 
0.07663155364990235, 0.07576809692382812, 0.07609548950195312, 0.07574732971191406, 0.07571593475341797, 0.07621485137939453, 0.07549164581298828, 0.0757446060180664, 0.07659117126464844, 0.07696627044677734, 0.07677552032470703, 0.07922678375244141, 0.0781058578491211, 0.07697657775878906, 0.07649244689941406, 0.07659180450439453, 0.07582217407226563, 0.07609552001953125, 0.07555363464355469, 0.07629618835449219, 0.07643465423583984, 0.07657113647460938, 0.07626351928710938, 0.07743238067626954, 0.07764195251464844, 0.0785084457397461, 0.07693106842041016, 0.0764865951538086, 0.07608137512207032, 0.07602281951904297, 0.07599568176269532, 0.07600588989257813, 0.07607810974121093, 0.0763645477294922, 0.07688323211669922, 0.0774161605834961, 0.07698944091796875, 0.07748368072509766, 0.07731439971923829, 0.07731183624267578, 0.07687760162353516, 0.07631295776367188, 0.07589250946044922, 0.07580665588378906, 0.07679126739501953, 0.07619667053222656, 0.07677059173583985, 0.07721366119384766, 0.07751347351074218, 0.07731404876708985, 0.07769497680664063, 0.07785234832763673, 0.07750809478759765, 0.07701382446289062, 0.07630038452148437, 0.07620291137695312, 0.07670272064208984, 0.07646822357177735, 0.07709244537353516, 0.07657062530517578, 0.07757046508789063, 0.07696787261962891, 0.08580159759521484, 0.0761522216796875, 0.07579424285888672, 0.07537702178955077, 0.07559597015380859, 0.0765829086303711, 0.07596342468261719, 0.07556944274902344, 0.07659001922607422, 0.0761846694946289, 0.0769747543334961, 0.07654601287841797, 0.07912588500976563, 0.07810671997070312, 0.07682530975341798, 0.07610345458984374, 0.07667513275146484, 0.07550982666015625, 0.07623465728759765, 0.07555894470214844, 0.07674886322021485, 0.07640467071533204, 0.0771747817993164, 0.07697593688964843, 0.07738972473144531, 0.07740988922119141, 0.07809299468994141, 0.07789129638671875, 0.07661795043945313, 0.07601155090332032, 0.07552413177490234, 0.07587593841552734, 0.07661727905273437, 0.07608191680908204, 0.07680198669433594, 0.07692822265625, 0.07740316772460938, 0.07797305297851563, 0.07749667358398438, 0.07729766082763671, 0.07724435424804688, 0.076525634765625, 0.07586515045166016, 0.0770664291381836, 0.07612083435058593, 0.07593555450439453, 0.07678329467773437, 0.07749417877197265, 0.07774002838134765, 0.0777053451538086, 0.0774755859375, 0.07748271942138672, 0.07734272003173828, 0.07737465667724609, 0.0768023681640625, 0.07684966278076172, 0.07625727844238281, 0.07660902404785157, 0.07676313781738281, 0.07693730926513671, 0.07774684906005859, 0.07745216369628906, 0.07694630432128906, 0.08548390197753906, 0.07664169311523437, 0.07563868713378906, 0.07605062103271484, 0.07531388854980468, 0.07662521362304688, 0.07590294647216797, 0.07552639770507813, 0.07660582733154297, 0.07619366455078125, 0.07718809509277344, 0.07715971374511718, 0.0790893783569336, 0.07847526550292969, 0.07715760040283202, 0.07652022552490234, 0.07610697937011719, 0.07661238098144531, 0.07645193481445313, 0.07637776184082032, 0.07614495849609375, 0.07663404846191406, 0.07669554901123046, 0.07718643188476562, 0.0785127716064453, 0.07864022064208985, 0.07786383819580078, 0.07732742309570312, 0.07678034973144532, 0.0765564193725586, 0.07653510284423828, 0.07608201599121094, 0.07677426910400391, 0.07662076568603515, 0.07673241424560547, 0.07704595184326171, 0.0775513916015625, 0.07801449584960937, 0.0779384994506836, 0.07850943756103515, 0.07740732574462891, 0.07692041778564453, 0.07699225616455078, 0.0762003173828125, 0.07652873229980468, 
0.07660944366455077, 0.07762226867675781, 0.07657676696777344, 0.07716038513183594, 0.07763974761962891, 0.07752089691162109, 0.07750653076171875, 0.07815907287597657, 0.0772228775024414, 0.07679984283447265, 0.07733042907714843, 0.07664972686767578, 0.07640755462646484, 0.07796332550048828, 0.07670582580566407, 0.07716035461425781, 0.07803903961181641, 0.07755081939697266, 0.0862674560546875, 0.07657107543945313, 0.0759617919921875, 0.07598137664794923, 0.07638336181640625, 0.07556582641601563, 0.0774898223876953, 0.07641855621337891, 0.07640483093261718, 0.07703437042236329, 0.07648461151123047, 0.07792435455322265, 0.07966719818115234, 0.0784691162109375, 0.0768770523071289, 0.07665670776367188, 0.076468994140625, 0.07687366485595704, 0.0771809310913086, 0.07616732788085938, 0.07634044647216796, 0.07670028686523438, 0.07715225219726562, 0.07764717102050782, 0.0779701156616211, 0.07858579254150391, 0.07720556640625, 0.07723020935058594, 0.07682755279541016, 0.0772290267944336, 0.07663728332519532, 0.07638028717041015, 0.07591929626464844, 0.07677155303955079, 0.0770382080078125, 0.07773184204101563, 0.07728742218017579, 0.07797513580322266, 0.0776904296875, 0.07735750579833985, 0.0779677734375, 0.07667890930175782, 0.0776789779663086, 0.07674992370605468, 0.0763104019165039, 0.07666985321044922, 0.07655033874511719, 0.07787299346923827, 0.07814959716796875, 0.07747586822509765, 0.07741359710693359, 0.07743154907226563, 0.07784870147705078, 0.0772339859008789, 0.07670716857910156, 0.07750115203857422, 0.07642518615722656, 0.07587664031982422, 0.07692217254638672, 0.07661154937744141, 0.07819926452636719, 0.07697535705566406, 0.07738803100585938, 0.08560639953613282, 0.076218017578125, 0.07582550048828125, 0.07534796905517578, 0.07646793365478516, 0.07603036499023437, 0.07595142364501953, 0.07638483428955078, 0.07676044464111328, 0.07696857452392578, 0.07641881561279297, 0.07744898986816406, 0.07953398132324219, 0.07836319732666015, 0.07668511962890626, 0.07627548980712891, 0.0759804458618164, 0.07712044525146484, 0.07559078216552734, 0.07601222229003907, 0.07605471801757813, 0.07659910583496093, 0.07653171539306641, 0.07685497283935547, 0.07787142181396485, 0.07806566619873047, 0.07746669006347656, 0.077412353515625, 0.07694601440429688, 0.07651363372802734, 0.07600099182128907, 0.0759810562133789, 0.07624854278564454, 0.077042236328125, 0.07638985443115234, 0.07690857696533203, 0.07782185363769531, 0.07741862487792969, 0.07750294494628907, 0.0780800018310547, 0.07719267272949219, 0.07651948547363281, 0.07606729888916015, 0.07673052978515625, 0.07658889770507812, 0.07689625549316406, 0.07656201934814454, 0.07763536071777344, 0.0773076171875, 0.07780172729492188, 0.07724864196777344, 0.07845321655273438, 0.07721683502197266, 0.07714867401123048, 0.07639910125732421, 0.07708262634277344, 0.07632640075683594, 0.0779118423461914, 0.07683340454101563, 0.07658914947509765, 0.07738697814941406, 0.07757084655761719, 0.076980224609375, 0.0855920639038086, 0.07674873352050782, 0.0758924789428711, 0.07591097259521484, 0.07614514923095703, 0.07616070556640625, 0.0762965087890625, 0.07560806274414063, 0.07678975677490234, 0.07655219268798828, 0.07678739166259765, 0.07775379180908203, 0.07941824340820312, 0.07815158081054688, 0.0771421127319336, 0.07601356506347656, 0.07670921325683594, 0.07623894500732421, 0.07614838409423828, 0.07623977661132812, 0.07612774658203125, 0.07723468780517578, 0.07745148468017578, 0.07688579559326172, 0.07810253143310547, 0.07797756958007812, 0.07766649627685547, 
0.07777468872070313, 0.07767040252685548, 0.07693500518798828, 0.0762550048828125, 0.07585395050048828, 0.07605865478515625, 0.07654627227783203, 0.07731008148193359, 0.076391357421875, 0.07744403076171875, 0.07725225830078125, 0.07787554931640625, 0.07786870574951171, 0.07722003173828125, 0.07776041412353515, 0.07612646484375, 0.07605558776855469, 0.07656752014160156, 0.0762061767578125, 0.07775145721435547, 0.07619491577148438, 0.07678141021728516, 0.07748793792724609, 0.07708796691894532, 0.07770806121826172, 0.07826566314697266, 0.07760761260986328, 0.07625225830078125, 0.07648921966552734, 0.07668287658691406, 0.07635456085205078, 0.07759152221679687, 0.07654073333740234, 0.07704166412353515, 0.07749632263183594, 0.07748607635498046, 0.08561049652099609, 0.07618560028076173, 0.0758656005859375, 0.07577257537841797, 0.0765417938232422, 0.07603404998779296, 0.07601971435546875, 0.07655785369873047, 0.07610022735595703, 0.07659913635253907, 0.07691468811035156, 0.07753727722167969, 0.07913597106933594, 0.07820358276367187, 0.07711344146728516, 0.07625945281982421, 0.07651110076904297, 0.07657695770263671, 0.07590812683105469, 0.07641295623779297, 0.07636045074462891, 0.07623680114746094, 0.07789878082275391, 0.07673545837402344, 0.07797126770019532, 0.0777927017211914, 0.07776700592041015, 0.077609375, 0.07684607696533204, 0.07645286560058594, 0.07601939392089843, 0.07600505828857422, 0.07707849884033204, 0.0765440673828125, 0.07668617248535156, 0.07698611450195313, 0.07738755035400391, 0.07821542358398438, 0.07747782135009766, 0.07735504150390625, 0.07771135711669921, 0.0767979507446289, 0.07628390502929687, 0.075955810546875, 0.07678607940673828, 0.0774103012084961, 0.07654319763183594, 0.076329345703125, 0.07696221160888672, 0.07776255798339844, 0.07765776062011719, 0.07864755249023438, 0.0781304931640625, 0.076923583984375, 0.07632672119140625, 0.07618950653076172, 0.07685196685791015, 0.07731552124023437, 0.07665471649169922, 0.07673993682861328, 0.07700972747802734, 0.07760076904296875, 0.07745072174072265]",tokens/s,12.996324862159385,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,820.822016,4724.752384,0.0,4322.230272,4218.036736,s,1,13.7112841796875,13.7112841796875,0.0,13.7112841796875,13.7112841796875,13.7112841796875,13.7112841796875,[13.7112841796875],,kWh,0.00019343141136666115,2.1329793376566716e-05,6.258477229000026e-05,0.0002773459770332281,,MB,1318.883328,5379.063808,0.0,4963.958784,4656.747008,s,10,9.551892517089845,0.9551892517089844,0.004095110668673521,0.9564719543457032,0.9591959655761719,0.9593675445556641,0.9595048077392578,"[0.945282470703125, 0.9520821533203125, 0.9532904052734374, 0.9538060302734375, 0.9556437377929687, 0.9574463500976562, 0.95834423828125, 0.9591578369140625, 0.9573001708984376, 0.9595391235351562]",tokens/s,268.0097159196207,kWh,2.7856372544320865e-05,3.0712006519297875e-06,1.8455469309817572e-05,4.938304250606822e-05,tokens/kWh,5183965.730109531,MB,1343.483904,5379.063808,0.0,4963.958784,4656.749568,s,10,44.65644189453125,4.4656441894531245,0.003608832592192586,4.465925537109375,4.46994775390625,4.47027734375,4.470541015625,"[4.46987451171875, 4.46146044921875, 4.4604130859375, 4.4674560546875, 4.46290625, 4.46518408203125, 4.46202294921875, 4.47060693359375, 4.4698505859375, 4.4666669921875]",tokens/s,14.107707046788956,kWh,0.00013060428075651007,1.4407482149881653e-05,8.646464240398177e-05,0.00023147640531037347,tokens/kWh,272165.9683436284,,s,630,44.65418838500984,0.0708796641031901,0.0014379925343906677,0.07064944076538085,0.0709762710571289,0.07137885856628418,0.08159596786499024,"[0.08166006469726562, 0.07210208129882813, 0.07136038208007812, 0.07098941040039063, 0.07040946960449218, 0.07041081237792969, 0.07045362854003906, 0.07037129974365235, 0.07042665863037109, 0.07042457580566407, 0.07042253112792969, 0.07046963500976562, 0.07038905334472656, 0.07050012969970704, 0.07059958648681641, 0.07057603454589843, 0.07337993621826172, 0.07046364593505859, 0.07060380554199219, 0.07060540771484375, 0.07060502624511719, 0.07064701080322265, 0.07059945678710937, 0.07057929229736327, 0.07066307067871094, 0.07061299133300782, 0.07064089965820312, 0.07067520141601563, 0.07061094665527344, 0.07063756561279297, 0.07069696044921875, 0.07064985656738282, 0.07069446563720704, 0.07071174621582031, 0.07066828918457031, 0.07069286346435547, 0.07068463897705078, 0.07066230773925782, 0.07069599914550781, 0.07074294281005859, 0.0707270736694336, 0.07068863677978515, 0.07078361511230469, 0.07096710205078124, 0.07069100952148437, 0.07074201965332032, 0.07074972534179688, 0.07068515014648437, 0.07076249694824219, 0.07094477081298828, 0.07109798431396484, 0.07084435272216796, 0.07095299530029296, 0.07086294555664062, 0.07072946929931641, 0.07075798034667968, 
0.07134019470214843, 0.07129990386962891, 0.07091152191162109, 0.07087152099609376, 0.07082803344726563, 0.07075984191894531, 0.0708655014038086, 0.08091065979003906, 0.07196217346191407, 0.07137875366210937, 0.07098332977294922, 0.0704886703491211, 0.07038604736328125, 0.07038566589355469, 0.07039904022216797, 0.07039241790771485, 0.07048636627197266, 0.07041161346435547, 0.0704989776611328, 0.07044915008544922, 0.07039382171630859, 0.07049152374267578, 0.07069513702392578, 0.07052742767333985, 0.0705638427734375, 0.07051055908203124, 0.07045942687988281, 0.07048172760009766, 0.07047129821777344, 0.07052140808105468, 0.07048601531982422, 0.07053311920166015, 0.07061638641357422, 0.07052751922607423, 0.07049846649169922, 0.07056998443603515, 0.07067852783203125, 0.07056179046630859, 0.0707092514038086, 0.07055155181884766, 0.07059561920166016, 0.0706033935546875, 0.07053961944580078, 0.0706170883178711, 0.07062528228759765, 0.07060031890869141, 0.07067446136474609, 0.07063382720947266, 0.07061254119873046, 0.07069331359863282, 0.07075635528564453, 0.07067033386230469, 0.0709265594482422, 0.07083599853515625, 0.07063961791992188, 0.07072156524658203, 0.07073497772216797, 0.07066223907470703, 0.07072774505615234, 0.07077305603027344, 0.07072335815429688, 0.070738525390625, 0.07075215911865235, 0.07072982025146485, 0.07073792266845703, 0.07082189178466797, 0.07074329376220703, 0.07074892425537109, 0.07082189178466797, 0.07079869079589844, 0.08251398468017578, 0.07183331298828124, 0.07121238708496094, 0.07086975860595703, 0.07050028991699218, 0.07035295867919922, 0.07040144348144531, 0.07039260864257812, 0.0703664321899414, 0.0704467544555664, 0.07033344268798829, 0.07040812683105468, 0.07044096374511719, 0.0704567642211914, 0.07057872009277344, 0.0704082260131836, 0.07041555023193359, 0.07049504089355468, 0.07044716644287109, 0.07052684783935546, 0.07048512268066406, 0.07047468566894531, 0.07053472137451172, 0.07047417449951172, 0.07049420928955077, 0.07064899444580078, 0.0705321273803711, 0.0705720672607422, 0.07056950378417969, 0.07055123138427734, 0.07053574371337891, 0.0704840316772461, 0.07054534149169922, 0.07057817840576172, 0.07058636474609375, 0.0705802230834961, 0.07059833526611328, 0.07055721282958985, 0.07060095977783203, 0.0706904296875, 0.07059548950195313, 0.07061504364013672, 0.07060431671142578, 0.07060940551757812, 0.07068425750732422, 0.07063980865478516, 0.07059878540039062, 0.07070521545410156, 0.07064575958251954, 0.07063478088378906, 0.07078713226318359, 0.07069264221191406, 0.07065910339355469, 0.07076802825927735, 0.07069750213623047, 0.07063081359863281, 0.07070361328125, 0.07079964447021485, 0.07068617248535156, 0.07073612976074219, 0.07071949005126953, 0.07070105743408203, 0.07078428649902344, 0.08084051513671875, 0.07203794860839843, 0.07138505554199219, 0.07116867065429687, 0.07050959777832032, 0.07044124603271484, 0.07039276885986329, 0.07044662475585937, 0.07043094635009765, 0.07046144104003907, 0.07055955505371093, 0.07049356842041016, 0.07045011138916016, 0.0705532455444336, 0.07042889404296875, 0.07050444793701172, 0.07042816162109375, 0.07044563293457032, 0.07240902709960938, 0.07055769348144532, 0.0709959716796875, 0.07046963500976562, 0.070540771484375, 0.07053775787353515, 0.07062560272216797, 0.07057158660888672, 0.07095308685302734, 0.07099167633056641, 0.0705948486328125, 0.07056169891357422, 0.07054080200195313, 0.07071552276611329, 0.07057753753662109, 0.07082905578613281, 0.07079730987548828, 0.07073753356933593, 0.07070515441894532, 
0.07076287841796874, 0.07064966583251953, 0.0706500473022461, 0.07068595123291016, 0.07069977569580078, 0.070659423828125, 0.07076902770996094, 0.07075663757324219, 0.07071453094482422, 0.07073878479003906, 0.0709728012084961, 0.07074224090576171, 0.070766845703125, 0.07083971405029296, 0.07080012512207032, 0.07082012939453125, 0.07082978820800781, 0.07082915496826171, 0.07080028533935546, 0.07089676666259766, 0.07094137573242187, 0.0708446044921875, 0.07083606719970703, 0.07080976104736328, 0.07085465240478515, 0.07085059356689453, 0.08143904113769532, 0.07216505432128906, 0.07143132781982423, 0.07099887847900391, 0.07041741180419922, 0.07041331481933594, 0.07044681549072265, 0.07051277160644531, 0.070459228515625, 0.07046995544433594, 0.0705222396850586, 0.07039568328857422, 0.07045616149902344, 0.07047142028808594, 0.07046947479248047, 0.07048847961425782, 0.07044608306884766, 0.07052706909179687, 0.0704889907836914, 0.07044915008544922, 0.07071497344970704, 0.07063113403320312, 0.07053097534179688, 0.07064656066894531, 0.07052902221679687, 0.07050434875488282, 0.07067350769042968, 0.070614013671875, 0.07058841705322266, 0.0706170883178711, 0.07050220489501953, 0.07051897430419922, 0.07057820892333984, 0.07050179290771484, 0.07055363464355469, 0.07060342407226562, 0.07054249572753907, 0.07065865325927734, 0.07067865753173828, 0.07070105743408203, 0.07069001770019531, 0.07061910247802734, 0.07060505676269531, 0.07066886138916016, 0.07062332916259766, 0.07062108612060547, 0.07071743774414062, 0.07066828918457031, 0.07066352081298828, 0.07072016143798829, 0.07072930908203125, 0.07079078674316407, 0.0714043197631836, 0.07088278198242187, 0.07070598602294922, 0.07068438720703125, 0.07078707122802734, 0.07093036651611329, 0.070697021484375, 0.070793212890625, 0.07079955291748047, 0.0707204818725586, 0.07081196594238282, 0.08239727783203125, 0.07186886596679687, 0.07133184051513672, 0.07094477081298828, 0.07047142028808594, 0.0705006103515625, 0.07040930938720703, 0.07047206115722657, 0.07052889251708984, 0.07043753814697265, 0.07045938873291016, 0.07050649261474609, 0.07045938873291016, 0.07050198364257812, 0.07040796661376954, 0.07067302703857421, 0.07048806762695313, 0.070508544921875, 0.07054547119140625, 0.07069894409179688, 0.07051862335205078, 0.07071715545654297, 0.07062777709960938, 0.07056998443603515, 0.07059251403808593, 0.07051443481445313, 0.07057193756103515, 0.0705519027709961, 0.07051078033447265, 0.07064995574951172, 0.07057174682617187, 0.07055888366699219, 0.07057698822021484, 0.07060684967041016, 0.07052003479003906, 0.07061507415771484, 0.07067520141601563, 0.07059366607666015, 0.0706343002319336, 0.07065129852294921, 0.07062799835205077, 0.07060415649414062, 0.07062751770019532, 0.07058268737792969, 0.07068447875976562, 0.07068022155761719, 0.07079503631591796, 0.07076553344726562, 0.07069407653808593, 0.07062569427490234, 0.07069718170166016, 0.07070956420898437, 0.07065913391113281, 0.0707303695678711, 0.07073935699462891, 0.07063753509521484, 0.0707651824951172, 0.07074713897705077, 0.07069149017333984, 0.07075670623779297, 0.0719974365234375, 0.07155241394042969, 0.07115779113769531, 0.08216162872314453, 0.07173065948486328, 0.07124639892578125, 0.07085420989990235, 0.07038329315185547, 0.07040076446533203, 0.0703672332763672, 0.07049215698242188, 0.07036669158935546, 0.07041897583007813, 0.07037133026123046, 0.07037942504882813, 0.07051036834716796, 0.07051222229003906, 0.07056575775146484, 0.07054608154296875, 0.07045475006103516, 0.07066492462158203, 
0.07046553802490234, 0.07051264190673828, 0.07073382568359375, 0.07050035095214843, 0.07045033264160157, 0.07052886199951172, 0.07046774291992187, 0.07046854400634765, 0.07063340759277344, 0.07050764465332031, 0.07061363220214843, 0.07057785797119141, 0.07055343627929687, 0.07063648223876953, 0.07057587432861329, 0.0705752944946289, 0.07061587524414062, 0.07058179473876953, 0.07055721282958985, 0.07057027435302735, 0.07051741027832031, 0.07059407806396484, 0.07061574554443359, 0.07060265350341798, 0.07064883422851563, 0.07069376373291016, 0.07066006469726563, 0.07082768249511719, 0.07071372985839844, 0.07062454223632812, 0.07067311859130859, 0.07073792266845703, 0.07058432006835938, 0.07070681762695312, 0.07070758056640625, 0.07065760040283203, 0.0706923828125, 0.07074294281005859, 0.07109980773925781, 0.07093280029296875, 0.07083650970458985, 0.07088333129882812, 0.07076044464111328, 0.07084236907958984, 0.07087500762939453, 0.08308207702636719, 0.07188422393798828, 0.0713076171875, 0.0710494384765625, 0.07051273345947266, 0.07051776123046875, 0.07054585266113281, 0.07223321533203125, 0.0706196517944336, 0.07072329711914062, 0.07068262481689454, 0.07066349029541015, 0.07058911895751953, 0.07063346862792969, 0.07059772491455078, 0.07056489562988282, 0.07063273620605469, 0.07056854248046875, 0.07048303985595702, 0.07098368072509766, 0.07105213165283203, 0.07085676574707031, 0.07073750305175781, 0.07092912292480469, 0.0706654052734375, 0.0705699234008789, 0.070582275390625, 0.07056851196289063, 0.07055260467529297, 0.07057270050048828, 0.07047148895263672, 0.0705643539428711, 0.07057769775390625, 0.07059311676025391, 0.07066342163085937, 0.07066998291015625, 0.07067747497558594, 0.0707863006591797, 0.070640380859375, 0.07065395355224609, 0.07071260833740234, 0.07066902160644531, 0.07061504364013672, 0.0707215347290039, 0.07067750549316407, 0.07065087890625, 0.07073494720458984, 0.07072681427001953, 0.07071842956542969, 0.07081244659423828, 0.07072767639160156, 0.07073081970214844, 0.0708000030517578, 0.07086249542236328, 0.07087580871582032, 0.07083132934570313, 0.070868896484375, 0.07083302307128907, 0.07082803344726563, 0.07091139221191406, 0.07099635314941406, 0.07091439819335937, 0.07087324523925781, 0.0825791015625, 0.07192617797851562, 0.07128268432617188, 0.07113081359863281, 0.07053958129882812, 0.07049215698242188, 0.07060409545898437, 0.07052358245849609, 0.07056732940673828, 0.07053167724609374, 0.07050627136230468, 0.07053977966308594, 0.07049129486083984, 0.07051289367675781, 0.07129325103759766, 0.0711251220703125, 0.07069821166992188, 0.0707896957397461, 0.0706888656616211, 0.07054950714111329, 0.0705022430419922, 0.07058803558349609, 0.07057667541503906, 0.0705249252319336, 0.07056703948974609, 0.07043571472167968, 0.07053517150878906, 0.07053107452392578, 0.07049785614013672, 0.07053766632080079, 0.07049769592285156, 0.07059629058837891, 0.07061110687255859, 0.07160489654541016, 0.07120700836181641, 0.07097122955322266, 0.07066150665283204, 0.07059232330322265, 0.07057833862304687, 0.07064383697509766, 0.0705788803100586, 0.07059417724609375, 0.07062361907958985, 0.07054124450683594, 0.07086700439453125, 0.07073401641845703, 0.07069789123535156, 0.07064848327636719, 0.07066809844970703, 0.07061052703857422, 0.07072633361816406, 0.07068057250976563, 0.07065945434570313, 0.07066000366210938, 0.0720302734375, 0.07164765167236328, 0.0713015365600586, 0.07073734283447265, 0.07073673248291015, 0.07079702758789062, 0.07086386871337891, 0.07077171325683594, 0.07083417510986328, 
0.08175574493408203, 0.07180560302734375, 0.07129532623291016, 0.07101427459716797, 0.07044710540771484, 0.07036313629150391, 0.07046112060546875, 0.0704229736328125, 0.07046870422363281, 0.07049456024169921, 0.07044960021972656, 0.07053107452392578, 0.07048831939697266, 0.07048166656494141, 0.07045935821533203, 0.07042880249023438, 0.07052877044677734, 0.07065187072753906, 0.07051283264160156, 0.07057599639892578, 0.07057625579833984, 0.07056764984130859, 0.07061539459228515, 0.07055353546142579, 0.0705433578491211, 0.07066015625, 0.07065184020996093, 0.07062528228759765, 0.07064921569824219, 0.07067507171630859, 0.0706659164428711, 0.07071775817871094, 0.07065805053710937, 0.07077833557128907, 0.07072313690185547, 0.07065702056884765, 0.07060431671142578, 0.07071501159667969, 0.07063145446777344, 0.07068547058105469, 0.07076815795898438, 0.07076230621337891, 0.0707508773803711, 0.07081929779052734, 0.07074432373046875, 0.07067881774902343, 0.07084207916259766, 0.070814208984375, 0.07077251434326172, 0.07078211212158203, 0.07078364562988282, 0.07081798553466796, 0.07085823822021485, 0.07092684936523437, 0.07084031677246094, 0.07126630401611328, 0.07137894439697266, 0.07097548675537109, 0.07098969268798828, 0.0708322525024414, 0.07079936218261719, 0.07088278198242187, 0.07076713562011719]",tokens/s,14.10841900356851,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,2133.262336,4618.911744,0.0,4223.664128,4030.321664,s,1,12.9877333984375,12.9877333984375,0.0,12.9877333984375,12.9877333984375,12.9877333984375,12.9877333984375,[12.9877333984375],,kWh,0.00016406305547914143,1.8089961443284726e-05,6.06139373799941e-05,0.00024276695430242023,,MB,1618.501632,5021.564928,0.0,4613.7344,4385.21344,s,10,4.788660247802735,0.4788660247802735,0.0014680126505054807,0.47900039672851563,0.4798681488037109,0.4809925308227539,0.4818920364379883,"[0.48211691284179686, 0.47681765747070315, 0.47686187744140623, 0.4785756530761719, 0.47771749877929687, 0.4790259094238281, 0.4793768615722656, 0.47957470703125, 0.4796182861328125, 0.47897488403320315]",tokens/s,534.5962894683643,kWh,1.4391370430555777e-05,1.5864802138101464e-06,9.506211308665405e-06,2.5484061953031327e-05,tokens/kWh,10045494.335707689,MB,1622.540288,5023.66208,0.0,4615.831552,4385.216,s,10,42.01978515625,4.201978515625,0.015899671911357487,4.197839599609375,4.223855419921875,4.224817602539063,4.225587348632813,"[4.17725537109375, 4.2219921875, 4.19841455078125, 4.18525537109375, 4.20359521484375, 4.1972646484375, 4.19577880859375, 4.22577978515625, 4.1908076171875, 4.2236416015625]",tokens/s,14.992937199877476,kWh,0.00012341991380652849,1.3614399468116394e-05,7.641430187213566e-05,0.00021344861514678058,tokens/kWh,295153.00418640464,,s,630,42.01672320556641,0.066693211437407,0.0008805868155106545,0.06655326461791992,0.0671962188720703,0.06759352226257324,0.07020866516113282,"[0.06733004760742188, 0.06611942291259766, 0.0661690902709961, 
0.0663934097290039, 0.0663410873413086, 0.06653343963623047, 0.06625958251953125, 0.06640412902832031, 0.06625484466552735, 0.06584832000732421, 0.06630409240722657, 0.06601119995117187, 0.06608367919921875, 0.06639974212646485, 0.06603228759765625, 0.065834716796875, 0.06608089447021484, 0.06592511749267578, 0.06672383880615235, 0.06621091461181641, 0.06607929229736328, 0.06596012878417969, 0.06619888305664062, 0.0659648666381836, 0.06614755249023438, 0.06618192291259765, 0.06619545745849609, 0.0663796157836914, 0.06706393432617187, 0.06595587158203126, 0.0661277084350586, 0.06624066925048828, 0.06604560089111328, 0.0663280029296875, 0.0664073257446289, 0.06620569610595703, 0.0660664291381836, 0.06615039825439453, 0.065775390625, 0.06687766265869141, 0.06601455688476562, 0.06581878662109375, 0.06645171356201172, 0.0665459213256836, 0.06593360137939454, 0.06618911743164063, 0.06636124420166016, 0.06616063690185547, 0.06656368255615235, 0.06636176300048828, 0.06643465423583984, 0.06622249603271485, 0.06918348693847656, 0.06618521881103516, 0.0662194595336914, 0.06625321960449218, 0.06620700836181641, 0.06630668640136719, 0.06695756530761719, 0.06610944366455078, 0.0663552017211914, 0.06627875518798829, 0.06621849822998047, 0.06753257751464843, 0.06839542388916016, 0.06723123168945312, 0.0672542724609375, 0.07038169860839844, 0.06735715484619141, 0.06663954925537109, 0.06627350616455079, 0.06669014739990234, 0.06631053161621094, 0.06696355438232422, 0.06654966735839844, 0.06651302337646485, 0.0664970245361328, 0.0768532485961914, 0.06685078430175781, 0.06645558166503907, 0.06687744140625, 0.0668958740234375, 0.06624578857421876, 0.06671587371826172, 0.06711974334716797, 0.06640249633789062, 0.0668424301147461, 0.0667026596069336, 0.06663238525390625, 0.06668841552734375, 0.06676105499267578, 0.06669471740722656, 0.06637987518310547, 0.0668157730102539, 0.066787841796875, 0.06673235321044922, 0.0675041275024414, 0.0664430694580078, 0.06668860626220703, 0.06672239685058594, 0.06642588806152344, 0.06692758178710938, 0.0668569564819336, 0.06670134735107422, 0.06716851043701172, 0.06685052490234375, 0.06660707092285156, 0.06681398773193359, 0.06736640167236328, 0.06654412841796875, 0.06646927642822266, 0.06733001708984375, 0.06653151702880859, 0.0668635482788086, 0.06682828521728515, 0.0672314224243164, 0.06733971405029297, 0.06665510559082032, 0.06655101013183594, 0.06662252807617187, 0.0667174072265625, 0.06668643188476563, 0.06645609283447265, 0.06638082885742187, 0.06667385864257812, 0.06667801666259765, 0.06722560119628906, 0.06661865234375, 0.06682915496826172, 0.06643891143798829, 0.0664659194946289, 0.06668486022949219, 0.06678739166259766, 0.06655522918701172, 0.06686377716064452, 0.06635724639892578, 0.0663345947265625, 0.06628546905517578, 0.0662489242553711, 0.06715142059326172, 0.06626143646240235, 0.06667046356201171, 0.06646182250976562, 0.07020543670654297, 0.06638947296142578, 0.06610793304443359, 0.06632653045654296, 0.06658662414550781, 0.06656409454345703, 0.06705971527099609, 0.06667855834960937, 0.06655203247070313, 0.06631401824951172, 0.06661968231201172, 0.06661933135986328, 0.06635926055908203, 0.06634089660644531, 0.0665759048461914, 0.06662560272216797, 0.06685942077636718, 0.06672959899902343, 0.06644915008544922, 0.06636608123779297, 0.0664039077758789, 0.06704959869384766, 0.06648831939697265, 0.06688371276855469, 0.06626255798339843, 0.06608739471435547, 0.06725651550292969, 0.06648992156982422, 0.06633078765869141, 0.06660710144042968, 0.06652726745605468, 
0.06621324920654297, 0.06659123229980468, 0.06628108978271484, 0.06635552215576172, 0.06612361907958984, 0.06667324829101562, 0.06641049957275391, 0.06652518463134766, 0.06631014251708985, 0.06643411254882813, 0.06617798614501953, 0.06619292449951172, 0.06615001678466798, 0.06606114959716797, 0.07067135620117188, 0.06689826965332031, 0.06632582092285157, 0.06675481414794922, 0.06619967651367188, 0.06609747314453125, 0.06597427368164062, 0.06591283416748046, 0.06622322845458985, 0.0662938232421875, 0.06596281433105469, 0.06640435028076172, 0.06732511901855469, 0.06633350372314453, 0.06681517028808594, 0.06761759948730468, 0.06689791870117187, 0.06607667541503906, 0.06650675201416016, 0.06632569885253906, 0.06619782257080079, 0.06668544006347656, 0.06620703887939453, 0.06647599792480469, 0.06831382751464844, 0.06745702362060547, 0.0664801254272461, 0.06637347412109375, 0.06633618927001952, 0.0663436508178711, 0.06669312286376954, 0.06627327728271484, 0.06643004608154297, 0.0661817626953125, 0.06599324798583985, 0.06625401306152344, 0.06646841430664062, 0.06642278289794921, 0.06614351654052734, 0.06598729705810547, 0.0661294403076172, 0.06613785552978516, 0.06588470458984375, 0.06616697692871094, 0.0665716781616211, 0.0663288345336914, 0.06655625915527344, 0.06589234924316406, 0.0662282257080078, 0.06630963134765624, 0.06619171142578124, 0.06615261077880859, 0.0662507553100586, 0.06652252960205078, 0.06618086242675782, 0.06591983795166016, 0.06589234924316406, 0.06635475158691406, 0.06626143646240235, 0.06656803131103516, 0.06664790344238282, 0.06714604949951172, 0.06677932739257812, 0.06719602966308594, 0.06748585510253906, 0.06638480377197266, 0.06644217681884766, 0.06745292663574219, 0.06677053070068359, 0.06787318420410156, 0.06834598541259766, 0.06727458953857422, 0.06661634826660157, 0.06671052551269531, 0.06659481811523438, 0.0665128936767578, 0.06643302154541016, 0.06685430145263672, 0.06648226928710937, 0.06660704040527343, 0.06692256164550782, 0.06646015930175782, 0.06654064178466797, 0.06684496307373047, 0.06665660858154297, 0.06645283508300781, 0.06698079681396485, 0.06656819152832032, 0.06718195343017579, 0.06704144287109375, 0.06684239959716796, 0.0667286376953125, 0.06673817443847656, 0.0664815673828125, 0.06674082946777343, 0.06651904296875, 0.06661491394042969, 0.06694745635986328, 0.06670336151123046, 0.0669839324951172, 0.06656409454345703, 0.06662457275390625, 0.06651996612548829, 0.06616067504882812, 0.066733154296875, 0.06675267028808594, 0.06650752258300781, 0.06673612976074218, 0.06636339569091797, 0.06635724639892578, 0.06658787536621094, 0.06633757019042968, 0.06657843017578124, 0.06672179412841797, 0.06698150634765625, 0.06648051452636719, 0.06676403045654297, 0.06668326568603515, 0.066542236328125, 0.06637948608398438, 0.06630809783935547, 0.06655705261230468, 0.06663462066650391, 0.06669020843505859, 0.06637654113769531, 0.06723696136474609, 0.06631903839111328, 0.0671297607421875, 0.0664653091430664, 0.07095123291015625, 0.06671507263183593, 0.0661279067993164, 0.0661974105834961, 0.06616143798828125, 0.06615225219726563, 0.0663695068359375, 0.06632406616210937, 0.06655449676513672, 0.06595276641845703, 0.06652339172363281, 0.06598326110839844, 0.06603142547607421, 0.06605840301513671, 0.06616473388671874, 0.0662610855102539, 0.0662034912109375, 0.06640054321289063, 0.06589561462402344, 0.06790000152587891, 0.06614169311523438, 0.06722598266601562, 0.0677623062133789, 0.06659891510009766, 0.06705766296386718, 0.06639513397216797, 0.06652825927734375, 
0.06670320129394532, 0.0667772445678711, 0.06937190246582031, 0.06704742431640626, 0.06931660461425782, 0.06665328216552735, 0.06643599700927734, 0.06610739135742187, 0.06614412689208984, 0.06666214752197265, 0.06638972473144532, 0.06672764587402344, 0.06641718292236329, 0.0666096954345703, 0.06619532775878906, 0.06649839782714843, 0.06624272155761719, 0.06661529541015625, 0.06683238220214843, 0.06639584350585938, 0.06629622650146484, 0.06624195098876953, 0.06627734375, 0.0665031967163086, 0.066393310546875, 0.06630271911621094, 0.06611766052246094, 0.06646073913574219, 0.06637593841552734, 0.06618182373046876, 0.06674636840820312, 0.06603705596923828, 0.06645830535888672, 0.06620518493652344, 0.06717404937744141, 0.06664473724365234, 0.06690144348144532, 0.06643910217285157, 0.06618978881835938, 0.06619766235351562, 0.06645760345458984, 0.06641254425048829, 0.0663115234375, 0.06660099029541015, 0.06643942260742187, 0.0666382064819336, 0.06642412567138672, 0.06675475311279297, 0.06643309020996094, 0.0702099838256836, 0.06666620635986328, 0.06649446105957031, 0.06631858825683594, 0.06656412506103515, 0.06646736145019531, 0.06672227478027344, 0.06646784210205078, 0.06632243347167968, 0.06645286560058594, 0.06648076629638672, 0.06665625762939453, 0.066510498046875, 0.06700681304931641, 0.06662348937988281, 0.06675027465820313, 0.06652301025390625, 0.06751683044433594, 0.06639606475830079, 0.06688358306884766, 0.066698974609375, 0.0666770248413086, 0.06654771423339843, 0.06644070434570312, 0.0664060821533203, 0.06660163116455078, 0.0664557113647461, 0.06661145782470704, 0.06657833862304688, 0.06656156921386719, 0.06673244476318359, 0.06662518310546875, 0.06668278503417968, 0.0662911376953125, 0.06693366241455079, 0.06683853149414062, 0.06632582092285157, 0.06616291046142578, 0.06614473724365234, 0.06632991790771485, 0.0661118392944336, 0.06598220825195313, 0.06634556579589844, 0.06643711853027344, 0.06629376220703125, 0.06631231689453125, 0.06658182525634766, 0.06671366119384765, 0.0670928955078125, 0.06659081268310547, 0.06877110290527344, 0.06694156646728516, 0.06655503845214844, 0.0667779541015625, 0.06686685180664062, 0.06693513488769531, 0.06683033752441406, 0.06678083038330078, 0.06665660858154297, 0.06674022674560547, 0.07725260925292969, 0.06658662414550781, 0.06842499542236329, 0.06648291015625, 0.06623353576660156, 0.06672035217285156, 0.06703231811523437, 0.06670025634765625, 0.06642221069335938, 0.06660550689697266, 0.06680178833007812, 0.06632185363769531, 0.06621855926513671, 0.06671132659912109, 0.06695343780517578, 0.06725993347167969, 0.06676441955566406, 0.06635395050048828, 0.06737686157226562, 0.06673772430419922, 0.06628797149658203, 0.07008419036865235, 0.0666509780883789, 0.06672930908203124, 0.0666751708984375, 0.06679798126220703, 0.06634268951416016, 0.06651074981689453, 0.06670140838623047, 0.06700614166259766, 0.06787923431396484, 0.06829222106933594, 0.06702092742919921, 0.06635340881347657, 0.06668287658691406, 0.0667658233642578, 0.06675353240966797, 0.0666767349243164, 0.06658428955078124, 0.06659001922607422, 0.06719792175292968, 0.06655999755859375, 0.06642195129394532, 0.06645843505859375, 0.07015328216552734, 0.06653794860839844, 0.06620207977294922, 0.06676882934570312, 0.06677510070800781, 0.0665921630859375, 0.06691913604736328, 0.06732316589355469, 0.06670006561279297, 0.06652127838134765, 0.06654137420654296, 0.06794854736328125, 0.06684671783447266, 0.06664601898193359, 0.06657164764404297, 0.06644322967529297, 0.06636752319335938, 0.06614080047607422, 
0.06656966400146484, 0.06756409454345703, 0.06613766479492188, 0.06658092498779297, 0.06622000122070312, 0.06616886138916016, 0.06634825897216796, 0.06604029083251953, 0.06658617401123047, 0.06616483306884766, 0.06731843566894531, 0.06625193786621093, 0.06622073364257812, 0.0662734375, 0.06815277099609375, 0.06752108764648437, 0.06617407989501953, 0.06634284973144532, 0.06620256042480469, 0.06600704193115234, 0.06638579559326171, 0.06624422454833985, 0.06604032135009766, 0.06645942687988281, 0.066287841796875, 0.06603119659423828, 0.06630032348632812, 0.06629785919189453, 0.06577152252197266, 0.06593724822998047, 0.06586585235595703, 0.066070556640625, 0.06599622344970703, 0.06648786926269531, 0.06589542388916016, 0.06815455627441407, 0.06633760070800782, 0.0658545913696289, 0.06611443328857422, 0.06640748596191406, 0.06603052520751954, 0.0667290267944336, 0.06658544158935546, 0.06625846099853516, 0.06627708435058594, 0.06664482879638672, 0.06816553497314454, 0.06685084533691406, 0.06669292449951172, 0.06652339172363281, 0.06692848205566407, 0.06697795104980468, 0.06765436553955079, 0.06690995025634766, 0.06685337829589844, 0.06687923431396485, 0.06650777435302735, 0.06700685119628906, 0.06674495697021485, 0.06719078063964844, 0.06664601898193359, 0.06731314849853516, 0.06703689575195312, 0.06650704193115234, 0.06681622314453126, 0.06702518463134766, 0.06692179107666016, 0.06696006774902344, 0.06703529357910157, 0.06653683471679687, 0.0667959976196289, 0.06658866882324219, 0.0672911376953125, 0.06659846496582031, 0.07059625244140624, 0.06653564453125, 0.06681862640380859, 0.06678294372558594, 0.06655744171142577, 0.06686185455322266, 0.06687334442138672, 0.06668492889404297, 0.06662067413330078, 0.06710892486572266, 0.06662624359130859, 0.0667688980102539, 0.0666269760131836, 0.06691622161865235, 0.06674230194091797, 0.06709232330322265, 0.06657724761962891, 0.06631353759765625, 0.06729593658447265, 0.06652864074707031, 0.06990911865234375, 0.06687872314453125, 0.0665771484375, 0.06677417755126953, 0.0667957763671875, 0.066943359375, 0.06725865936279297, 0.06653228759765625, 0.0667658233642578, 0.06934323120117188, 0.06936528015136718, 0.06684127807617188, 0.06689469146728516, 0.06712592315673828, 0.06661286163330078, 0.06711363220214844, 0.066779296875, 0.066864990234375, 0.06685689544677734, 0.0671744613647461, 0.06716329956054687]",tokens/s,14.994029803745788,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,821.870592,2378.039296,0.0,1975.517184,1956.768256,s,1,10.5425478515625,10.5425478515625,0.0,10.5425478515625,10.5425478515625,10.5425478515625,10.5425478515625,[10.5425478515625],,kWh,9.614946311245754e-05,1.0598802311342106e-05,3.076085794204153e-05,0.00013750912336584118,,MB,1268.187136,2646.474752,0.0,2231.369728,2177.460736,s,10,3.4495434265136726,0.3449543426513672,0.0008405701697659104,0.3449681854248047,0.34586766967773436,0.34601856689453125,0.3461392846679688,"[0.3450155029296875, 0.3446260681152344, 0.3432297668457031, 0.34574462890625, 0.3453783874511719, 0.34616946411132815, 0.3440893249511719, 0.3449208679199219, 0.3445352783203125, 0.3458341369628906]",tokens/s,742.1271987253393,kWh,1.0292346079165651e-05,1.135062171429193e-06,6.804133795793511e-06,1.8231542046388354e-05,tokens/kWh,14041598.859198708,MB,1292.685312,2646.474752,0.0,2231.369728,2177.463296,s,10,31.360389648437497,3.13603896484375,0.00922202546279069,3.1356613769531254,3.1444459716796875,3.1516230102539065,3.1573646411132814,"[3.137915283203125, 3.14285107421875, 3.137009765625, 3.124395751953125, 
3.13480810546875, 3.12678173828125, 3.130117431640625, 3.1365146484375, 3.158800048828125, 3.13119580078125]",tokens/s,20.089036107731815,kWh,9.076185464208052e-05,1.0011284422711524e-05,4.9244466598607555e-05,0.00015001760566339957,tokens/kWh,419950.7099277107,,s,630,31.35823757553099,0.049774980278620644,0.0007112168176420514,0.0495904483795166,0.05045927505493164,0.05097720680236816,0.05251960342407227,"[0.051141056060791015, 0.050249214172363284, 0.05001679992675781, 0.0497360954284668, 0.049584129333496096, 0.04947763061523437, 0.051547489166259765, 0.05148124694824219, 0.05009408187866211, 0.04976435089111328, 0.04995414352416992, 0.05007632064819336, 0.049713153839111325, 0.04960255813598633, 0.04955033493041992, 0.049593345642089844, 0.0497597770690918, 0.05005769729614258, 0.04987807846069336, 0.05055583953857422, 0.049686527252197264, 0.04952883148193359, 0.049450111389160153, 0.04945388793945313, 0.0493507194519043, 0.04973936080932617, 0.049611072540283206, 0.0495371208190918, 0.04947305679321289, 0.049656288146972656, 0.04982771301269531, 0.049778816223144534, 0.04955859375, 0.050364734649658204, 0.049629825592041016, 0.05027779388427735, 0.04973587036132812, 0.04997478485107422, 0.049945503234863284, 0.04996073532104492, 0.049729984283447264, 0.049332000732421874, 0.049366241455078126, 0.049482559204101564, 0.04984419250488281, 0.049453025817871095, 0.04945840072631836, 0.049468223571777346, 0.04938547134399414, 0.05031856155395508, 0.05039494323730469, 0.050353118896484375, 0.050108287811279295, 0.04976038360595703, 0.04974796676635742, 0.04945849609375, 0.049484481811523436, 0.04928934478759766, 0.04950815963745117, 0.04919507217407226, 0.04957183837890625, 0.049280094146728515, 0.049342369079589846, 0.05091923141479492, 0.0501739501953125, 0.05165875244140625, 0.049926143646240234, 0.049591903686523435, 0.04943731307983398, 0.04938115310668945, 0.04946873474121094, 0.05037126541137695, 0.04981145477294922, 0.0499918098449707, 0.04938764953613281, 0.04941593551635742, 0.049465343475341796, 0.049958240509033205, 0.05035689544677734, 0.050403583526611326, 0.050636543273925784, 0.05228675079345703, 0.05031190490722656, 0.049983486175537106, 0.0499218864440918, 0.05095030212402344, 0.05055897521972656, 0.05169561767578125, 0.04956774520874024, 0.049606143951416014, 0.04952524948120117, 0.04993228912353516, 0.04964556884765625, 0.049600513458251956, 0.04950377655029297, 0.049549793243408205, 0.0495101432800293, 0.05002633666992187, 0.04947795104980469, 0.05002969741821289, 0.049429153442382814, 0.04958006286621094, 0.04948563385009765, 0.049447391510009764, 0.04951372909545899, 0.04997196960449219, 0.049802654266357424, 0.04975414276123047, 0.049377857208251955, 0.04962822341918945, 0.0494192008972168, 0.050329345703125, 0.04953107070922851, 0.049682750701904296, 0.04949708938598633, 0.04939238357543945, 0.04940595245361328, 0.05010636901855469, 0.04951766586303711, 0.050214080810546874, 0.04967190551757812, 0.0495022087097168, 0.04946944046020508, 0.04961280059814453, 0.04950028610229492, 0.04975775909423828, 0.05127030563354492, 0.05529375839233398, 0.04996527862548828, 0.04975369644165039, 0.05030624008178711, 0.04966502380371094, 0.04961280059814453, 0.04959231948852539, 0.04950009536743164, 0.04951219177246094, 0.04975238418579102, 0.049432575225830076, 0.04966099166870117, 0.0498144645690918, 0.05016287994384765, 0.04978287887573242, 0.04987564849853516, 0.05013827133178711, 0.04951100921630859, 0.05004422378540039, 0.04988528060913086, 0.049406368255615236, 
0.04937478256225586, 0.049826271057128904, 0.04993475341796875, 0.04944489669799805, 0.04950425720214844, 0.04953497695922852, 0.04936243057250977, 0.04931363296508789, 0.049272705078125, 0.049386272430419924, 0.050003105163574216, 0.05012070465087891, 0.04947840118408203, 0.049476734161376955, 0.04957692718505859, 0.0494202880859375, 0.04929087829589844, 0.04948358535766602, 0.0495928955078125, 0.04931516647338867, 0.04920790481567383, 0.05032966232299805, 0.05034726333618164, 0.049355518341064455, 0.04920284652709961, 0.04948409652709961, 0.04908854293823242, 0.04928921508789062, 0.049160224914550785, 0.0508702392578125, 0.04976230239868164, 0.04993228912353516, 0.04967862319946289, 0.04959203338623047, 0.050673694610595704, 0.04935084915161133, 0.04923270416259766, 0.05133369445800781, 0.0497279052734375, 0.049360897064208986, 0.049168384552001954, 0.05097062301635742, 0.050188190460205076, 0.050968990325927735, 0.04922793579101563, 0.04910079956054687, 0.04921052932739258, 0.04921615982055664, 0.04931103897094727, 0.04929977416992187, 0.049203071594238285, 0.04937942504882813, 0.049474143981933595, 0.04907123184204101, 0.04917695999145508, 0.04898896026611328, 0.049545246124267577, 0.04918854522705078, 0.04928716659545898, 0.053147647857666014, 0.049400863647460935, 0.04903756713867188, 0.04924694442749023, 0.04933599853515625, 0.04929087829589844, 0.049562015533447266, 0.04995920181274414, 0.05022662353515625, 0.05026259231567383, 0.05053235244750977, 0.05079443359375, 0.05094931030273438, 0.05023628616333008, 0.05054003143310547, 0.050369022369384765, 0.049731327056884767, 0.04980569458007812, 0.049268478393554686, 0.04911321640014649, 0.04917452621459961, 0.04899638366699219, 0.049802528381347654, 0.04972627258300781, 0.04954262542724609, 0.04918912124633789, 0.04907254409790039, 0.04906502532958985, 0.04915475082397461, 0.04923392105102539, 0.049076480865478514, 0.04944377517700195, 0.04945497512817383, 0.049363903045654293, 0.049192958831787106, 0.049212448120117186, 0.04923801422119141, 0.050024478912353516, 0.04912246322631836, 0.04925593566894531, 0.04948102569580078, 0.04916543960571289, 0.049108768463134764, 0.04919622421264649, 0.049267681121826175, 0.05126438522338867, 0.050358497619628906, 0.049552257537841794, 0.04962192153930664, 0.04943894577026367, 0.05024275207519531, 0.05015532684326172, 0.05060028839111328, 0.04998393630981445, 0.05064278411865234, 0.051095584869384765, 0.04964691162109375, 0.04968531036376953, 0.050358112335205076, 0.05017001724243164, 0.0502743034362793, 0.05039334487915039, 0.05020556640625, 0.04993727874755859, 0.04963705444335938, 0.049614334106445314, 0.0512786865234375, 0.04949398422241211, 0.04943667221069336, 0.04942598342895508, 0.04988880157470703, 0.049525665283203124, 0.04976844787597656, 0.04935270309448242, 0.04927897644042969, 0.049223678588867184, 0.049288352966308596, 0.049396575927734374, 0.04954652786254883, 0.04959408187866211, 0.04939878463745117, 0.0493359375, 0.04936947250366211, 0.0493507194519043, 0.04956668853759766, 0.049922847747802736, 0.04992124938964844, 0.04986294555664063, 0.049678207397460934, 0.04949440002441406, 0.049969600677490233, 0.04938089752197266, 0.04941267013549805, 0.04938441467285156, 0.04938028717041015, 0.0493383674621582, 0.04931379318237305, 0.049286209106445315, 0.04944732666015625, 0.04921744155883789, 0.04923027038574219, 0.04921366500854492, 0.04925436782836914, 0.049334270477294925, 0.05003833770751953, 0.050095840454101564, 0.05019311904907227, 0.0498073616027832, 0.0510294075012207, 
0.04996771240234375, 0.04948364639282227, 0.04948508834838867, 0.049531009674072264, 0.050455265045166016, 0.04930915069580078, 0.04932182312011719, 0.049423038482666014, 0.049547264099121094, 0.04946739196777344, 0.049616897583007816, 0.04926889419555664, 0.04964281463623047, 0.049508895874023434, 0.04979916763305664, 0.04918476867675781, 0.04945900726318359, 0.0492259521484375, 0.049278465270996094, 0.04919311904907227, 0.04912774276733398, 0.0491212158203125, 0.049164352416992185, 0.04942544174194336, 0.04909507369995117, 0.04897158432006836, 0.04909856033325195, 0.04898441696166992, 0.04907049560546875, 0.04908051300048828, 0.0491069450378418, 0.04975331115722656, 0.04928371047973633, 0.049405632019042967, 0.04929584121704102, 0.049223678588867184, 0.049119232177734375, 0.049460865020751955, 0.049430912017822265, 0.04932988739013672, 0.049057857513427734, 0.049508575439453126, 0.05256192016601562, 0.05042099380493164, 0.04994294357299805, 0.04951894378662109, 0.049530879974365234, 0.04940166473388672, 0.04937276840209961, 0.04937788772583008, 0.05122662353515625, 0.0508487663269043, 0.04934963226318359, 0.04953628921508789, 0.04954800033569336, 0.05001619338989258, 0.049562816619873044, 0.04943264007568359, 0.05320377731323242, 0.049942527770996094, 0.04981366348266601, 0.04963747024536133, 0.05100252914428711, 0.050073505401611325, 0.05023433685302734, 0.049989345550537106, 0.049845664978027344, 0.04977900695800781, 0.04995743942260742, 0.04964742279052734, 0.04976454544067383, 0.04974182510375977, 0.049642974853515626, 0.0495777587890625, 0.04958899307250977, 0.04971724700927734, 0.04969990539550781, 0.050996192932128905, 0.05015753555297851, 0.049995742797851565, 0.04972547149658203, 0.049777793884277347, 0.05001500701904297, 0.050587745666503904, 0.05041785430908203, 0.04961171340942383, 0.04991852951049805, 0.050663745880126954, 0.05062035369873047, 0.05095436859130859, 0.05028617477416992, 0.05012134552001953, 0.05007676696777344, 0.05030771255493164, 0.05016476821899414, 0.049731647491455075, 0.049156417846679686, 0.04961955261230469, 0.04943462371826172, 0.04955136108398438, 0.0492092170715332, 0.04900057601928711, 0.04899430465698242, 0.04894265747070312, 0.04921615982055664, 0.048920352935791014, 0.04916617584228516, 0.049171806335449215, 0.04935148620605469, 0.04930374526977539, 0.04966547012329101, 0.04937356948852539, 0.04909212875366211, 0.04915004730224609, 0.0489736328125, 0.04894367980957031, 0.04908022308349609, 0.0492108154296875, 0.04906665420532227, 0.04888886260986328, 0.049372127532958984, 0.0496226577758789, 0.04933465576171875, 0.04938956832885742, 0.04931379318237305, 0.051617568969726565, 0.053833473205566404, 0.05015964889526367, 0.0496748161315918, 0.05006687927246094, 0.05007212829589844, 0.049782367706298826, 0.04946268844604492, 0.049601535797119144, 0.04918067169189453, 0.05062451171875, 0.04952230453491211, 0.049516929626464846, 0.04921343994140625, 0.04937897491455078, 0.04936345672607422, 0.049595230102539065, 0.04931891250610351, 0.04913356781005859, 0.04978073501586914, 0.050024574279785156, 0.049271678924560545, 0.04912025451660156, 0.0491253776550293, 0.04961667251586914, 0.049434848785400394, 0.050205921173095705, 0.0491324462890625, 0.04954281616210938, 0.04894294357299805, 0.049025054931640624, 0.0494185905456543, 0.04923622512817383, 0.049519489288330075, 0.049879871368408206, 0.049207359313964846, 0.04945315170288086, 0.04918291091918945, 0.04916809463500976, 0.05353814315795898, 0.05182841491699219, 0.050135135650634766, 
0.04925513458251953, 0.04938988876342774, 0.049366878509521483, 0.04956310272216797, 0.049371681213378905, 0.04931071853637695, 0.04914278411865235, 0.049253536224365235, 0.049054561614990236, 0.049325214385986325, 0.04929417419433594, 0.04959958267211914, 0.049445854187011716, 0.05510137557983399, 0.05001420974731445, 0.04955740737915039, 0.04931388854980469, 0.049409534454345705, 0.049265151977539064, 0.04958153533935547, 0.049770240783691404, 0.05177753448486328, 0.050759681701660155, 0.0502578239440918, 0.05013052749633789, 0.05016233444213867, 0.05045993423461914, 0.05045920181274414, 0.049919872283935546, 0.05009328079223633, 0.0501052474975586, 0.049880321502685544, 0.04976713562011719, 0.049934368133544925, 0.04976639938354492, 0.04956739044189453, 0.049825214385986326, 0.050076095581054685, 0.05016524887084961, 0.050156513214111326, 0.04995481491088867, 0.04995686340332031, 0.04987801742553711, 0.050127872467041014, 0.04981964874267578, 0.04974182510375977, 0.049858558654785154, 0.05024153518676758, 0.04961075210571289, 0.05000191879272461, 0.05042585754394531, 0.05040332794189453, 0.050409534454345706, 0.05066131210327148, 0.051525630950927735, 0.05221494293212891, 0.050632606506347655, 0.050834110260009766, 0.04971459197998047, 0.04975519943237305, 0.049659679412841794, 0.049637374877929685, 0.050826881408691404, 0.049709342956542966, 0.049875102996826175, 0.049584129333496096, 0.04984134292602539, 0.05052492904663086, 0.04970086288452148, 0.04970086288452148, 0.0500428466796875, 0.05097238540649414, 0.05022259140014648, 0.04983276748657227, 0.05006131362915039, 0.050138431549072264, 0.049987518310546875, 0.050219776153564454, 0.05027337646484375, 0.049938751220703126, 0.049818206787109375, 0.04962899017333984, 0.04966131210327149, 0.04969350433349609, 0.05131673431396484, 0.049947647094726565, 0.04952950286865234, 0.049236320495605466, 0.049565696716308595, 0.04935270309448242, 0.04943203353881836, 0.04943414306640625, 0.04934963226318359, 0.04951382446289063, 0.04917913436889648, 0.049428638458251954, 0.049280094146728515, 0.049830814361572266, 0.04920729446411133, 0.04911088180541992, 0.04923961639404297, 0.04931958389282227, 0.04922054290771485, 0.04926496124267578, 0.049159137725830075, 0.049261280059814457, 0.049541088104248045, 0.04992822265625, 0.04963708877563477, 0.049763935089111325, 0.04957868957519531, 0.04929536056518555, 0.04935190582275391, 0.04920809555053711, 0.04917852783203125, 0.04915430450439453, 0.04914588928222656, 0.05022236633300781, 0.04916175842285156, 0.04911587142944336, 0.04940214538574219, 0.05026406478881836, 0.05042585754394531, 0.05051955032348633, 0.050461185455322265, 0.050329601287841794, 0.05030428695678711, 0.05045135879516602, 0.0512011833190918, 0.05017433547973633, 0.05098115158081055, 0.04934860610961914, 0.04936316680908203, 0.04928092956542969, 0.05241600036621094, 0.049019264221191405, 0.049620990753173826, 0.048791553497314455, 0.048893856048583983, 0.048881759643554686, 0.048964897155761716, 0.04918102264404297, 0.05002687835693359, 0.050457984924316406, 0.050254463195800785, 0.05031481552124024, 0.051182014465332035]",tokens/s,20.090414790772307,, 
4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,822.706176,8535.277568,0.0,8132.755456,7824.681472,s,1,20.239404296875,20.239404296875,0.0,20.239404296875,20.239404296875,20.239404296875,20.239404296875,[20.239404296875],,kWh,0.00036547763667915656,4.030764831322505e-05,0.00011976426247800598,0.0005255495474703875,,MB,1328.869376,9539.813376,0.0,9124.708352,8500.500992,s,10,17.6496591796875,1.76496591796875,0.008069081486357212,1.7673174438476562,1.7723209228515626,1.772489013671875,1.772623486328125,"[1.744209716796875, 1.7600440673828126, 1.7606370849609374, 1.765523193359375, 1.7658818359375, 1.76890966796875, 1.7687530517578125, 1.7722835693359376, 1.7707598876953126, 1.7726571044921875]",tokens/s,145.0452937327103,kWh,5.137821300500036e-05,5.6666330271496925e-06,3.408911060460012e-05,9.113395663675017e-05,tokens/kWh,2809051.745886416,MB,1353.478144,9541.910528,0.0,9126.805504,8500.503552,s,10,83.1837822265625,8.318378222656248,0.012373395103269352,8.3210693359375,8.330133300781249,8.332666748046876,8.334693505859375,"[8.2913125, 8.3067744140625, 8.3097578125, 8.3178154296875, 8.31565234375, 8.3243232421875, 8.3268603515625, 8.326515625, 8.3352001953125, 8.3295703125]",tokens/s,7.573591668194508,kWh,0.0002432191061183312,2.682897776544594e-05,0.00016160215705939978,0.00043165024094317687,tokens/kWh,145951.49967330476,,s,630,83.1798198699952,0.13203146011110337,0.0017406141564366205,0.13201754760742188,0.13310989379882812,0.13347393035888672,0.1424360319519043,"[0.14634512329101562, 0.1319015350341797, 0.13055258178710938, 0.13018870544433594, 0.13025750732421876, 0.13028115844726562, 0.13024400329589844, 0.13161497497558594, 0.13127349853515624, 0.1313276824951172, 0.13164700317382813, 0.1303411865234375, 0.13074832153320312, 0.1320142059326172, 0.1305562286376953, 0.13141401672363281, 0.13149757385253907, 0.13101008605957032, 0.1318593292236328, 0.13044248962402344, 0.13030899047851563, 0.13035865783691405, 0.13139610290527343, 0.1319315185546875, 0.13085906982421874, 0.13104499816894533, 0.13041789245605467, 0.13075018310546874, 0.13220223999023437, 0.1311398468017578, 0.13205299377441407, 0.1311840362548828, 0.13224620056152345, 0.13096540832519532, 0.13056614685058593, 0.1319567413330078, 0.13134439086914063, 0.13174783325195313, 0.1315285186767578, 0.1306761016845703, 0.13065936279296875, 0.1323865966796875, 0.1317939910888672, 0.13201487731933595, 0.1306728057861328, 0.13135871887207032, 0.13213081359863282, 0.1307852783203125, 0.13065216064453125, 0.13210009765625, 0.13280665588378907, 0.13289846801757813, 0.1313734130859375, 0.13126451110839843, 0.132332763671875, 0.13090690612792968, 0.13111500549316407, 0.13265493774414064, 0.13162246704101563, 0.13315696716308595, 0.13197567749023437, 0.13130709838867188, 0.1327266845703125, 0.14321212768554686, 0.1318338623046875, 0.13038223266601562, 0.13028326416015626, 0.13020390319824218, 0.13024163818359374, 
0.13066534423828124, 0.1336709442138672, 0.1334107208251953, 0.13216983032226562, 0.13081117248535157, 0.13025456237792968, 0.13029069519042968, 0.1303399658203125, 0.13186087036132813, 0.13245286560058595, 0.1332551727294922, 0.1326460418701172, 0.13124490356445312, 0.1303245086669922, 0.1303500518798828, 0.13040435791015625, 0.13166490173339843, 0.13203251647949218, 0.1325260772705078, 0.131842041015625, 0.13065830993652344, 0.13052723693847657, 0.13043276977539062, 0.13241180419921875, 0.13227325439453125, 0.13226264953613281, 0.1324719696044922, 0.13109333801269532, 0.13232853698730468, 0.13106636047363282, 0.13144105529785155, 0.13202455139160157, 0.13251152038574218, 0.1321943054199219, 0.13210546875, 0.1308981475830078, 0.13060665893554688, 0.13144729614257813, 0.13222921752929687, 0.13260403442382812, 0.1327469482421875, 0.13252665710449218, 0.1312357177734375, 0.13064639282226562, 0.13073997497558593, 0.13201181030273437, 0.1322227783203125, 0.1325828857421875, 0.13314143371582032, 0.13139308166503907, 0.13082794189453126, 0.13072854614257812, 0.1320531768798828, 0.13225680541992188, 0.1326927947998047, 0.1339905548095703, 0.13259674072265626, 0.14352540588378906, 0.13170223999023437, 0.1302783966064453, 0.130123779296875, 0.13010943603515626, 0.13016677856445313, 0.1304780731201172, 0.13362527465820312, 0.13307756042480468, 0.1318973388671875, 0.1308037109375, 0.130197509765625, 0.13025689697265624, 0.1306234893798828, 0.1324154815673828, 0.13330589294433592, 0.13188339233398438, 0.13256646728515625, 0.13098664855957032, 0.1303789367675781, 0.13034783935546876, 0.13064396667480468, 0.13243753051757812, 0.13205081176757813, 0.1324099578857422, 0.13250559997558595, 0.1309286346435547, 0.13054348754882814, 0.13056217956542968, 0.13123992919921876, 0.13243801879882813, 0.1323520050048828, 0.13284556579589843, 0.13183795166015624, 0.13045333862304687, 0.13059907531738282, 0.13221206665039062, 0.13100714111328124, 0.13285784912109375, 0.1325998077392578, 0.13256089782714844, 0.1324268798828125, 0.1309110107421875, 0.13044723510742187, 0.13135484313964843, 0.1318174743652344, 0.1325093078613281, 0.13276194763183594, 0.13275958251953124, 0.13278822326660156, 0.13171241760253907, 0.1310599365234375, 0.13117478942871094, 0.13309114074707032, 0.13274934387207032, 0.13290512084960937, 0.13293977355957032, 0.1319015350341797, 0.13287785339355468, 0.1313339538574219, 0.13181190490722655, 0.13206118774414063, 0.13216152954101562, 0.14231741333007814, 0.1316982727050781, 0.1301082305908203, 0.12987187194824218, 0.13202960205078124, 0.1304923553466797, 0.13008578491210937, 0.13385884094238282, 0.13315936279296875, 0.13130551147460937, 0.13020159912109375, 0.1303032684326172, 0.13030374145507811, 0.13032138061523438, 0.1324769287109375, 0.13336566162109376, 0.13248439025878905, 0.1313203125, 0.13177468872070314, 0.1305450897216797, 0.13020538330078124, 0.1315557098388672, 0.132536376953125, 0.13310211181640624, 0.1331111297607422, 0.13219088745117188, 0.1309381103515625, 0.13048265075683593, 0.13099813842773436, 0.13204258728027343, 0.13261605834960938, 0.13205145263671875, 0.132501220703125, 0.13157331848144532, 0.13141407775878905, 0.1310990447998047, 0.13225949096679687, 0.13164215087890624, 0.13325311279296875, 0.13283740234375, 0.1328349151611328, 0.13121527099609376, 0.13072972106933595, 0.13245103454589843, 0.1317255401611328, 0.13250674438476562, 0.13220726013183592, 0.13254861450195313, 0.13197654724121094, 0.13161334228515625, 0.13247897338867187, 0.13187881469726562, 
0.13260797119140624, 0.13296652221679686, 0.1322618865966797, 0.13223936462402344, 0.1309388732910156, 0.13247488403320312, 0.1316549835205078, 0.13285848999023436, 0.132667236328125, 0.13352572631835938, 0.1326489562988281, 0.14248448181152343, 0.13174272155761718, 0.13032447814941406, 0.13011967468261718, 0.1302405090332031, 0.13020176696777344, 0.13081173706054688, 0.13373458862304688, 0.1334897003173828, 0.13127349853515624, 0.13028086853027343, 0.13014710998535156, 0.1302136993408203, 0.13206314086914062, 0.1328415069580078, 0.1331079406738281, 0.13131964111328126, 0.1321697235107422, 0.13075180053710939, 0.13036029052734374, 0.13069686889648438, 0.1330852508544922, 0.13342236328125, 0.13267800903320312, 0.1321700744628906, 0.13070541381835937, 0.13038307189941406, 0.13038467407226562, 0.13171098327636718, 0.1326796875, 0.1332202606201172, 0.13303379821777345, 0.13131520080566406, 0.13100912475585938, 0.1320202178955078, 0.13079306030273438, 0.13175196838378905, 0.13235052490234375, 0.13340179443359376, 0.13139208984375, 0.1313769989013672, 0.132623779296875, 0.13185328674316407, 0.13124566650390626, 0.1321926727294922, 0.13285580444335937, 0.13248101806640625, 0.13105101013183593, 0.1321351318359375, 0.1317972869873047, 0.1325445098876953, 0.13167616271972657, 0.13246464538574218, 0.1325460205078125, 0.13268159484863282, 0.1311640625, 0.1315663299560547, 0.13279026794433593, 0.1317205810546875, 0.13258131408691406, 0.1320987548828125, 0.13300735473632813, 0.13292941284179688, 0.14280809020996094, 0.13174882507324218, 0.13035110473632813, 0.13016677856445313, 0.1306071014404297, 0.13200547790527345, 0.13116390991210938, 0.1339808349609375, 0.13270220947265626, 0.13095852661132812, 0.1303026885986328, 0.1303022155761719, 0.13068476867675782, 0.13200387573242187, 0.13245555114746094, 0.13267987060546876, 0.13243049621582031, 0.13245849609375, 0.130989501953125, 0.13038230895996095, 0.13051913452148436, 0.13181526184082032, 0.13309149169921874, 0.13224876403808594, 0.1326989440917969, 0.13196083068847655, 0.13216358947753906, 0.13061322021484376, 0.13039414978027344, 0.13209548950195313, 0.13278828430175782, 0.13336341857910156, 0.13289651489257812, 0.1325170593261719, 0.131071044921875, 0.13061532592773437, 0.13118278503417968, 0.13269232177734375, 0.13250717163085937, 0.13296844482421874, 0.1328230743408203, 0.1324870147705078, 0.13109925842285156, 0.13059599304199218, 0.1313883819580078, 0.13307859802246094, 0.13357101440429686, 0.13256629943847656, 0.13264108276367187, 0.13151683044433593, 0.132347900390625, 0.1308745574951172, 0.13101344299316406, 0.13404127502441407, 0.13282131958007812, 0.13219218444824218, 0.13243399047851562, 0.13269606018066407, 0.13287948608398437, 0.13135769653320312, 0.1320385284423828, 0.13299507141113281, 0.13310975646972656, 0.14345884704589842, 0.13174794006347657, 0.1304449005126953, 0.13018508911132812, 0.13016441345214844, 0.13019622802734376, 0.13113375854492187, 0.13490687561035156, 0.13362786865234375, 0.1321851806640625, 0.13109225463867188, 0.13023036193847656, 0.13032858276367187, 0.13083238220214843, 0.13323802185058595, 0.13344610595703124, 0.1323461456298828, 0.13103103637695312, 0.13216152954101562, 0.13079782104492188, 0.13032179260253907, 0.13179942321777344, 0.13298074340820312, 0.13251773071289064, 0.13171522521972656, 0.13215744018554687, 0.13193522644042968, 0.13051942443847656, 0.1316541748046875, 0.1322411804199219, 0.13346847534179687, 0.13307644653320314, 0.13219007873535157, 0.1321355895996094, 0.13137283325195312, 
0.1304978485107422, 0.13237545776367188, 0.13200178527832032, 0.13273443603515625, 0.13292306518554686, 0.13224777221679687, 0.13209577941894532, 0.13162130737304686, 0.1311297607421875, 0.1324277801513672, 0.13243708801269533, 0.13291407775878905, 0.13279624938964843, 0.1323419189453125, 0.13289395141601562, 0.1314718017578125, 0.13239903259277344, 0.13194277954101563, 0.13247669982910157, 0.13247715759277343, 0.13249945068359376, 0.13257113647460939, 0.13153648376464844, 0.132508056640625, 0.13161062622070313, 0.1325158386230469, 0.13261187744140626, 0.13271023559570314, 0.14315673828125, 0.13170921325683593, 0.130335205078125, 0.13013731384277344, 0.1301342010498047, 0.13015443420410155, 0.13087171936035155, 0.13459645080566407, 0.13362188720703125, 0.13177682495117188, 0.1307788543701172, 0.13006437683105468, 0.13014775085449218, 0.13104550170898438, 0.13314480590820313, 0.13454156494140626, 0.13239295959472655, 0.13116621398925782, 0.13216152954101562, 0.13073158264160156, 0.13017132568359374, 0.13161677551269532, 0.1328926696777344, 0.13248716735839844, 0.13250764465332032, 0.13143653869628907, 0.13193746948242188, 0.13047071838378907, 0.13085696411132813, 0.1334845733642578, 0.13252787780761718, 0.13257484436035155, 0.13222972106933595, 0.13150320434570312, 0.13048310852050782, 0.13067987060546876, 0.13278713989257812, 0.13268582153320313, 0.13251327514648437, 0.13314508056640625, 0.13266473388671876, 0.1315968933105469, 0.1310433349609375, 0.13281590270996094, 0.13259671020507813, 0.13313778686523436, 0.1324837188720703, 0.1319599609375, 0.13232009887695312, 0.13108575439453124, 0.13242425537109376, 0.13236837768554688, 0.13279743957519533, 0.13243263244628906, 0.13248332214355468, 0.13297048950195312, 0.13131753540039062, 0.13163746643066407, 0.1323765411376953, 0.13279629516601563, 0.13239312744140624, 0.1334783935546875, 0.13329536437988282, 0.14217056274414064, 0.13139149475097656, 0.13200518798828126, 0.13062623596191406, 0.13021385192871093, 0.13022825622558593, 0.13140301513671876, 0.13506227111816407, 0.13261112976074219, 0.13199586486816406, 0.1313041229248047, 0.130247802734375, 0.1301636199951172, 0.13208131408691406, 0.13284796142578126, 0.13291110229492187, 0.1320079345703125, 0.13164349365234376, 0.13146307373046875, 0.13064723205566406, 0.13040928649902345, 0.13337811279296874, 0.13315615844726564, 0.133308349609375, 0.13308966064453126, 0.13201429748535157, 0.1305723876953125, 0.13019049072265626, 0.13239791870117187, 0.13319606018066407, 0.13343510437011719, 0.13221466064453125, 0.1325569305419922, 0.13222402954101561, 0.13086985778808594, 0.13065040588378907, 0.13244134521484374, 0.13307785034179687, 0.1322291259765625, 0.1319916534423828, 0.13323049926757813, 0.1322265625, 0.1307263946533203, 0.1320018310546875, 0.13247640991210938, 0.13285987854003906, 0.1329229736328125, 0.13292953491210938, 0.13268598937988282, 0.13124684143066406, 0.13139762878417968, 0.13349888610839844, 0.13244134521484374, 0.13293849182128906, 0.13337939453125, 0.13288076782226563, 0.13267999267578126, 0.13137677001953124, 0.13288076782226563, 0.13202841186523437, 0.13286387634277344, 0.13324403381347658, 0.13346304321289063, 0.14210124206542968, 0.1317042236328125, 0.13021856689453126, 0.13010934448242187, 0.13002117919921874, 0.13016915893554687, 0.13113958740234374, 0.1348727722167969, 0.1326328887939453, 0.1313026885986328, 0.1312734375, 0.1304016571044922, 0.13012637329101562, 0.1312154541015625, 0.13361943054199218, 0.1334336395263672, 0.13260185241699218, 0.13144883728027343, 
0.13132389831542968, 0.13135667419433594, 0.13055705261230469, 0.13193894958496094, 0.13283148193359376, 0.133268798828125, 0.13205984497070314, 0.13200588989257814, 0.1312188415527344, 0.13176454162597656, 0.1315096893310547, 0.1319548797607422, 0.13310546875, 0.13183676147460938, 0.13270384216308595, 0.13141456604003907, 0.13216755676269532, 0.1308973083496094, 0.1326065673828125, 0.1325506591796875, 0.1325846710205078, 0.13259823608398438, 0.13214915466308594, 0.1323791046142578, 0.1323428497314453, 0.13151437377929687, 0.13223411560058593, 0.13285795593261718, 0.13286515808105467, 0.13273167419433593, 0.13250355529785157, 0.13303753662109374, 0.13139407348632812, 0.1316077423095703, 0.13261701965332032, 0.13238287353515624, 0.13249110412597656, 0.13277792358398438, 0.13224761962890624, 0.13219766235351563, 0.13230706787109375, 0.1328175048828125, 0.1325977020263672, 0.13383497619628906, 0.1326153564453125]",tokens/s,7.573952444050141,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,820.92032,1507.721216,0.0,1105.199104,1100.186112,s,1,9.072400390625,9.072400390625,0.0,9.072400390625,9.072400390625,9.072400390625,9.072400390625,[9.072400390625],,kWh,5.4956265612516594e-05,6.055032312289932e-06,1.7528625134016762e-05,7.853992305882328e-05,,MB,1312.280576,1719.533568,0.0,1304.428544,1276.530688,s,10,1.84060791015625,0.184060791015625,0.0005783838018231383,0.18389855957031248,0.1846462478637695,0.1850107536315918,0.18530235824584962,"[0.18456524658203124, 0.1838230438232422, 0.1834337921142578, 0.18353289794921876, 0.18426371765136718, 0.18442848205566406, 0.1839740753173828, 0.1834705352783203, 0.18374085998535156, 0.18537525939941407]",tokens/s,1390.8448322286524,kWh,5.550368840486959e-06,6.117902490534697e-07,3.6850029479999377e-06,9.847162037540365e-06,tokens/kWh,25997338.017192207,MB,1335.984128,1719.533568,0.0,1304.428544,1276.533248,s,10,24.362478271484378,2.4362478271484376,0.007146869388581428,2.4347025146484373,2.4451154296875,2.4477870849609373,2.4499244091796872,"[2.4319228515625, 2.434615966796875, 2.42898486328125, 2.42936474609375, 2.450458740234375, 2.43696044921875, 2.444521728515625, 2.443009521484375, 2.427850341796875, 2.4347890625]",tokens/s,25.859438148269096,kWh,7.197185549618386e-05,7.93874020005392e-06,3.269266504300001e-05,0.00011260326073923778,tokens/kWh,559486.4623493713,,s,630,24.360354503631598,0.0386672293708438,0.0005706300882210212,0.0385493278503418,0.03909793128967285,0.03946794605255127,0.04038945701599121,"[0.03975430297851563, 0.03891785430908203, 0.03854569625854492, 0.03854524612426758, 0.03857571029663086, 0.03860895919799805, 0.039569503784179685, 0.038387134552001954, 0.03841462326049805, 0.03852486419677734, 0.038382400512695314, 0.039077823638916015, 0.03880511856079102, 0.03875680160522461, 0.03860012817382812, 0.0387751350402832, 0.03859622573852539, 0.03842512130737305, 0.03834048080444336, 0.038795265197753906, 0.03842998504638672, 0.038539745330810546, 
0.038494335174560544, 0.038467681884765625, 0.03857628631591797, 0.038370399475097655, 0.03849308776855469, 0.03833590316772461, 0.03842895889282227, 0.038402366638183597, 0.03845632171630859, 0.03859558486938477, 0.039217086791992185, 0.03856745529174805, 0.03847568130493164, 0.03842339324951172, 0.0385283203125, 0.038537696838378904, 0.03873382568359375, 0.039384414672851566, 0.03935612869262695, 0.03863628768920899, 0.03853740692138672, 0.038510719299316404, 0.03854643249511719, 0.038839168548583984, 0.03842031860351563, 0.03832233428955078, 0.03821977615356445, 0.03831808090209961, 0.03822787094116211, 0.03825468826293945, 0.03835302352905273, 0.038825855255126954, 0.03870876693725586, 0.03844697570800781, 0.03836579132080078, 0.038350593566894534, 0.038588031768798825, 0.03844316864013672, 0.038359359741210936, 0.038895233154296875, 0.038351390838623045, 0.03931990432739258, 0.03878416061401367, 0.03874652862548828, 0.03868678283691406, 0.03852096176147461, 0.038544479370117186, 0.03842985534667969, 0.03842230224609375, 0.0384266242980957, 0.038387134552001954, 0.03832281494140625, 0.03837865447998047, 0.03848432159423828, 0.03843471908569336, 0.038391422271728516, 0.03844553756713867, 0.039534912109375, 0.03839766311645508, 0.038562271118164064, 0.03837120056152344, 0.03837343978881836, 0.03888755035400391, 0.03843401718139648, 0.03890447998046875, 0.038730751037597655, 0.03888044738769531, 0.03836278533935547, 0.03825270462036133, 0.03836723327636719, 0.03854742431640625, 0.03846329498291016, 0.03836950302124023, 0.03829555130004883, 0.03842262268066406, 0.038512542724609376, 0.03887923049926758, 0.039591934204101564, 0.038645759582519534, 0.0384879035949707, 0.038484127044677734, 0.03891641616821289, 0.04035142517089844, 0.03898313522338867, 0.0387215690612793, 0.03899407958984375, 0.03869545745849609, 0.03836908721923828, 0.038340606689453126, 0.038621185302734375, 0.04108902359008789, 0.03907174301147461, 0.039128990173339845, 0.03856729507446289, 0.03827171325683594, 0.03843660736083984, 0.03837996673583984, 0.03828307342529297, 0.038285408020019535, 0.03813587188720703, 0.03839574432373047, 0.03837334442138672, 0.03826908874511719, 0.03826665496826172, 0.03894761657714844, 0.03850864028930664, 0.03873766326904297, 0.03865219116210938, 0.03871887969970703, 0.03854396820068359, 0.03830099105834961, 0.03935097503662109, 0.03973734283447266, 0.03852899169921875, 0.03840409469604492, 0.03828534317016601, 0.03827507019042969, 0.03866825485229492, 0.038320159912109374, 0.038391807556152346, 0.03849216079711914, 0.03828736114501953, 0.03826054382324219, 0.03904240036010742, 0.03819961547851562, 0.03894121551513672, 0.042571968078613284, 0.03848172760009766, 0.03829145431518555, 0.038362430572509765, 0.0385088005065918, 0.03827347183227539, 0.03839487838745117, 0.03822694396972656, 0.038174720764160154, 0.03837459182739258, 0.03814028930664062, 0.0385516471862793, 0.03861334228515625, 0.03854131317138672, 0.03852080154418945, 0.03841231918334961, 0.038778881072998046, 0.038168575286865236, 0.03827452850341797, 0.038285854339599606, 0.0381822738647461, 0.0386115837097168, 0.038456512451171876, 0.03835577774047851, 0.03840198516845703, 0.03852499389648437, 0.038250495910644534, 0.03838771057128906, 0.03822182464599609, 0.038421825408935545, 0.03865260696411133, 0.03847577667236328, 0.038497791290283204, 0.03872208023071289, 0.038209503173828124, 0.03825449752807617, 0.03825436782836914, 0.039196670532226564, 0.038424896240234374, 0.03846758270263672, 0.03828326416015625, 
0.038965248107910154, 0.03879731369018555, 0.03844300842285156, 0.038285408020019535, 0.03828521728515625, 0.03829759979248047, 0.03811532974243164, 0.03820748901367187, 0.038150272369384765, 0.03810044860839844, 0.03811161422729492, 0.038221408843994144, 0.03839590454101562, 0.03831670379638672, 0.038158111572265625, 0.03863545608520508, 0.0384101448059082, 0.03854915237426758, 0.03827891159057617, 0.03839187240600586, 0.03827987289428711, 0.038340606689453126, 0.038485919952392575, 0.038557056427001954, 0.038338558197021484, 0.038232799530029296, 0.0381739501953125, 0.03812838363647461, 0.03832217788696289, 0.038144001007080076, 0.0381247673034668, 0.03950460815429688, 0.038211647033691405, 0.04134921646118164, 0.03878662490844727, 0.03837577438354492, 0.038490207672119144, 0.03845084762573242, 0.03968844985961914, 0.03823747253417969, 0.038165214538574216, 0.038246593475341796, 0.03912480163574219, 0.039378944396972655, 0.03876655960083008, 0.038502334594726566, 0.03833171081542969, 0.038303585052490235, 0.038484256744384764, 0.038591136932373045, 0.038526817321777346, 0.03820899200439453, 0.038259391784667966, 0.03841571044921875, 0.03958236694335938, 0.03912646484375, 0.039257728576660156, 0.03831612777709961, 0.038553470611572264, 0.03872867202758789, 0.03887216186523437, 0.03959695816040039, 0.03849625778198242, 0.039952030181884764, 0.03931119918823242, 0.03933366394042969, 0.03904380798339844, 0.03893657684326172, 0.03880755233764648, 0.03898748779296875, 0.03877916717529297, 0.039048511505126955, 0.039488193511962894, 0.039229438781738284, 0.03892428970336914, 0.03882819366455078, 0.03870294570922852, 0.038790752410888675, 0.03851913452148437, 0.038946975708007814, 0.03873308944702149, 0.038690624237060545, 0.038664257049560544, 0.039443199157714846, 0.03882553482055664, 0.038728031158447265, 0.038700862884521486, 0.03876198577880859, 0.03854006576538086, 0.03854336166381836, 0.039923713684082034, 0.038940673828125, 0.038886878967285154, 0.038617633819580076, 0.03867635345458984, 0.03849843215942383, 0.03866537475585938, 0.0387729606628418, 0.03887529754638672, 0.038669921875, 0.03870604705810547, 0.03869270324707031, 0.03857219314575195, 0.03873567962646484, 0.039282878875732424, 0.039179359436035156, 0.03881267166137695, 0.038836097717285155, 0.03873321533203125, 0.03867916870117188, 0.03902668762207031, 0.03885465621948242, 0.03862262344360352, 0.03869756698608398, 0.038698078155517575, 0.03894988632202148, 0.038766494750976564, 0.039002113342285157, 0.03878521728515625, 0.038833984375, 0.03895296096801758, 0.038828033447265625, 0.03861888122558594, 0.03853279876708984, 0.039117374420166016, 0.03970764923095703, 0.03982950210571289, 0.039067649841308595, 0.038434814453125, 0.03839385604858398, 0.03877427291870117, 0.03853740692138672, 0.03864355087280273, 0.038513118743896485, 0.03840524673461914, 0.03847257614135742, 0.038480960845947265, 0.038760704040527345, 0.03867513656616211, 0.03864780807495117, 0.0385043830871582, 0.03856524658203125, 0.03878543853759766, 0.03859312057495117, 0.038523998260498044, 0.038531681060791016, 0.03855286407470703, 0.03856841659545898, 0.03842278289794922, 0.04040499114990234, 0.03889775848388672, 0.03859241485595703, 0.03845865631103516, 0.038884063720703126, 0.03936268615722656, 0.038770561218261716, 0.03849148941040039, 0.03848668670654297, 0.03850796890258789, 0.03849273681640625, 0.03869081497192383, 0.0383438720703125, 0.03869964981079101, 0.04071968078613281, 0.03894771194458008, 0.03854131317138672, 0.038644798278808595, 
0.03869286346435547, 0.038513824462890624, 0.03848988723754883, 0.03859004974365234, 0.03841680145263672, 0.03909737777709961, 0.03856412887573242, 0.03844371032714844, 0.03849430465698242, 0.03832617568969727, 0.038258689880371094, 0.0383631362915039, 0.0383364143371582, 0.03849225616455078, 0.03862681579589844, 0.038453758239746096, 0.03833808135986328, 0.03891641616821289, 0.03850665664672852, 0.038577953338623044, 0.03852460861206055, 0.03912553787231445, 0.039165790557861326, 0.03882908630371094, 0.03873791885375977, 0.038790111541748044, 0.038670337677001954, 0.03858432006835937, 0.038841697692871095, 0.038890079498291014, 0.03873798370361328, 0.03846748733520508, 0.038604862213134764, 0.03844265747070313, 0.03856032180786133, 0.038537025451660156, 0.03880550384521484, 0.038502689361572265, 0.03876425552368164, 0.03870697784423828, 0.03882416152954102, 0.038795265197753906, 0.03885894393920898, 0.03857388687133789, 0.0388026237487793, 0.03870188903808594, 0.03876617431640625, 0.03874563217163086, 0.039414112091064456, 0.03852272033691406, 0.038701438903808595, 0.03906351852416992, 0.0387050895690918, 0.03871171188354492, 0.038775905609130856, 0.039142303466796875, 0.038780158996582034, 0.038617504119873046, 0.03874371337890625, 0.03859321594238281, 0.03865715026855469, 0.038830974578857425, 0.03870486450195312, 0.03875849533081055, 0.0386108169555664, 0.03872713470458984, 0.03872582244873047, 0.038768798828125, 0.03910291290283203, 0.03873388671875, 0.03869286346435547, 0.040048160552978516, 0.03868928146362305, 0.03902256011962891, 0.03915750503540039, 0.039303424835205075, 0.038811649322509766, 0.03861260986328125, 0.03865795135498047, 0.03895276641845703, 0.038623905181884764, 0.03906780624389648, 0.038958942413330075, 0.03872972869873047, 0.03890943908691406, 0.03881196975708008, 0.03890585708618164, 0.03882223892211914, 0.03868966293334961, 0.03957328033447265, 0.038661121368408206, 0.038560993194580076, 0.03866470336914062, 0.038723873138427733, 0.04010723114013672, 0.03978319931030273, 0.038809600830078124, 0.03868380737304687, 0.038742881774902344, 0.03850035095214844, 0.038657470703125, 0.038664031982421875, 0.038580127716064457, 0.03987744140625, 0.039530464172363285, 0.03990883255004883, 0.038797889709472656, 0.03854131317138672, 0.038732063293457034, 0.03857366561889648, 0.03868073654174805, 0.03851401519775391, 0.03859519958496094, 0.03865142440795898, 0.038496734619140625, 0.039204864501953124, 0.038473121643066405, 0.03846368026733398, 0.038555999755859376, 0.03847180938720703, 0.03858835220336914, 0.038671390533447265, 0.03840703964233398, 0.03855913543701172, 0.03877254486083984, 0.03887011337280273, 0.03938604736328125, 0.038742015838623044, 0.039039360046386716, 0.03861734390258789, 0.0386910400390625, 0.03878201675415039, 0.03855660629272461, 0.03954483032226563, 0.03852896118164063, 0.03833657455444336, 0.03855567932128906, 0.03845843124389649, 0.03849308776855469, 0.03848396682739258, 0.0384634895324707, 0.03871539306640625, 0.038841663360595705, 0.03847443389892578, 0.03852288055419922, 0.038594558715820314, 0.03864166259765625, 0.03847372817993164, 0.03891814422607422, 0.038851871490478515, 0.03847958374023437, 0.03854950332641602, 0.038726112365722654, 0.04778448104858399, 0.03845539093017578, 0.03891804885864258, 0.038430721282958984, 0.03838566589355469, 0.03821977615356445, 0.038340385437011716, 0.03836240005493164, 0.038370304107666016, 0.0393235855102539, 0.03830745697021484, 0.03834048080444336, 0.038464191436767575, 0.03836703872680664, 
0.038319873809814456, 0.038400447845458985, 0.038209278106689455, 0.03833244705200195, 0.038178081512451174, 0.03829836654663086, 0.038346656799316405, 0.038211681365966796, 0.038438911437988284, 0.03903641510009766, 0.03870534515380859, 0.03846089553833008, 0.038486881256103514, 0.038304031372070314, 0.03826454544067383, 0.038301055908203124, 0.03820751953125, 0.03826339340209961, 0.0381952018737793, 0.03823388671875, 0.03821347045898438, 0.0381710090637207, 0.03861913681030273, 0.03828275299072265, 0.03814860916137695, 0.03835420989990234, 0.03816726303100586, 0.038461376190185546, 0.03818854522705078, 0.0389486083984375, 0.038193984985351564, 0.03817004776000977, 0.03816505432128906, 0.03827097702026367, 0.038131423950195316, 0.03815862274169922, 0.03822972869873047, 0.03824873733520508, 0.038338241577148435, 0.03855532836914063, 0.03837401580810547, 0.038144001007080076, 0.03811532974243164, 0.03822387313842773, 0.03881833648681641, 0.03845119857788086, 0.03844710540771484, 0.038292640686035155, 0.03834115219116211, 0.038449470520019534, 0.03875430297851563, 0.03840409469604492, 0.038264255523681644, 0.038324798583984375, 0.0390555534362793, 0.03994502258300781, 0.038724609375, 0.038440608978271486, 0.03844745635986328, 0.03926358413696289, 0.039062175750732425, 0.039103710174560546, 0.04026857757568359, 0.038904830932617186, 0.0385516471862793, 0.03835587310791016, 0.03834988784790039, 0.03853635025024414, 0.03863302230834961, 0.04106051254272461, 0.038550750732421875, 0.03843363189697266, 0.0384156494140625, 0.03853299331665039, 0.03863843154907227, 0.0389870719909668, 0.03843900680541992, 0.038597217559814455, 0.03839529418945312, 0.038472286224365236, 0.03846963119506836, 0.0386080322265625, 0.038429439544677736, 0.03852707290649414, 0.03876572799682617, 0.038576416015625, 0.03852115249633789, 0.038997825622558595, 0.03886249542236328, 0.038478015899658206, 0.03841033554077149, 0.038437374114990236, 0.03851792144775391, 0.03848624038696289, 0.039019134521484374, 0.038618526458740234, 0.03823616027832031, 0.03837923049926758, 0.0383535041809082, 0.03830988693237305, 0.038281505584716796, 0.03888339233398438, 0.03837155151367187, 0.03826768112182617, 0.03830851364135742, 0.03846342468261719, 0.03830819320678711]",tokens/s,25.861692608212284,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,835.428352,1979.580416,0.0,1577.058304,1537.483264,s,1,9.365990234375,9.365990234375,0.0,9.365990234375,9.365990234375,9.365990234375,9.365990234375,[9.365990234375],,kWh,6.271410308331724e-05,6.910807780769939e-06,2.021057172399421e-05,8.983548258808139e-05,,MB,1320.30464,2157.838336,0.0,1742.733312,1681.428992,s,10,1.8416128082275391,0.1841612808227539,0.0008140061373167685,0.1841848907470703,0.185171044921875,0.1853301818847656,0.18545749145507812,"[0.18407093811035155, 0.18337977600097657, 0.183830810546875, 0.18438706970214844, 0.1849286346435547, 0.182908447265625, 0.18513568115234375, 0.18318328857421876, 0.18548931884765624, 0.18429884338378907]",tokens/s,1390.085901098762,kWh,5.559613925941952e-06,6.127576282888745e-07,3.6826706610184697e-06,9.855042215249296e-06,tokens/kWh,25976550.31897031,MB,1343.737856,2157.838336,0.0,1742.733312,1681.431552,s,10,25.40066650390625,2.5400666503906253,0.015621037144712184,2.5370379638671876,2.5602937744140624,2.566202551269531,2.570929572753906,"[2.53868798828125, 2.521669189453125, 2.52369287109375, 2.535387939453125, 2.5293515625, 2.551942138671875, 2.541688720703125, 2.572111328125, 2.527154052734375, 2.558980712890625]",tokens/s,24.80249878101093,kWh,7.274886373698165e-05,8.02452863083843e-06,3.4151526272981674e-05,0.00011492491864080175,tokens/kWh,548183.9860762202,,s,630,25.398546791076644,0.040315153636629616,0.0006220370276033227,0.04011932754516601,0.04100068855285645,0.041314522933959964,0.043073577461242685,"[0.0422872314453125, 0.04112831878662109, 0.04051148986816406, 0.040468257904052736, 0.04012422561645508, 0.04055900955200195, 0.04016332626342774, 0.04080575942993164, 0.04038528060913086, 0.040355712890625, 0.04014284896850586, 0.04003420639038086, 0.04018960189819336, 0.04013862228393555, 0.04046278381347656, 0.040003711700439454, 0.04004880142211914, 0.04019798278808594, 0.04060979080200195, 0.040269824981689455, 0.04019747161865234, 0.04003209686279297, 0.04009043121337891, 0.04003839874267578, 0.040871936798095705, 0.04055654525756836, 0.04044211196899414, 0.040217601776123046, 0.04010470581054688, 0.04015919876098633, 0.040136737823486326, 0.04006233596801758, 0.04085171127319336, 0.0409562873840332, 0.040322399139404295, 0.040026782989501956, 0.040052032470703124, 0.04010063934326172, 0.04016323089599609, 0.04004044723510742, 0.03997462463378906, 0.03998339080810547, 0.040153087615966795, 0.04018172836303711, 0.040158527374267575, 0.03996710586547852, 0.03996092987060547, 0.040134654998779294, 0.040029342651367185, 0.04006905746459961, 0.04016963195800781, 0.04022118377685547, 0.040073535919189454, 0.04001580810546875, 0.04030844879150391, 
0.04009545516967773, 0.040137279510498045, 0.041316352844238284, 0.04031078338623047, 0.040297950744628906, 0.04015740966796875, 0.040239425659179685, 0.040172607421875, 0.04040265655517578, 0.04008777618408203, 0.04006918334960938, 0.03989411163330078, 0.03978496170043945, 0.039761505126953124, 0.03981804656982422, 0.03980287933349609, 0.040015838623046876, 0.04014083099365234, 0.03994527816772461, 0.03999151992797852, 0.0400940170288086, 0.039901599884033204, 0.0398902702331543, 0.04037228775024414, 0.03994076919555664, 0.039911361694335935, 0.039845088958740234, 0.039890945434570314, 0.03984259033203125, 0.03977830505371094, 0.039825439453125, 0.03992367935180664, 0.039857440948486325, 0.03963679885864258, 0.039782432556152346, 0.039944351196289064, 0.03985833740234375, 0.03974105453491211, 0.03975382232666016, 0.039871326446533205, 0.03965478515625, 0.04036876678466797, 0.0398884162902832, 0.03975215911865235, 0.039650718688964845, 0.04024380874633789, 0.03988243103027344, 0.039728511810302736, 0.03999135971069336, 0.03988524627685547, 0.039749408721923826, 0.03976873779296875, 0.03979484939575195, 0.0398109130859375, 0.04009904098510742, 0.039778942108154296, 0.04195139312744141, 0.039935806274414065, 0.03977849578857422, 0.03982131195068359, 0.03984819030761719, 0.04035763168334961, 0.03988492965698242, 0.0430048942565918, 0.040879039764404296, 0.04017353439331055, 0.04009369659423828, 0.04006707382202149, 0.040253440856933595, 0.03994009780883789, 0.04005068969726563, 0.04158883285522461, 0.040081504821777345, 0.04008252716064453, 0.04009664154052734, 0.039954593658447266, 0.04006307220458984, 0.040013568878173825, 0.03998038482666016, 0.042242721557617186, 0.04018700790405273, 0.03999833679199219, 0.04004220962524414, 0.04013699340820313, 0.04008755111694336, 0.04013843154907227, 0.040237377166748044, 0.04046976089477539, 0.04002892684936524, 0.03982915115356445, 0.04123865509033203, 0.03991164779663086, 0.03975167846679688, 0.03986428833007812, 0.03982121658325195, 0.039682174682617186, 0.03988633728027344, 0.03974822235107422, 0.03968806457519531, 0.039636257171630856, 0.0397053108215332, 0.03976806259155274, 0.03962060928344727, 0.03969174575805664, 0.03964163208007813, 0.03955270385742188, 0.03966089630126953, 0.039687423706054686, 0.04004201507568359, 0.03995052719116211, 0.039796287536621094, 0.039758270263671874, 0.040689407348632814, 0.039905536651611326, 0.03985203170776367, 0.040052288055419924, 0.039661598205566403, 0.03958563232421875, 0.03973366546630859, 0.041788894653320315, 0.0404466552734375, 0.039895294189453125, 0.04049689483642578, 0.039812416076660154, 0.04001603317260742, 0.03976764678955078, 0.0397355842590332, 0.039897472381591796, 0.039680191040039066, 0.03986595153808594, 0.039608833312988284, 0.03989299011230469, 0.04065264129638672, 0.04107894515991211, 0.04107609558105469, 0.040476512908935544, 0.04028905487060547, 0.0401569938659668, 0.040546497344970706, 0.04026192092895508, 0.040201663970947266, 0.03992399978637695, 0.03994966506958008, 0.03987094497680664, 0.0398185920715332, 0.04009568023681641, 0.03988396835327149, 0.040238815307617186, 0.04011747360229492, 0.039992095947265625, 0.04009695816040039, 0.039971168518066404, 0.03997884750366211, 0.039879295349121095, 0.03987577438354492, 0.039997825622558596, 0.04006943893432617, 0.04017110443115234, 0.040143104553222654, 0.03984207916259765, 0.04006707382202149, 0.04007929611206055, 0.04004665756225586, 0.04004249572753906, 0.040372032165527344, 0.03978873443603516, 0.03963289642333984, 
0.039822528839111325, 0.03977199935913086, 0.03982140731811523, 0.04001232147216797, 0.04004832077026367, 0.04073299026489258, 0.04062764739990234, 0.040459072113037106, 0.04008700942993164, 0.04501062393188476, 0.04060665512084961, 0.04008550262451172, 0.0401080322265625, 0.04084560012817383, 0.04064377593994141, 0.04078768157958984, 0.040882015228271486, 0.04077577590942383, 0.04081958389282227, 0.04016313552856445, 0.03999148941040039, 0.040062366485595705, 0.03993833541870117, 0.04003871917724609, 0.040046817779541014, 0.04004604721069336, 0.039843936920166016, 0.04031305694580078, 0.03990300750732422, 0.0399334716796875, 0.04038655853271484, 0.04002406311035156, 0.04007334518432617, 0.04072639846801758, 0.04008345413208008, 0.0405401611328125, 0.0404029426574707, 0.04017270278930664, 0.040012161254882814, 0.040384990692138675, 0.040022014617919925, 0.03988681411743164, 0.040398880004882814, 0.04014284896850586, 0.04003635025024414, 0.04005887985229492, 0.04011539077758789, 0.040121150970458985, 0.040140254974365235, 0.04038524627685547, 0.04011996841430664, 0.0399703369140625, 0.04013907241821289, 0.042086719512939456, 0.04004227066040039, 0.039870143890380856, 0.039946624755859375, 0.040005664825439456, 0.03985651016235352, 0.04005641555786133, 0.03995049667358398, 0.040638111114501954, 0.04031727981567383, 0.03999916839599609, 0.040016319274902346, 0.0399562873840332, 0.039924064636230466, 0.039992191314697265, 0.03996553421020508, 0.039876609802246096, 0.03997052764892578, 0.04009603118896484, 0.04026153564453125, 0.03999135971069336, 0.03997903823852539, 0.039923713684082034, 0.03981878280639648, 0.039956958770751956, 0.03990937423706055, 0.03982668685913086, 0.03992038345336914, 0.039806880950927735, 0.03993411254882812, 0.03978847885131836, 0.039994590759277346, 0.04069046401977539, 0.04047423934936523, 0.04067776107788086, 0.040136703491210936, 0.04011990356445312, 0.039953983306884766, 0.0406578254699707, 0.04041222381591797, 0.04133679962158203, 0.04018447875976563, 0.04001968002319336, 0.04019200134277344, 0.03996672058105469, 0.040011680603027344, 0.03989718246459961, 0.039731201171875, 0.03993804931640625, 0.03983744049072266, 0.039981311798095706, 0.03984336090087891, 0.03980499267578125, 0.03981536102294922, 0.040476417541503905, 0.04053859329223633, 0.04023007965087891, 0.04002284622192383, 0.04006092834472656, 0.04020367813110352, 0.04007587051391601, 0.03977199935913086, 0.039858177185058595, 0.03983990478515625, 0.03994009780883789, 0.04000735855102539, 0.03975715255737305, 0.04156243133544922, 0.04131222534179688, 0.040506046295166014, 0.04084326553344726, 0.0409354248046875, 0.04107468795776367, 0.04072403335571289, 0.04335795211791992, 0.041349822998046876, 0.041099262237548825, 0.04117299270629883, 0.041164833068847655, 0.04206300735473633, 0.04129465484619141, 0.041613311767578126, 0.040689697265625, 0.0406544303894043, 0.040208766937255856, 0.04007526397705078, 0.041471073150634766, 0.04000998306274414, 0.04017007827758789, 0.03997907257080078, 0.040273857116699216, 0.040094879150390624, 0.039876766204833984, 0.04063497543334961, 0.044542110443115235, 0.040513534545898434, 0.04023072052001953, 0.039954689025878905, 0.03998310470581055, 0.040298431396484376, 0.04035369491577148, 0.04017180633544922, 0.040133598327636716, 0.04149660873413086, 0.04061183929443359, 0.0401162223815918, 0.04012646484375, 0.040013824462890625, 0.040001823425292966, 0.04011513519287109, 0.04011088180541992, 0.04021452713012695, 0.040052734375, 0.040091487884521486, 
0.040279647827148435, 0.040116416931152345, 0.04024563217163086, 0.040202239990234374, 0.04014899063110351, 0.04024041748046875, 0.040084190368652343, 0.04003635025024414, 0.04092892837524414, 0.04020864105224609, 0.040882335662841794, 0.04032096099853515, 0.040648670196533204, 0.0405442886352539, 0.04060979080200195, 0.040204288482666016, 0.040085407257080076, 0.040038494110107424, 0.040097793579101565, 0.04018374252319336, 0.0399667854309082, 0.04009574508666992, 0.040308769226074216, 0.039852096557617185, 0.04017334365844726, 0.039956607818603516, 0.039841663360595705, 0.03982963180541992, 0.04064460754394531, 0.04039680099487305, 0.040153087615966795, 0.03993088150024414, 0.039897567749023435, 0.04035228729248047, 0.040220672607421876, 0.04036742401123047, 0.040595230102539064, 0.0406201286315918, 0.04046912002563476, 0.04062841415405274, 0.04029849624633789, 0.04036812973022461, 0.040151039123535154, 0.040255615234375, 0.04017343902587891, 0.04039302444458008, 0.043101631164550784, 0.04103193664550781, 0.040839168548583986, 0.040568191528320315, 0.04052032089233398, 0.04041660690307617, 0.04150851058959961, 0.04096601486206055, 0.04061376190185547, 0.040548961639404295, 0.04098867034912109, 0.04123148727416992, 0.04062854385375977, 0.04074339294433594, 0.040810527801513674, 0.040693824768066406, 0.04058438491821289, 0.04048918533325195, 0.04048870468139648, 0.040462303161621097, 0.04082368087768555, 0.04113190460205078, 0.04100048065185547, 0.041151073455810545, 0.040888320922851565, 0.042937793731689454, 0.04106707382202148, 0.041009151458740234, 0.04072243118286133, 0.041030784606933594, 0.04098358535766602, 0.040701793670654296, 0.04046438217163086, 0.040529823303222655, 0.040610206604003905, 0.04115561676025391, 0.04066521453857422, 0.040562976837158204, 0.04059980773925781, 0.040802207946777344, 0.040609024047851563, 0.04064342498779297, 0.0405032958984375, 0.040503360748291015, 0.04028124618530273, 0.04051776123046875, 0.0408966064453125, 0.0415852165222168, 0.04085145568847656, 0.04080384063720703, 0.04061849594116211, 0.04052313613891602, 0.04119011306762695, 0.041717662811279296, 0.04194508743286133, 0.041306110382080076, 0.041312286376953125, 0.04102755355834961, 0.04098559951782227, 0.041305408477783204, 0.040992446899414066, 0.04058272171020508, 0.04036038589477539, 0.040336894989013675, 0.04036240005493164, 0.040376415252685545, 0.039962623596191404, 0.03999856185913086, 0.040212928771972654, 0.040585056304931644, 0.04028886413574219, 0.0399027214050293, 0.0399092788696289, 0.04017606353759766, 0.04002627182006836, 0.04017484664916992, 0.040178016662597654, 0.03996057510375976, 0.04007267379760742, 0.03981548690795898, 0.040332992553710936, 0.040080318450927736, 0.03994332885742188, 0.040166526794433596, 0.040069984436035155, 0.03994918441772461, 0.04000726318359375, 0.040530113220214846, 0.03996416091918945, 0.03978649520874023, 0.039819808959960935, 0.04069599914550781, 0.03990937423706055, 0.03985123062133789, 0.039776481628417966, 0.03987263870239258, 0.04051542282104492, 0.040944225311279295, 0.040613887786865234, 0.040904705047607424, 0.04087807846069336, 0.040820735931396485, 0.04025040054321289, 0.04009852981567383, 0.039863838195800784, 0.040291038513183594, 0.0400233268737793, 0.04032175827026367, 0.03987251281738281, 0.03989299011230469, 0.039981056213378906, 0.04015718460083008, 0.04031654357910156, 0.03999327850341797, 0.0399343376159668, 0.03975584030151367, 0.039855518341064454, 0.03988908767700195, 0.039879169464111325, 0.03986415863037109, 
0.04012038421630859, 0.03984384155273438, 0.039846176147460936, 0.0399029426574707, 0.0397817268371582, 0.039879329681396486, 0.039948287963867186, 0.039909473419189455, 0.040007167816162106, 0.03983782577514648, 0.040630561828613285, 0.040467838287353515, 0.04061190414428711, 0.04043270492553711, 0.04002483367919922, 0.03995251083374023, 0.03996051025390625, 0.04178131103515625, 0.04007900619506836, 0.039922016143798825, 0.03997830581665039, 0.040118751525878904, 0.04059568023681641, 0.04342937469482422, 0.04038089752197266, 0.03984518432617187, 0.04016169738769531, 0.03986841583251953, 0.04000185775756836, 0.040430782318115234, 0.040048446655273434, 0.04171059036254883, 0.04042876815795898, 0.03992556762695312, 0.03997750473022461, 0.041105857849121095, 0.040529918670654294, 0.04088627243041992, 0.04163702392578125, 0.04100796890258789, 0.04101529693603516, 0.041793537139892575, 0.041414657592773435, 0.04322099304199219, 0.04128688049316406, 0.04111414337158203, 0.040878337860107423, 0.04040867233276367, 0.04027638244628906, 0.039946014404296876, 0.04116502380371094, 0.03989404678344727, 0.039863262176513675, 0.03987068939208984, 0.03982723236083984, 0.04041638565063477, 0.040487808227539064, 0.04056208038330078, 0.04071417617797852, 0.03993990325927734, 0.040151905059814454, 0.039929855346679685, 0.03994630432128906, 0.04062358474731445, 0.040049087524414065, 0.040242496490478515, 0.04070678329467774, 0.04417740631103516, 0.04100255966186524, 0.04122249603271484, 0.04093715286254883, 0.04056876754760742, 0.040104415893554686, 0.040062976837158204, 0.0401162223815918]",tokens/s,24.80456874884431,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,1172.025344,2579.365888,0.0,2176.843776,2071.865856,s,1,10.662482421875,10.662482421875,0.0,10.662482421875,10.662482421875,10.662482421875,10.662482421875,[10.662482421875],,kWh,9.731141670835465e-05,1.0726923338902716e-05,3.2090859006003725e-05,0.00014012919905326108,,MB,1418.231808,2921.201664,0.0,2506.09664,2438.447616,s,10,3.570441040039063,0.35704410400390624,0.0010478288176806417,0.35731207275390625,0.35793489990234373,0.35850973205566405,0.3589695977783203,"[0.3590845642089844, 0.35644183349609376, 0.355885498046875, 0.3555714111328125, 0.3557965393066406, 0.35734890747070314, 0.35727523803710937, 0.3576665954589844, 0.3578071594238281, 0.35756329345703125]",tokens/s,716.9982563196149,kWh,1.0657639402084366e-05,1.1753482089372533e-06,7.052446118142874e-06,1.888543372916449e-05,tokens/kWh,13555420.737023532,MB,1418.231808,2923.298816,0.0,2508.193792,2438.450176,s,10,26.25266845703125,2.625266845703125,0.009736074815399173,2.6230961914062503,2.6369916015625,2.640563916015625,2.643421767578125,"[2.619757568359375, 2.614363037109375, 2.617047119140625, 2.620974365234375, 2.625218017578125, 2.629352783203125, 2.612603515625, 2.63619775390625, 2.64413623046875, 
2.63301806640625]",tokens/s,23.99756051584414,kWh,7.728882028291651e-05,8.525004957136324e-06,4.5978596306656125e-05,0.00013179242154670893,tokens/kWh,478024.4513351778,,s,630,26.250484935760504,0.041667436405969045,0.0005905863866787084,0.041560657501220705,0.04220121231079102,0.04253650283813477,0.04451807861328128,"[0.042738079071044925, 0.04202716827392578, 0.041908222198486327, 0.04183244705200195, 0.041414657592773435, 0.041385120391845706, 0.04112812805175781, 0.041134078979492186, 0.041167518615722654, 0.04141459274291992, 0.04107676696777344, 0.04115868759155274, 0.04075929641723633, 0.045001953125, 0.04141136169433594, 0.04187136077880859, 0.044851200103759765, 0.04142675018310547, 0.041136318206787106, 0.04094976043701172, 0.04119968032836914, 0.04121984100341797, 0.045332672119140625, 0.04156415939331055, 0.04251587295532227, 0.04392406463623047, 0.041232383728027344, 0.04151529693603516, 0.04100476837158203, 0.04101923370361328, 0.0411956787109375, 0.040890369415283206, 0.04125286483764649, 0.041224193572998044, 0.04116275024414062, 0.04126297760009766, 0.04091027069091797, 0.04108259201049805, 0.040865985870361325, 0.04096716690063477, 0.0408900146484375, 0.04095782470703125, 0.041033985137939454, 0.04086326217651367, 0.041216064453125, 0.04119750213623047, 0.04125753784179687, 0.041815967559814454, 0.042145790100097655, 0.042401569366455075, 0.041317726135253904, 0.04177190399169922, 0.0413573112487793, 0.04132863998413086, 0.0413994255065918, 0.04125750350952148, 0.041591423034667965, 0.04187887954711914, 0.041666942596435545, 0.04159878540039062, 0.04128377532958984, 0.04123814392089844, 0.04094604873657227, 0.042395648956298826, 0.041594879150390625, 0.041338878631591795, 0.041060352325439455, 0.04122332763671875, 0.0411759033203125, 0.04113187026977539, 0.04110147094726563, 0.0409354248046875, 0.041322494506835936, 0.04098847961425781, 0.041087169647216794, 0.04094547271728516, 0.04099235153198242, 0.04150518417358398, 0.040824638366699216, 0.041113792419433595, 0.04101091384887695, 0.041027969360351565, 0.041013343811035156, 0.04109088134765625, 0.041425086975097655, 0.0409876480102539, 0.041081985473632815, 0.041070465087890626, 0.04084643173217774, 0.04117923355102539, 0.041062240600585935, 0.04160406494140625, 0.04137686538696289, 0.04145654296875, 0.041472000122070314, 0.041320510864257816, 0.041259105682373044, 0.041506656646728514, 0.04118489456176758, 0.041212543487548825, 0.04125686264038086, 0.04133577728271484, 0.04147727966308594, 0.04152700805664063, 0.04163804626464844, 0.04143414306640625, 0.042603328704833986, 0.04159388732910156, 0.04200294494628906, 0.041417057037353516, 0.04141888046264648, 0.04185513687133789, 0.041721694946289065, 0.04275609588623047, 0.042046592712402346, 0.04203209686279297, 0.04196752166748047, 0.04187360000610352, 0.042071231842041014, 0.041949569702148436, 0.04227734375, 0.04188713455200195, 0.04184716796875, 0.04217036819458008, 0.04291177749633789, 0.04212876892089844, 0.043092769622802736, 0.04161040115356445, 0.04172067260742188, 0.04141984176635742, 0.041261951446533204, 0.041302078247070315, 0.04118431854248047, 0.041348033905029294, 0.04158982467651367, 0.04139420700073242, 0.041658527374267576, 0.04120652770996094, 0.04137472152709961, 0.04168806457519531, 0.041301502227783206, 0.04130620956420898, 0.0412632942199707, 0.04108924865722656, 0.04107392120361328, 0.041091968536376956, 0.041522911071777344, 0.04136742401123047, 0.0416855354309082, 0.04115584182739258, 0.041352832794189456, 0.04115110397338867, 
0.041279647827148436, 0.04149251174926758, 0.0416987190246582, 0.041869983673095704, 0.04178326416015625, 0.04208214569091797, 0.04207430267333984, 0.0418711051940918, 0.042154048919677736, 0.04129811096191406, 0.04154115295410156, 0.041562591552734375, 0.041248287200927734, 0.041390209197998046, 0.04134332656860352, 0.04146499252319336, 0.04129264068603516, 0.04122009658813477, 0.041422847747802735, 0.041148414611816404, 0.04169036865234375, 0.041057022094726565, 0.04141260910034179, 0.041322494506835936, 0.04164995193481445, 0.04133091354370117, 0.04135116958618164, 0.041801727294921875, 0.04190003204345703, 0.04180582427978516, 0.041783294677734374, 0.041933982849121094, 0.043240318298339846, 0.04161225509643555, 0.04157952117919922, 0.041545726776123046, 0.041358463287353514, 0.04263324737548828, 0.04225843048095703, 0.04151500701904297, 0.04129609680175781, 0.041291519165039064, 0.04122627258300781, 0.04128969573974609, 0.04107881546020508, 0.041484256744384766, 0.04117712020874023, 0.04143308639526367, 0.04125696182250976, 0.041201663970947267, 0.04108486557006836, 0.04114182281494141, 0.041251136779785154, 0.041180736541748045, 0.041121726989746095, 0.04121427154541016, 0.0409554557800293, 0.04118518447875977, 0.04110041427612305, 0.04143497467041016, 0.04118700790405273, 0.041028064727783205, 0.0415618896484375, 0.04206796646118164, 0.04173004913330078, 0.04168076705932617, 0.041751873016357424, 0.041484222412109376, 0.04111859130859375, 0.041170047760009765, 0.04122297668457031, 0.041187614440917966, 0.041707294464111325, 0.041307937622070315, 0.04113750457763672, 0.04111859130859375, 0.04251644897460938, 0.04252262496948242, 0.04131427383422852, 0.042686527252197265, 0.04253900909423828, 0.041852928161621096, 0.041494815826416016, 0.041614112854003904, 0.0413111686706543, 0.04254435348510742, 0.04143183898925781, 0.04114163208007812, 0.043909759521484376, 0.04186111831665039, 0.04163545608520508, 0.041465473175048825, 0.041390113830566407, 0.041337566375732424, 0.044760704040527344, 0.041581119537353516, 0.04141241455078125, 0.0413573112487793, 0.04242812728881836, 0.04136758422851562, 0.04295004653930664, 0.04179609680175781, 0.041702625274658206, 0.04487382507324219, 0.041753055572509766, 0.04152374267578125, 0.04144416046142578, 0.041351966857910157, 0.04153772735595703, 0.04128755187988281, 0.04109120178222656, 0.0419697265625, 0.041793182373046876, 0.04220764923095703, 0.04180364990234375, 0.041472129821777344, 0.04143500900268555, 0.04137779235839844, 0.04127859115600586, 0.04128243255615234, 0.041836254119873045, 0.0411313591003418, 0.04153644943237305, 0.041573726654052734, 0.041060096740722654, 0.04116537475585937, 0.04109689712524414, 0.04121052932739258, 0.041422847747802735, 0.041404415130615234, 0.041818111419677735, 0.04147635269165039, 0.041712448120117186, 0.041661376953125, 0.041603073120117184, 0.04203241729736328, 0.04177772903442383, 0.04182649612426758, 0.04182761764526367, 0.04191712188720703, 0.04187360000610352, 0.041551681518554685, 0.04152873611450195, 0.04125142288208008, 0.04150460815429687, 0.04139996719360352, 0.041541694641113285, 0.04239750289916992, 0.04145011138916015, 0.0414535026550293, 0.04133894348144531, 0.04186435317993164, 0.04169302368164062, 0.04162876892089844, 0.041942817687988285, 0.04169270324707031, 0.04171152114868164, 0.04185094451904297, 0.04165811157226563, 0.04206476974487305, 0.04163350296020508, 0.041599262237548826, 0.041357440948486326, 0.042990943908691404, 0.04208009719848633, 0.0420843505859375, 0.04219539260864258, 
0.04220147323608398, 0.0421212158203125, 0.042192352294921874, 0.041709121704101564, 0.041726337432861325, 0.04177913665771484, 0.041613983154296874, 0.042162174224853514, 0.04192489624023438, 0.04195100784301758, 0.041918464660644535, 0.04209766387939453, 0.04236991882324219, 0.042315841674804684, 0.04230656051635742, 0.041913345336914064, 0.04247148895263672, 0.04211705780029297, 0.04244070434570312, 0.04217036819458008, 0.041917537689208986, 0.04216105651855469, 0.04185843276977539, 0.042073951721191404, 0.04189673614501953, 0.041688095092773436, 0.04180627059936524, 0.04120630264282227, 0.04135683059692383, 0.041443649291992186, 0.04123235321044922, 0.04155804824829101, 0.041151649475097654, 0.041715774536132816, 0.04116390228271485, 0.041191455841064456, 0.04139513778686523, 0.04125196838378906, 0.04153519821166992, 0.04103798294067383, 0.0412283821105957, 0.04177616119384766, 0.04109078216552734, 0.04147670364379883, 0.041004894256591796, 0.04117343902587891, 0.04115264129638672, 0.04134707260131836, 0.04127334213256836, 0.04147196960449219, 0.041906208038330076, 0.041570526123046875, 0.04194192123413086, 0.041818592071533205, 0.0416828498840332, 0.04162393569946289, 0.04147417449951172, 0.041371902465820315, 0.04126259231567383, 0.04297558212280273, 0.041981952667236325, 0.041752574920654296, 0.04152323150634766, 0.04167472076416016, 0.041664798736572264, 0.041985759735107424, 0.04193484878540039, 0.041737377166748045, 0.041948001861572264, 0.04187136077880859, 0.04187052917480469, 0.04211999893188476, 0.04165427017211914, 0.04156147384643555, 0.04117068862915039, 0.0413493766784668, 0.04125350570678711, 0.041398529052734376, 0.04142649459838867, 0.041468097686767576, 0.04153936004638672, 0.041261215209960934, 0.04152121734619141, 0.041680896759033206, 0.0413573112487793, 0.0416255989074707, 0.041246528625488284, 0.04143312072753906, 0.04115286254882813, 0.041897792816162106, 0.04121964645385742, 0.041315776824951175, 0.04124367904663086, 0.04125894546508789, 0.0410931510925293, 0.04114755249023438, 0.04122307205200195, 0.04111967849731445, 0.041026752471923826, 0.04122297668457031, 0.04104508972167969, 0.04105104064941406, 0.0410041618347168, 0.0409035530090332, 0.041414657592773435, 0.04109862518310547, 0.04082956695556641, 0.04117708969116211, 0.040869888305664064, 0.04157561492919922, 0.04169539260864258, 0.04171023941040039, 0.04168659210205078, 0.04193123245239258, 0.04176287841796875, 0.04125436782836914, 0.04133635330200195, 0.04121062469482422, 0.04137385559082031, 0.04150067138671875, 0.041388031005859374, 0.041646080017089845, 0.043081344604492186, 0.04181577682495117, 0.043700897216796875, 0.04149203109741211, 0.04126473617553711, 0.041409534454345705, 0.04627644729614258, 0.042608543395996096, 0.041439327239990234, 0.04127686309814453, 0.04141113662719727, 0.040997055053710936, 0.04158240127563476, 0.04143299102783203, 0.04131660842895508, 0.04120947265625, 0.041201057434082033, 0.0412658576965332, 0.04107068634033203, 0.04141648101806641, 0.04100735855102539, 0.04106547164916992, 0.04347292709350586, 0.041393054962158206, 0.04168710327148437, 0.04144076919555664, 0.041345535278320314, 0.041332736968994144, 0.044879936218261716, 0.042097728729248045, 0.042592479705810544, 0.04187231826782226, 0.04140185546875, 0.04155209732055664, 0.04146176147460937, 0.04151055908203125, 0.04210300827026367, 0.042113151550292965, 0.041971710205078124, 0.041545726776123046, 0.041717758178710936, 0.04164940643310547, 0.041691871643066404, 0.04175814437866211, 0.04217459106445313, 
0.041742721557617185, 0.04180339050292969, 0.041777633666992185, 0.042149246215820314, 0.04157817459106445, 0.04198495864868164, 0.04191555023193359, 0.041919486999511715, 0.04165820693969727, 0.04153484725952149, 0.04205632019042969, 0.04139360046386719, 0.04131631851196289, 0.041925216674804686, 0.04152127838134766, 0.041942913055419924, 0.04192470550537109, 0.041721920013427734, 0.042913791656494144, 0.04209596633911133, 0.041591423034667965, 0.04157238388061523, 0.04150447845458984, 0.04135964965820312, 0.0413675537109375, 0.041750526428222655, 0.041844959259033206, 0.04155779266357422, 0.04158816146850586, 0.041535423278808596, 0.04158473587036133, 0.041969249725341794, 0.04145452880859375, 0.041635009765625, 0.04197868728637695, 0.041940193176269534, 0.04188959884643555, 0.041724895477294924, 0.041740222930908205, 0.04143088150024414, 0.04165644836425781, 0.04140857696533203, 0.041537662506103516, 0.04171356964111328, 0.04200447845458984, 0.04193215942382812, 0.04208457565307617, 0.04213520050048828, 0.04223638534545898, 0.04248723220825195, 0.04233097457885742, 0.042116161346435546, 0.04208940887451172, 0.04167475128173828, 0.042250080108642576, 0.0418590087890625, 0.04169456100463867, 0.04173222351074219, 0.04201651382446289, 0.04225331115722656, 0.041961406707763674, 0.04222553634643555, 0.04220947265625, 0.042024959564208986, 0.041999423980712894, 0.042214336395263674, 0.04221484756469727, 0.04211503982543945, 0.04229568099975586, 0.0421802864074707, 0.04254569625854492, 0.04213081741333008, 0.042145919799804685, 0.04244332885742187, 0.042102718353271486, 0.04216118240356445, 0.04219798278808594, 0.04198556900024414, 0.04253343963623047, 0.04216534423828125, 0.04281631851196289, 0.043017311096191405, 0.041982784271240234, 0.04206387329101562, 0.04173337554931641, 0.0415300178527832, 0.04163369750976562, 0.041681182861328124, 0.04195756912231445, 0.0419749755859375, 0.0421126708984375, 0.0422143669128418, 0.0425912971496582, 0.04196233749389648, 0.04173577499389648, 0.0415728645324707, 0.04175667190551758, 0.04160006332397461, 0.04163257598876953, 0.04142502212524414, 0.04142694473266602, 0.04148428726196289, 0.04135935974121094, 0.04155984115600586, 0.041512222290039064, 0.04167776107788086, 0.041602462768554685, 0.04153545761108399, 0.04149107360839844, 0.042100513458251956, 0.04192879867553711, 0.041990272521972655, 0.041950977325439454, 0.0422011833190918, 0.04215584182739258, 0.04218505477905273, 0.042196990966796875, 0.042288448333740236, 0.04189641571044922, 0.04154390335083008, 0.04169728088378906, 0.041543678283691404, 0.04165017700195312, 0.04148614501953125, 0.04158246231079102, 0.04138016128540039, 0.04148031997680664, 0.04137561416625977, 0.04150886535644531, 0.041823902130126954, 0.04146006393432617, 0.04155187225341797, 0.041848159790039065, 0.04179216003417969, 0.04215193557739258, 0.041764671325683594, 0.04244403076171875, 0.04163433456420899, 0.041681312561035154, 0.04191984176635742, 0.0414760627746582, 0.04187411117553711, 0.04160505676269531, 0.041807071685791015]",tokens/s,23.99955663835238,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,854.867968,6477.971456,0.0,6075.449344,6044.13184,s,1,14.8949248046875,14.8949248046875,0.0,14.8949248046875,14.8949248046875,14.8949248046875,14.8949248046875,[14.8949248046875],,kWh,0.00022386509401668868,2.468673406614509e-05,7.27650582119932e-05,0.00032131688629482696,,MB,1374.04416,7002.259456,0.0,6587.154432,6469.997056,s,10,10.294670227050782,1.0294670227050782,0.004768682765396884,1.0309503784179688,1.0328842163085938,1.0346115417480468,1.0359934020996093,"[1.0179098510742188, 1.0253687744140625, 1.027137451171875, 1.0301861572265625, 1.0311337890625, 1.0311171875, 1.0307835693359375, 1.0321942138671876, 1.0325003662109375, 1.0363388671875]",tokens/s,248.67236575225286,kWh,2.997852435083208e-05,3.3061149903762562e-06,1.9971627088399835e-05,5.3256266429608166e-05,tokens/kWh,4806946.058420557,MB,1397.583872,7014.842368,0.0,6599.737344,6469.999616,s,10,49.6400703125,4.96400703125,0.010783846605389363,4.962759521484375,4.976219873046875,4.9762227294921875,4.9762250146484375,"[4.9470048828125, 4.95001513671875, 4.95448486328125, 4.95989453125, 4.96049169921875, 4.97458935546875, 4.96502734375, 4.97621923828125, 4.97611767578125, 4.9762255859375]",tokens/s,12.691359944374573,kWh,0.00014550772834791888,1.6050666250186643e-05,9.628816036379995e-05,0.00025784655496190553,tokens/kWh,244331.36215183357,,s,630,49.637541809082066,0.07878974890330481,0.0015071019199308364,0.07833703994750976,0.07968798294067382,0.08010508651733399,0.08888681457519532,"[0.08879615783691407, 0.07936029052734375, 0.07879094696044922, 0.0791262435913086, 0.07853648376464843, 0.07822866821289062, 0.07792390441894531, 0.07782556915283204, 0.07790249633789062, 0.07815987396240234, 0.07800409698486328, 0.07794255828857422, 0.0778878402709961, 0.07795235443115234, 0.07799874877929687, 0.07793459320068359, 0.0778891830444336, 0.07786121368408203, 0.07797760009765625, 0.0779714584350586, 0.07786319732666015, 0.07917472076416016, 0.07863097381591796, 0.07822396850585937, 0.0779115219116211, 0.07798633575439454, 0.07800991821289062, 0.07806406402587891, 0.07797897338867188, 0.07798236846923828, 0.07931609344482422, 0.07925132751464843, 0.0786476821899414, 0.07822921752929687, 0.07863184356689454, 0.07811398315429688, 0.07838175964355469, 0.0782767333984375, 0.07804723358154297, 0.07816352081298829, 0.07799263763427734, 0.0794170913696289, 0.07924877166748047, 0.07882189178466797, 0.07826966094970703, 0.07811334228515625, 0.0781253433227539, 0.07821311950683593, 0.07809974670410157, 0.07822415924072265, 0.07819638061523437, 0.07828720092773438, 0.07954831695556641, 0.07934556579589844, 0.07928205108642578, 0.07897503662109374, 0.07845913696289063, 0.07815126037597656, 0.07820262145996094, 0.07827667236328124, 0.07813590240478516, 0.07820025634765625, 0.07822617340087891, 0.08844032287597656, 0.07940473937988281, 0.0787628173828125, 0.07832166290283203, 0.07904051208496093, 0.07851350402832032, 0.07809664154052734, 0.07782441711425782, 0.07781081390380859, 0.07779756927490235, 0.07785145568847657, 0.07788326263427735, 0.07784652709960938, 0.07918150329589843, 0.07908563232421875, 0.07863494110107422, 0.0781374740600586, 0.07794464111328125, 0.07784265899658203, 0.07799823760986328, 0.07787884521484376, 0.07799443054199219, 0.07794483184814453, 0.08007852935791016, 0.07913081359863282, 
0.07865561676025391, 0.0782213134765625, 0.07806880187988281, 0.07797856140136719, 0.07806710052490234, 0.07798601531982421, 0.07808243560791016, 0.07799807739257812, 0.07806147003173829, 0.07939635467529296, 0.07879126739501953, 0.07833382415771484, 0.0780552978515625, 0.07809049224853516, 0.07814771270751954, 0.07807373046875, 0.07810460662841796, 0.07814669036865235, 0.07819554901123046, 0.07938457489013671, 0.07880313873291016, 0.0783460464477539, 0.0781475830078125, 0.07817625427246094, 0.07807794952392579, 0.07819878387451172, 0.07822073364257813, 0.0781376953125, 0.07818672180175781, 0.07970595550537109, 0.07968287658691406, 0.07971721649169922, 0.07910982513427735, 0.0786063003540039, 0.07839574432373046, 0.07833116912841796, 0.07834902191162109, 0.07828662109375, 0.08937897491455078, 0.07928425598144531, 0.07856896209716797, 0.07811737823486328, 0.07783757019042968, 0.07787391662597656, 0.07783833312988281, 0.07917990112304688, 0.07849990081787109, 0.07824518585205079, 0.07788358306884766, 0.07819264221191406, 0.07793695831298827, 0.07806156921386719, 0.07859609222412109, 0.07804431915283203, 0.07788630676269531, 0.0794419174194336, 0.07885993957519531, 0.07838345336914063, 0.07794207763671875, 0.07789433288574218, 0.0779688949584961, 0.07796771240234375, 0.07794601440429688, 0.07792870330810547, 0.07933968353271484, 0.07919676971435546, 0.07900978851318359, 0.07862643432617188, 0.07823603057861328, 0.07803260803222656, 0.07801679992675781, 0.07804723358154297, 0.07808819580078125, 0.07807158660888672, 0.07800547027587891, 0.07932176208496093, 0.07917193603515625, 0.07923712158203125, 0.07874969482421874, 0.07836441802978515, 0.07807142639160156, 0.07818303680419922, 0.0780445785522461, 0.07817420959472657, 0.07806829071044921, 0.07822544097900391, 0.07804886627197266, 0.0795335693359375, 0.07900048065185547, 0.07863910675048828, 0.07814723205566407, 0.07818793487548828, 0.07852127838134766, 0.07873974609375, 0.07814524841308594, 0.0781983642578125, 0.0795950698852539, 0.07949807739257812, 0.07966508483886718, 0.0794288330078125, 0.07886431884765625, 0.08892384338378906, 0.07944979095458984, 0.07876876831054687, 0.07932109069824218, 0.07865570831298828, 0.07837641906738281, 0.07788070678710937, 0.07786784362792969, 0.07793267059326171, 0.0779587173461914, 0.07785247802734375, 0.07800281524658204, 0.0791797103881836, 0.07939897918701172, 0.0788666534423828, 0.0785117416381836, 0.07791426849365235, 0.07803622436523437, 0.07792463684082031, 0.07805590057373046, 0.07790182495117187, 0.07813324737548828, 0.07920412445068359, 0.07903376007080078, 0.07866604614257812, 0.07864486694335937, 0.07820582580566406, 0.07810867309570313, 0.07797532653808593, 0.07812528228759766, 0.07795279693603516, 0.07807202911376954, 0.0792279052734375, 0.07892684936523438, 0.08001853179931641, 0.07929743957519532, 0.07872092437744141, 0.0780565414428711, 0.07816623687744141, 0.07813200378417968, 0.07811414337158203, 0.07810467529296874, 0.07811113739013673, 0.07819213104248048, 0.07959824371337891, 0.07968704223632812, 0.07899609375, 0.07865875244140624, 0.07809638214111328, 0.07818732452392578, 0.07806361389160156, 0.07824002838134765, 0.07809609222412109, 0.07824384307861328, 0.07825612640380859, 0.07953778839111328, 0.07968425750732422, 0.07970787048339843, 0.07975936126708984, 0.07922217559814453, 0.07885440063476562, 0.0782628173828125, 0.07850784301757813, 0.088946044921875, 0.0793699188232422, 0.07933023834228516, 0.07869625854492188, 0.07828498840332031, 0.07783833312988281, 0.0778260498046875, 
0.07781375885009766, 0.07784003448486328, 0.07774604797363281, 0.07787071990966797, 0.07929328155517579, 0.07940860748291016, 0.07880054473876953, 0.0783246078491211, 0.07796121978759765, 0.07795507049560547, 0.07790374755859375, 0.07793472290039062, 0.07796031951904298, 0.07788604736328125, 0.07938262176513672, 0.07941069030761719, 0.0788650894165039, 0.07834153747558593, 0.07797001647949219, 0.07798281860351562, 0.07806249237060547, 0.07798348999023437, 0.07800863647460937, 0.07801849365234376, 0.07962009429931641, 0.07949295806884765, 0.07891580963134766, 0.0784444808959961, 0.0780574722290039, 0.07805123138427734, 0.07812252807617187, 0.07806594848632813, 0.07809613037109375, 0.07804163360595703, 0.07951757049560547, 0.07952806091308594, 0.07965081787109375, 0.0801075210571289, 0.07931597137451171, 0.07877120208740235, 0.07818844604492188, 0.07809238433837891, 0.07848140716552734, 0.07825612640380859, 0.07832524871826171, 0.07809446716308593, 0.07819283294677734, 0.07953545379638671, 0.07975625610351562, 0.08002268981933594, 0.07966508483886718, 0.07906588745117188, 0.07899526214599609, 0.07828908538818359, 0.078166015625, 0.07830675506591797, 0.088672607421875, 0.07951439666748047, 0.07876592254638672, 0.07847138977050781, 0.0779993896484375, 0.07794329833984374, 0.0779878387451172, 0.07797964477539063, 0.0779524154663086, 0.07919055938720704, 0.07923308563232422, 0.07958662414550781, 0.078863037109375, 0.07851609802246094, 0.07811817932128906, 0.07808694458007813, 0.07808988952636718, 0.07804476928710938, 0.07803782653808594, 0.07811062622070312, 0.07930057525634765, 0.07957107543945313, 0.0795688934326172, 0.0791747817993164, 0.07869920349121094, 0.07872940826416015, 0.0784874267578125, 0.07815590667724609, 0.07809004974365234, 0.07811090850830078, 0.07823264312744141, 0.07837792205810547, 0.0781325454711914, 0.08034989166259765, 0.08036131286621094, 0.08039030456542968, 0.07920025634765625, 0.07919574737548828, 0.07835916900634765, 0.07833168029785156, 0.07820079803466796, 0.07816809844970703, 0.07830089569091797, 0.07829945373535156, 0.07825516510009765, 0.07842915344238281, 0.08010316467285156, 0.08013836669921875, 0.08035673522949219, 0.08010189056396484, 0.07944620513916016, 0.07900179290771485, 0.07855903625488281, 0.0783298568725586, 0.07833971405029297, 0.07839344024658203, 0.07816835021972657, 0.07829248046875, 0.07825238037109375, 0.07860578918457031, 0.07997817230224609, 0.0802356185913086, 0.08039974212646485, 0.09060873413085938, 0.07929539489746094, 0.07859814453125, 0.07828067016601563, 0.07778717041015625, 0.07791410827636719, 0.0792616958618164, 0.07871833801269532, 0.07823612976074219, 0.07780531311035156, 0.07785676574707032, 0.07781827545166016, 0.07786396789550781, 0.07931593322753906, 0.07872306823730468, 0.07926950073242188, 0.07940953826904297, 0.07939686584472656, 0.07875094604492187, 0.07841053009033203, 0.07785062408447266, 0.07800201416015624, 0.07784873962402344, 0.07799807739257812, 0.0779221420288086, 0.07803123474121093, 0.07938835144042969, 0.0794808349609375, 0.0798372802734375, 0.07933235168457031, 0.0787239990234375, 0.07827830505371093, 0.07801081848144531, 0.07803612518310547, 0.07834649658203124, 0.07809699249267578, 0.07801757049560547, 0.07805628967285157, 0.07986329650878907, 0.0795736312866211, 0.07885209655761719, 0.0785182113647461, 0.07936393737792968, 0.07903663635253906, 0.07853609466552734, 0.07818857574462891, 0.07811727905273437, 0.078178466796875, 0.07805471801757813, 0.07817868804931641, 0.08000873565673829, 0.08016770935058594, 
0.0796385269165039, 0.07898521423339844, 0.07863680267333985, 0.07833436584472656, 0.07808393859863282, 0.07822950744628906, 0.07813247680664062, 0.07831014251708984, 0.07813353729248047, 0.07902178955078125, 0.0800636444091797, 0.0911365737915039, 0.07960486602783202, 0.07876287841796875, 0.07834162902832031, 0.0779161605834961, 0.07780188751220703, 0.0779552993774414, 0.0790995864868164, 0.07929670715332031, 0.07892326354980468, 0.07855155181884765, 0.07824960327148438, 0.07799199676513671, 0.07821548461914063, 0.0792616958618164, 0.07861023712158204, 0.07800182342529297, 0.07798368072509766, 0.07917449951171875, 0.07887580871582031, 0.08002963256835938, 0.07923152160644531, 0.07858966064453125, 0.07820873260498047, 0.07799673461914063, 0.0779530258178711, 0.07805951690673828, 0.07798124694824218, 0.07801618957519531, 0.07920441436767578, 0.07881782531738281, 0.08016419219970704, 0.0800038070678711, 0.07935190582275391, 0.07871692657470702, 0.078355712890625, 0.07803526306152343, 0.07806409454345703, 0.07820285034179687, 0.07814134216308594, 0.07805462646484375, 0.07841267395019531, 0.07956995391845703, 0.08010665893554687, 0.080109375, 0.08065042877197266, 0.07940691375732421, 0.07886847686767579, 0.0783729248046875, 0.07814134216308594, 0.07813737487792968, 0.07825945281982422, 0.07809715270996094, 0.07823110198974609, 0.07820291137695312, 0.07958774566650391, 0.07965264129638672, 0.0801151351928711, 0.08070127868652344, 0.07989958190917969, 0.07924531555175782, 0.07885823822021484, 0.07842144012451172, 0.08899110412597656, 0.07941532897949219, 0.07872982025146484, 0.07837308502197265, 0.07932649230957031, 0.07892623901367188, 0.07862486267089844, 0.07818649291992187, 0.07785472106933594, 0.07794278717041016, 0.07783747100830078, 0.07792495727539063, 0.07845689392089844, 0.07967533111572266, 0.07947452545166016, 0.079585693359375, 0.0794537582397461, 0.0787992935180664, 0.07844863891601563, 0.07803427124023438, 0.07795798492431641, 0.07808294677734375, 0.07801036834716797, 0.07807270050048828, 0.07796259307861328, 0.07978057861328125, 0.07996934509277344, 0.07964768218994141, 0.07898438262939453, 0.07858668518066406, 0.0781670684814453, 0.07813187408447266, 0.07812742614746093, 0.07815167999267578, 0.07821107482910156, 0.07877804565429687, 0.07818271636962891, 0.08063385772705078, 0.07973811340332031, 0.08020249938964844, 0.07972370910644531, 0.07930675506591797, 0.07874591827392578, 0.07829891204833984, 0.07828546905517578, 0.0782681884765625, 0.0782154541015625, 0.07840476989746094, 0.07880809783935547, 0.07901983642578125, 0.08002294158935547, 0.079927490234375, 0.07998639678955079, 0.08012665557861329, 0.07978511810302734, 0.07942047882080078, 0.07882083129882812, 0.07855545806884766, 0.07826963043212891, 0.07832249450683594, 0.07913062286376953, 0.07832559967041015, 0.07861849975585937, 0.08940338897705079, 0.07943974304199218, 0.07930063629150391, 0.07873340606689454, 0.07828876495361328, 0.0778323211669922, 0.07785676574707032, 0.07783404541015625, 0.07790946960449219, 0.07779785919189453, 0.07801232147216797, 0.07937673950195312, 0.07938480377197266, 0.07876585388183593, 0.07846463775634765, 0.07923149108886719, 0.0786266860961914, 0.07820441436767578, 0.07788361358642579, 0.07804508972167969, 0.07785100555419922, 0.07921868896484376, 0.07879011535644531, 0.07988483428955079, 0.07896415710449219, 0.0786761245727539, 0.0780025634765625, 0.07807183837890624, 0.07928803253173829, 0.07876227569580078, 0.0783810272216797, 0.078115966796875, 0.07800713348388671, 0.07950137329101563, 
0.07958080291748047, 0.07967129516601562, 0.07893440246582031, 0.07848041534423827, 0.07812518310546875, 0.07822627258300781, 0.07890329742431641, 0.07914006042480469, 0.0786480941772461, 0.07824332427978516, 0.07825254058837891, 0.07951702117919922, 0.07969644927978516, 0.080674560546875, 0.08010095977783203, 0.0793625259399414, 0.0789109115600586, 0.0784343032836914, 0.07823859405517578, 0.07829414367675781, 0.07827645111083985, 0.07843328094482421, 0.07826227569580078, 0.07852754974365235, 0.07979718780517578, 0.08046387481689453, 0.07981449890136719, 0.08016255950927734, 0.08085135650634766]",tokens/s,12.69200643382245,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,2579.480576,11834.097664,0.0,11431.575552,10953.091072,s,1,21.4465703125,21.4465703125,0.0,21.4465703125,21.4465703125,21.4465703125,21.4465703125,[21.4465703125],,kWh,0.0004130404149083271,4.5554387062217006e-05,0.00013822649946999832,0.0005968213014405424,,MB,1955.258368,12729.581568,0.0,12314.476544,11624.13056,s,10,17.742991455078123,1.7742991455078125,0.007950765908732383,1.7766617431640626,1.780648742675781,1.7823486022949218,1.7837084899902342,"[1.7542496337890625, 1.768703125, 1.7705572509765626, 1.77411669921875, 1.7759564208984375, 1.7773670654296876, 1.7784588623046875, 1.779262939453125, 1.78027099609375, 1.7840484619140624]",tokens/s,144.28232164127635,kWh,5.17618478358338e-05,5.7089704444165825e-06,3.443766643899948e-05,9.190848471924987e-05,tokens/kWh,2785379.3997583096,MB,1959.489536,12731.67872,0.0,12316.573696,11624.13312,s,10,88.64462304687501,8.864462304687502,0.022566856518242345,8.86867822265625,8.8869935546875,8.8880236328125,8.8888476953125,"[8.8152001953125, 8.84196875, 8.847572265625, 8.861177734375, 8.88145703125, 8.8635400390625, 8.8890537109375, 8.8867646484375, 8.884072265625, 8.87381640625]",tokens/s,7.107030052650321,kWh,0.0002596584991087491,2.864199501486212e-05,0.00017231169340480024,0.0004606121875284114,tokens/kWh,136774.49643278064,,s,630,88.64077651977529,0.14069964526948475,0.0015867360498308255,0.1406297607421875,0.14163049621582033,0.14197435760498045,0.15080949584960937,"[0.15167564392089844, 0.13853395080566405, 0.13920521545410156, 0.1398828430175781, 0.13778533935546874, 0.13848130798339844, 0.13956089782714845, 0.1405545654296875, 0.13841754150390626, 0.13912080383300782, 0.13875004577636718, 0.137955810546875, 0.13989273071289063, 0.14069862365722657, 0.14081925964355468, 0.13978028869628906, 0.13995606994628906, 0.1387890167236328, 0.13880934143066406, 0.13894015502929688, 0.1396329345703125, 0.13911241149902343, 0.13959507751464845, 0.13940623474121094, 0.13892381286621094, 0.13907763671875, 0.13987142944335937, 0.14006150817871094, 0.14087767028808593, 0.14043894958496095, 0.14049906921386718, 0.13939981079101563, 0.13907148742675782, 0.13899571228027344, 0.13998899841308593, 0.13995747375488282, 0.14005453491210937, 0.13928323364257814, 
0.14009315490722657, 0.13923091125488282, 0.13933628845214843, 0.1406033935546875, 0.14066482543945313, 0.1400722198486328, 0.14025596618652345, 0.14024041748046875, 0.14070831298828124, 0.14095155334472656, 0.14024908447265624, 0.14078346252441407, 0.13981875610351563, 0.140400634765625, 0.1394851531982422, 0.140159423828125, 0.1395897979736328, 0.14045167541503906, 0.13952975463867187, 0.14016355895996094, 0.14019891357421874, 0.13961318969726563, 0.14014041137695313, 0.1396696319580078, 0.1405665283203125, 0.1516444549560547, 0.1386498260498047, 0.13888079833984374, 0.13874124145507813, 0.13867433166503906, 0.13879248046875, 0.14105194091796874, 0.14244557189941406, 0.1396999969482422, 0.13998431396484376, 0.14041349792480468, 0.14058026123046874, 0.13903318786621094, 0.13994189453125, 0.14113381958007812, 0.139859130859375, 0.13946144104003907, 0.13947084045410157, 0.1396590118408203, 0.13804570007324218, 0.13964002990722657, 0.14000527954101563, 0.1404827880859375, 0.13929539489746093, 0.14030950927734376, 0.13992243957519532, 0.13978985595703125, 0.14001609802246093, 0.1408691864013672, 0.1419022674560547, 0.14039039611816406, 0.14125599670410155, 0.14167222595214843, 0.14022857666015626, 0.14031149291992187, 0.13945046997070312, 0.14016000366210937, 0.140447998046875, 0.139702392578125, 0.14080873107910155, 0.13956300354003906, 0.13942562866210936, 0.13972854614257812, 0.1394134063720703, 0.14044956970214845, 0.14014956665039063, 0.1395587158203125, 0.13955500793457032, 0.13957533264160157, 0.13938233947753906, 0.14057650756835938, 0.14114816284179688, 0.1404054718017578, 0.14016313171386718, 0.1406681671142578, 0.14122674560546875, 0.14045785522460938, 0.14109260559082032, 0.14139213562011718, 0.14147286987304689, 0.14056541442871093, 0.14145928955078124, 0.1413080596923828, 0.14968031311035157, 0.13871104431152342, 0.13890765380859374, 0.13966310119628905, 0.14032511901855468, 0.1400443572998047, 0.14024803161621094, 0.14165647888183594, 0.1404851837158203, 0.13940325927734376, 0.13896864318847657, 0.139114501953125, 0.1397091827392578, 0.13929991149902343, 0.14091084289550782, 0.1400200958251953, 0.1396956787109375, 0.13880979919433595, 0.13926400756835938, 0.13993370056152343, 0.13966744995117186, 0.1416826934814453, 0.14058026123046874, 0.14097789001464844, 0.14069827270507812, 0.14077593994140625, 0.140699462890625, 0.14073968505859374, 0.14001759338378905, 0.14014349365234374, 0.14013031005859375, 0.1402040252685547, 0.1396815643310547, 0.14000355529785155, 0.14024089050292968, 0.14005223083496093, 0.13989613342285157, 0.1392459259033203, 0.14064918518066405, 0.13937855529785156, 0.14084886169433594, 0.14043746948242186, 0.13993606567382813, 0.14030029296875, 0.13953753662109375, 0.14065078735351563, 0.1394877166748047, 0.14186714172363282, 0.13987225341796874, 0.14037353515625, 0.14061138916015625, 0.14046070861816407, 0.14084867858886718, 0.1408885498046875, 0.14166835021972657, 0.1412710418701172, 0.1408737335205078, 0.1411124725341797, 0.14110972595214843, 0.14143101501464844, 0.14096159362792968, 0.14117913818359376, 0.14116053771972656, 0.15192962646484376, 0.13879692077636718, 0.13889532470703125, 0.13867843627929688, 0.138756103515625, 0.1393315887451172, 0.1423946533203125, 0.14336492919921875, 0.13976976013183592, 0.14028390502929688, 0.14029209899902345, 0.14055734252929689, 0.14039698791503907, 0.1394669189453125, 0.14088998413085937, 0.13998335266113282, 0.13968179321289062, 0.13961027526855468, 0.13983308410644532, 0.13971235656738282, 0.13902053833007813, 
0.14053721618652343, 0.14012847900390624, 0.1400590362548828, 0.13929612731933594, 0.1401627197265625, 0.13909449768066406, 0.14073292541503907, 0.14046733093261718, 0.1406239013671875, 0.14133935546875, 0.14058009338378907, 0.14106098937988282, 0.14090232849121093, 0.1412130889892578, 0.1408354949951172, 0.14124237060546874, 0.14121926879882812, 0.14129119873046875, 0.14113804626464843, 0.14182284545898438, 0.1410779266357422, 0.14115087890625, 0.14116435241699218, 0.1413017578125, 0.14077340698242188, 0.14098634338378907, 0.1412562255859375, 0.14072674560546874, 0.14010064697265626, 0.14015586853027343, 0.14085324096679688, 0.14005657958984374, 0.140410400390625, 0.14038041687011718, 0.14076707458496093, 0.1404174041748047, 0.1400641326904297, 0.14074534606933595, 0.14035752868652343, 0.14120970153808593, 0.14054768371582033, 0.14092265319824218, 0.1502230682373047, 0.14049685668945314, 0.1401656036376953, 0.14030653381347657, 0.1397227478027344, 0.13951795959472657, 0.1397881317138672, 0.14096400451660157, 0.14042521667480468, 0.13955072021484374, 0.13902752685546876, 0.14053411865234375, 0.13967829895019532, 0.13966744995117186, 0.140548095703125, 0.14006428527832032, 0.14006935119628905, 0.13901145935058593, 0.14027020263671874, 0.14016307067871095, 0.1400975341796875, 0.14074639892578125, 0.1406356201171875, 0.14026637268066405, 0.14057647705078125, 0.14082281494140625, 0.14103053283691405, 0.14088015747070312, 0.1418116455078125, 0.14118159484863282, 0.1407447052001953, 0.14050480651855468, 0.14172589111328124, 0.140767333984375, 0.14121369934082031, 0.14179327392578125, 0.1412888946533203, 0.141102783203125, 0.14122892761230468, 0.1411461181640625, 0.14113973999023438, 0.14186262512207032, 0.14117529296875, 0.14089830017089844, 0.1412894744873047, 0.14210159301757813, 0.14136550903320313, 0.14096646118164063, 0.14131210327148438, 0.1419489288330078, 0.1408139190673828, 0.14095606994628906, 0.14137548828125, 0.14143487548828124, 0.14141439819335938, 0.14136489868164062, 0.14144546508789063, 0.14145692443847657, 0.14122441101074218, 0.14194883728027344, 0.14110719299316407, 0.14180732727050782, 0.14092323303222656, 0.1521254425048828, 0.1389150390625, 0.13948538208007813, 0.13885072326660156, 0.13885821533203124, 0.1387987823486328, 0.1423593292236328, 0.14204261779785157, 0.1395587158203125, 0.14008563232421875, 0.13907589721679686, 0.13879682922363282, 0.1397598419189453, 0.14145535278320312, 0.14170469665527344, 0.14079379272460937, 0.14048313903808593, 0.1398681640625, 0.14064639282226563, 0.14073199462890626, 0.14106428527832032, 0.14186119079589843, 0.14133042907714843, 0.14182803344726563, 0.14105970764160156, 0.1407779235839844, 0.14106163024902343, 0.14052198791503906, 0.14072422790527345, 0.14033424377441406, 0.1411899871826172, 0.13998002624511718, 0.1405918731689453, 0.1406397705078125, 0.1401646728515625, 0.14019427490234376, 0.1402490234375, 0.14058341979980468, 0.14030419921875, 0.1403519287109375, 0.14033689880371095, 0.1403353271484375, 0.14024172973632812, 0.14030050659179688, 0.14035430908203125, 0.14047219848632814, 0.14039462280273438, 0.13996572875976562, 0.14035848999023437, 0.14035673522949219, 0.14015155029296875, 0.1409290313720703, 0.14062387084960937, 0.14065469360351562, 0.14022441101074218, 0.14122598266601563, 0.14127119445800781, 0.139708251953125, 0.1415448303222656, 0.14047911071777344, 0.14122393798828126, 0.14125375366210938, 0.14153817749023437, 0.1518218231201172, 0.13909677124023437, 0.1395067138671875, 0.1395227508544922, 0.13918850708007813, 
0.13877862548828124, 0.14154547119140626, 0.14162063598632812, 0.14002645874023437, 0.13988975524902345, 0.1391786193847656, 0.13909635925292968, 0.1391636505126953, 0.14156185913085936, 0.14152088928222656, 0.14089161682128906, 0.14001759338378905, 0.14011439514160157, 0.14078579711914063, 0.14039859008789063, 0.1410086669921875, 0.14206997680664063, 0.14124998474121095, 0.14136991882324218, 0.1408143310546875, 0.14077078247070313, 0.1410952911376953, 0.14102496337890624, 0.141916259765625, 0.1415663299560547, 0.14150245666503905, 0.140759033203125, 0.1411762237548828, 0.14107093811035157, 0.14123158264160157, 0.1418714599609375, 0.14158834838867188, 0.14135472106933594, 0.14117129516601562, 0.1412095947265625, 0.1410109405517578, 0.1410864715576172, 0.14186111450195313, 0.14125669860839843, 0.14123971557617188, 0.14129212951660156, 0.1407447052001953, 0.14102291870117187, 0.14111776733398437, 0.1422274627685547, 0.14134681701660157, 0.14065869140625, 0.14134474182128906, 0.14075091552734376, 0.14066685485839844, 0.14236058044433594, 0.14172979736328126, 0.14079721069335938, 0.14153514099121095, 0.1416276092529297, 0.1409071044921875, 0.14107142639160156, 0.14145417785644532, 0.15092530822753905, 0.1394619140625, 0.13908023071289063, 0.13889091491699218, 0.13895120239257813, 0.1392021484375, 0.1414475860595703, 0.14243635559082032, 0.1400925750732422, 0.13919888305664063, 0.13947538757324218, 0.1390960693359375, 0.1390118408203125, 0.1407236785888672, 0.1420377960205078, 0.14086268615722655, 0.1393467254638672, 0.13997669982910158, 0.14041702270507814, 0.13980656433105468, 0.1417044219970703, 0.1420625305175781, 0.14135824584960938, 0.14010569763183595, 0.14088485717773438, 0.14101881408691405, 0.14053330993652344, 0.14101580810546874, 0.1423953857421875, 0.14190419006347657, 0.14078857421875, 0.1408822021484375, 0.14124703979492187, 0.14117459106445313, 0.14111663818359374, 0.14239637756347656, 0.14138163757324218, 0.1412096710205078, 0.14143072509765625, 0.14144300842285157, 0.14108035278320313, 0.1413573455810547, 0.14239744567871093, 0.14074281311035156, 0.14147331237792968, 0.14083718872070314, 0.14211276245117188, 0.14011187744140624, 0.14168031311035156, 0.14192585754394532, 0.14151971435546876, 0.14089974975585937, 0.1414190673828125, 0.14091062927246092, 0.14130790710449218, 0.14108262634277344, 0.1415412139892578, 0.14134255981445312, 0.1409252471923828, 0.1414505615234375, 0.14102803039550782, 0.1413666534423828, 0.14135565185546875, 0.15093923950195312, 0.13967196655273437, 0.13980058288574218, 0.13944422912597657, 0.13889695739746094, 0.13977850341796874, 0.14082418823242188, 0.14177836608886718, 0.14045018005371093, 0.13991993713378906, 0.1400791015625, 0.139042236328125, 0.13993017578125, 0.14018386840820313, 0.14108441162109375, 0.14016709899902344, 0.14037525939941406, 0.14001026916503906, 0.13914111328125, 0.14092288208007814, 0.1410109405517578, 0.1416048583984375, 0.14102117919921875, 0.14044918823242186, 0.14044972229003908, 0.14107878112792968, 0.14148434448242186, 0.14125238037109375, 0.14174220275878907, 0.14128924560546874, 0.14114591979980468, 0.1410361328125, 0.14173965454101561, 0.14057327270507813, 0.14216281127929686, 0.14096601867675781, 0.14159747314453125, 0.14088397216796875, 0.1413773193359375, 0.1414146270751953, 0.1407078399658203, 0.14218438720703125, 0.14155783081054688, 0.14128536987304688, 0.14074485778808593, 0.1406728057861328, 0.14171658325195313, 0.14085014343261718, 0.14215577697753906, 0.14109027099609375, 0.14117123413085939, 
0.14107034301757812, 0.14076109313964844, 0.14128742980957032, 0.14040882873535157, 0.1421271057128906, 0.14118092346191408, 0.14118502807617186, 0.14060922241210938, 0.14083103942871095, 0.1415978546142578, 0.14066697692871094, 0.14108134460449218, 0.15052595520019532, 0.14046141052246094, 0.14027842712402344, 0.1404620819091797, 0.1408737335205078, 0.14038015747070312, 0.140787109375, 0.14201097106933594, 0.1405911102294922, 0.14028390502929688, 0.14004428100585936, 0.14007501220703125, 0.14010777282714842, 0.14025100708007812, 0.14070182800292969, 0.14113484191894532, 0.1407518768310547, 0.14002723693847657, 0.1401084442138672, 0.1399781494140625, 0.14018006896972657, 0.14006874084472656, 0.14101651000976562, 0.14093997192382812, 0.14014215087890625, 0.14001513671875, 0.14015763854980468, 0.1401571502685547, 0.1401835479736328, 0.14051327514648437, 0.1408942108154297, 0.13991836547851563, 0.1407211151123047, 0.14011392211914062, 0.14094950866699218, 0.14042842102050782, 0.14044851684570311, 0.14168281555175782, 0.13976605224609376, 0.14189462280273438, 0.1407413787841797, 0.1412127685546875, 0.14112042236328126, 0.14000726318359374, 0.1419951629638672, 0.13985069274902343, 0.14132774353027344, 0.14130245971679686, 0.1412623291015625, 0.1412058868408203, 0.14046003723144532, 0.14108447265625, 0.1405824279785156, 0.14134967041015625, 0.14139573669433594, 0.14129341125488282, 0.14128550720214844, 0.14092108154296876, 0.140474365234375, 0.1408000030517578, 0.14134271240234375, 0.14115225219726563, 0.14122393798828126]",tokens/s,7.107338459060651,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 68.12 MiB is free. Process 80805 has 14.67 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 293.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,821.415936,4724.752384,0.0,4322.230272,4218.036736,s,1,14.351474609375,14.351474609375,0.0,14.351474609375,14.351474609375,14.351474609375,14.351474609375,[14.351474609375],,kWh,0.00019900312649582096,2.1944331832433224e-05,6.47244962240015e-05,0.0002856719545522557,,MB,1333.325824,5379.063808,0.0,4963.958784,4656.747008,s,10,9.564135437011718,0.9564135437011718,0.0033190574814543784,0.95649658203125,0.9603512451171875,0.9607914672851562,0.9611436450195312,"[0.9501656494140625, 0.9525548095703125, 0.9548316040039062, 0.9549065551757813, 0.955095458984375, 0.957897705078125, 0.958536865234375, 0.96025341796875, 0.961231689453125, 0.9586616821289062]",tokens/s,267.6666403210057,kWh,2.7861596941284008e-05,3.072639896877226e-06,1.8509737030000048e-05,4.944397386816128e-05,tokens/kWh,5177577.366305653,MB,1356.865536,5379.063808,0.0,4963.958784,4656.749568,s,10,44.9467119140625,4.494671191406249,0.0030528697989721082,4.495209716796875,4.497829443359375,4.498362719726562,4.498789340820312,"[4.49347509765625, 4.49457373046875, 4.48803076171875, 4.49889599609375, 4.49098828125, 4.495845703125, 4.4977109375, 4.4962900390625, 4.4963984375, 4.4945029296875]",tokens/s,14.016598170841762,kWh,0.00013151262449871866,1.4506799415840399e-05,8.691540286559989e-05,0.00023293482678015895,tokens/kWh,270461.91791431274,,s,630,44.94348873901363,0.07133887101430741,0.0013379155902712727,0.07112695693969727,0.07140997085571289,0.07179733123779297,0.08129961662292481,"[0.08122982025146484, 0.07254121398925781, 0.07181820678710937, 0.07144448089599609, 0.07138304138183593, 0.07091404724121093, 0.07098143768310547, 0.07100434875488282, 0.07094195556640626, 0.071146240234375, 0.071021728515625, 0.0710597152709961, 0.07092899322509766, 0.07102249908447265, 0.07109391784667969, 0.07095750427246093, 0.07105503845214843, 0.07107001495361329, 0.07100415802001953, 0.07111065673828125, 0.07107373046875, 0.07098783874511719, 0.07091200256347656, 0.07103414154052734, 0.07121379089355469, 0.07111676788330078, 0.07108396911621094, 0.07098719787597656, 0.07104303741455079, 0.07109808349609376, 0.07105181121826172, 0.07104966735839843, 0.07104307556152344, 0.07107161712646484, 0.07113740539550781, 0.07101849365234375, 0.07109555053710938, 0.07126297760009766, 0.07108972930908203, 0.07106604766845703, 0.07102464294433594, 0.07112703704833985, 0.0710635528564453, 0.07123104095458985, 0.07107218933105469, 0.07121920013427735, 0.07115289306640625, 0.07128755187988281, 0.07122329711914062, 0.07109385681152344, 0.07112099456787109, 0.07154310607910157, 0.07112703704833985, 0.07115366363525391, 0.07118585968017578, 0.07123967742919922, 0.07132332611083984, 0.07128300476074219, 0.0713447036743164, 0.07127244567871094, 
0.07128057861328126, 0.07124998474121094, 0.07136460876464844, 0.08103526306152344, 0.07262185668945312, 0.07229625701904296, 0.07158195495605468, 0.0709879379272461, 0.07089689636230469, 0.07092237091064453, 0.07102118682861328, 0.07097727966308594, 0.07108777618408203, 0.07111331176757812, 0.07111663818359375, 0.07099817657470703, 0.07105331420898438, 0.07088889312744141, 0.0710948486328125, 0.07106092834472656, 0.07112147521972656, 0.07107542419433593, 0.07121711730957031, 0.07110700988769532, 0.07112223815917969, 0.07108393859863281, 0.07108483123779297, 0.07107568359375, 0.07120502471923829, 0.0710730209350586, 0.07099807739257813, 0.07113504028320312, 0.07105830383300782, 0.07095500946044922, 0.07101993560791016, 0.07119449615478515, 0.07111894226074218, 0.0710703353881836, 0.07101398468017578, 0.071102783203125, 0.07111190032958985, 0.07110131072998047, 0.07123967742919922, 0.071271484375, 0.0711628189086914, 0.07113056182861328, 0.07129964447021485, 0.07120896148681641, 0.07123056030273438, 0.0713511962890625, 0.07120486450195312, 0.07124582672119141, 0.07115980529785157, 0.07123763275146484, 0.07117414093017578, 0.0711842269897461, 0.07123779296875, 0.07118761444091797, 0.071144287109375, 0.07114342498779297, 0.07122873687744141, 0.07123197174072265, 0.07127471923828126, 0.07134819030761719, 0.07123356628417969, 0.07133554840087891, 0.08136768341064453, 0.07243952178955078, 0.07167391967773437, 0.07135254669189453, 0.07080960083007813, 0.07082109069824219, 0.0709189453125, 0.07093247985839844, 0.07085817718505859, 0.0708603515625, 0.0709908447265625, 0.07094012451171874, 0.07084905242919921, 0.07091609954833984, 0.07091814422607422, 0.0708853759765625, 0.07085465240478515, 0.07090115356445313, 0.0710450897216797, 0.07085276794433594, 0.07084214019775391, 0.07091257476806641, 0.07089523315429687, 0.07094265747070312, 0.07099449920654297, 0.07092566680908204, 0.07101302337646484, 0.07091168212890625, 0.0709020767211914, 0.07092201232910156, 0.07091535949707031, 0.0710289306640625, 0.07147344207763671, 0.07099440002441407, 0.07107097625732423, 0.07098563385009765, 0.07111971282958984, 0.07105331420898438, 0.07109410858154297, 0.07100595092773437, 0.07103939056396484, 0.07105945587158204, 0.0710712661743164, 0.07104319763183593, 0.0710225601196289, 0.07118067169189453, 0.07113465881347657, 0.07110918426513672, 0.07109324645996094, 0.07114035034179687, 0.07105535888671875, 0.07112703704833985, 0.07121510314941407, 0.07112908935546874, 0.07125401306152344, 0.07120486450195312, 0.07115904235839844, 0.07118281555175782, 0.07123177337646484, 0.07126153564453125, 0.07125635528564453, 0.07127283477783203, 0.07128806304931641, 0.08119580841064453, 0.07281254577636719, 0.07198310089111327, 0.07153049468994141, 0.07109622192382813, 0.0709551010131836, 0.07091337585449219, 0.07097513580322265, 0.07088626861572266, 0.07098524475097656, 0.07088188934326171, 0.07094377899169922, 0.07101538848876954, 0.07108761596679687, 0.07089376068115234, 0.07094918060302734, 0.07099353790283203, 0.07077721405029297, 0.071019775390625, 0.0710818862915039, 0.07104115295410156, 0.07111484527587891, 0.07113587188720703, 0.0712042236328125, 0.07125465393066406, 0.07130521392822266, 0.07122723388671875, 0.07123104095458985, 0.07114364624023438, 0.07126048278808594, 0.07118649291992188, 0.0712069091796875, 0.07130473327636719, 0.07123811340332031, 0.07124969482421875, 0.07127677154541015, 0.07138098907470704, 0.07134822082519532, 0.07123069000244141, 0.07121385955810547, 0.07128253173828125, 0.07116979217529297, 
0.07120687866210937, 0.0712708511352539, 0.0712437744140625, 0.07126630401611328, 0.07136857604980469, 0.07125997161865234, 0.07121132659912109, 0.07130467224121094, 0.07138153839111328, 0.07173324584960937, 0.07130316925048828, 0.07139286041259765, 0.07122499084472657, 0.07127072143554687, 0.07127836608886719, 0.0712108154296875, 0.07211504364013673, 0.07121644592285156, 0.07147795104980469, 0.07146598052978516, 0.0713861083984375, 0.08151289367675782, 0.07233740997314453, 0.07185794830322266, 0.07125424194335937, 0.0708116455078125, 0.07083417510986328, 0.0707952651977539, 0.07083964538574218, 0.07074470520019531, 0.07077686309814453, 0.07079036712646485, 0.07089027404785156, 0.07085257720947266, 0.07098297882080078, 0.0708512954711914, 0.07104716491699219, 0.0708116455078125, 0.07089561462402344, 0.07091529846191406, 0.07100054168701173, 0.0708856964111328, 0.0709048309326172, 0.07097404479980468, 0.07085062408447265, 0.07091030120849609, 0.0709775390625, 0.07100211334228515, 0.07108787536621093, 0.0709585952758789, 0.0710066909790039, 0.0709716796875, 0.07099187469482422, 0.07094406127929688, 0.071006591796875, 0.07109254455566406, 0.07112019348144531, 0.07111750030517579, 0.07112274932861327, 0.07122348785400391, 0.0712499237060547, 0.07112687683105469, 0.0712000961303711, 0.07123177337646484, 0.07118083190917969, 0.07119229125976563, 0.07114985656738282, 0.07115366363525391, 0.07106969451904296, 0.07119667053222656, 0.07129293060302734, 0.07130316925048828, 0.07118953704833984, 0.07133478546142578, 0.0713090591430664, 0.07127894592285157, 0.07123709106445313, 0.07127094268798828, 0.0713602523803711, 0.0712391357421875, 0.07141609954833984, 0.07137741088867187, 0.07178828430175781, 0.07155693054199219, 0.08175043487548828, 0.07242384338378906, 0.07171004486083984, 0.0714158706665039, 0.07096991729736328, 0.07100418853759766, 0.07103667449951172, 0.07083753967285156, 0.07085327911376953, 0.07109458923339844, 0.07095091247558594, 0.07083757019042969, 0.07085536193847657, 0.07083417510986328, 0.07092387390136719, 0.07091014099121094, 0.07097904205322265, 0.07104537963867187, 0.07112710571289063, 0.07103494262695312, 0.07095286560058593, 0.07094319915771484, 0.07105702209472656, 0.07119705963134766, 0.07104486083984375, 0.07112115478515625, 0.07105535888671875, 0.07102464294433594, 0.07100825500488281, 0.07107379150390625, 0.07112006378173828, 0.07116678619384766, 0.07108403015136719, 0.07170867156982422, 0.07116799926757812, 0.07114745330810547, 0.07113938903808593, 0.07123267364501953, 0.07131632232666016, 0.07109363555908203, 0.07118707275390625, 0.07110655975341797, 0.07107788848876953, 0.07103871917724609, 0.07174534606933594, 0.07122771453857422, 0.07132115173339844, 0.07127471923828126, 0.07129942321777344, 0.07127654266357422, 0.07126198577880859, 0.07129519653320313, 0.07137689971923829, 0.07120486450195312, 0.07123149108886719, 0.07132978820800781, 0.0713912353515625, 0.0713969955444336, 0.0714181137084961, 0.07141593933105468, 0.07148070526123047, 0.07146482849121094, 0.07144306945800781, 0.08281702423095703, 0.07240873718261719, 0.07174940490722656, 0.07140975952148437, 0.07089814758300782, 0.07102668762207032, 0.07097344207763671, 0.07090790557861328, 0.07095295715332031, 0.07096115112304688, 0.07081346893310547, 0.0711538848876953, 0.07112089538574219, 0.07108403015136719, 0.07107778930664063, 0.07103702545166016, 0.07103635406494141, 0.07101907348632812, 0.07115145874023437, 0.07108009338378907, 0.07120896148681641, 0.07121715545654297, 0.07107299041748047, 
0.07110896301269531, 0.07096521759033203, 0.07112895965576171, 0.07110902404785156, 0.07112723541259766, 0.07106678771972656, 0.0710848617553711, 0.07110044860839844, 0.07114895629882813, 0.07109439849853516, 0.07104889678955079, 0.07119542694091797, 0.0711352310180664, 0.07113113403320312, 0.0711060791015625, 0.0711233901977539, 0.07106768035888672, 0.07114339447021484, 0.07107778930664063, 0.07123776245117187, 0.07121510314941407, 0.07120281219482422, 0.071253662109375, 0.07157494354248047, 0.07129145812988281, 0.07124006652832031, 0.07131136322021485, 0.07120182037353516, 0.07130358123779297, 0.07126687622070313, 0.07140962982177734, 0.07138102722167969, 0.07150959777832032, 0.07167820739746093, 0.07141187286376953, 0.07136255645751953, 0.0713707504272461, 0.07131136322021485, 0.0713700180053711, 0.07133052825927734, 0.081328125, 0.07261500549316406, 0.0719427490234375, 0.07152451324462891, 0.07095279693603515, 0.07112675476074219, 0.07101500701904297, 0.0710013427734375, 0.07104742431640625, 0.07118899536132813, 0.07114342498779297, 0.07110655975341797, 0.0710202865600586, 0.07108838653564453, 0.0710696029663086, 0.07125411224365234, 0.0710696029663086, 0.0710964126586914, 0.07108198547363281, 0.07108918762207031, 0.07113359832763672, 0.07108252716064453, 0.0710219497680664, 0.07106947326660157, 0.0710255355834961, 0.07106764984130859, 0.070940673828125, 0.07106559753417968, 0.07100822448730469, 0.07102877044677734, 0.07111869049072266, 0.07119805145263672, 0.07114220428466797, 0.07110844421386718, 0.07104713439941407, 0.07128822326660156, 0.0712425308227539, 0.07130316925048828, 0.0712437744140625, 0.07122720336914062, 0.07110403442382812, 0.07125218963623046, 0.0712012176513672, 0.07112089538574219, 0.07109222412109376, 0.07124172973632813, 0.07110028839111328, 0.07118246459960938, 0.07124172973632813, 0.07124140930175782, 0.07125023651123047, 0.07125357055664062, 0.07130770874023437, 0.07131881713867187, 0.07125049591064453, 0.0713770523071289, 0.0713175048828125, 0.07133369445800782, 0.0712972183227539, 0.07144652557373046, 0.07118438720703125, 0.07134719848632813, 0.0713881607055664, 0.08253663635253906, 0.07243312072753906, 0.07169078063964844, 0.07136665344238281, 0.07097478485107422, 0.07138905334472656, 0.0708301773071289, 0.07089839935302734, 0.0708730239868164, 0.07093049621582032, 0.0708381118774414, 0.07089373016357423, 0.07096905517578125, 0.07099954986572266, 0.07097196960449219, 0.07105558776855468, 0.07094429016113281, 0.07100399780273438, 0.07108822631835937, 0.07102047729492188, 0.07104163360595703, 0.07113724517822266, 0.0710185317993164, 0.07100621032714843, 0.07100617980957032, 0.07113526153564453, 0.07112499237060547, 0.07106764984130859, 0.07107894134521485, 0.07104815673828126, 0.0709775390625, 0.07102220916748046, 0.07105951690673828, 0.07101789093017578, 0.07105219268798828, 0.07097309112548827, 0.07109462738037109, 0.07117135620117188, 0.07134690856933594, 0.07127654266357422, 0.07113728332519531, 0.07117139434814453, 0.07104486083984375, 0.07114755249023437, 0.07123651123046874, 0.0711720962524414, 0.07122943878173828, 0.07113021087646484, 0.07113164520263672, 0.07120681762695312, 0.07121298980712891, 0.07120748901367188, 0.07243097686767579, 0.0719468765258789, 0.0721981430053711, 0.07137670135498046, 0.07125628662109375, 0.07115773010253906, 0.07114752197265625, 0.0712437744140625, 0.07125606536865234, 0.07130464172363281, 0.07136109161376954, 0.08158646392822265, 0.07253977966308593, 0.07180473327636719, 0.07145935821533203, 0.07088511657714844, 
0.07089360046386718, 0.07093670654296876, 0.07095337677001953, 0.07087513732910156, 0.0710635528564453, 0.0709939193725586, 0.07103692626953124, 0.0709520034790039, 0.07093897247314453, 0.07087165069580079, 0.07099715423583984, 0.07094358062744141, 0.07103683471679688, 0.07093209838867187, 0.07099440002441407, 0.070830078125, 0.07111190032958985, 0.07124371337890625, 0.07106646728515625, 0.07104099273681641, 0.07113065338134765, 0.07103948974609375, 0.07106559753417968, 0.07109222412109376, 0.07097958374023437, 0.07099545288085937, 0.0710654067993164, 0.07097968292236329, 0.07103753662109374, 0.07112089538574219, 0.07104512023925781, 0.07126630401611328, 0.07113836669921875, 0.07118739318847657, 0.0712273941040039, 0.07119257354736327, 0.07137484741210938, 0.07110655975341797, 0.0711493148803711, 0.07116604614257813, 0.07113254547119141, 0.07110940551757812, 0.07112413024902343, 0.07112940979003907, 0.07120336151123047, 0.07114137268066406, 0.07249100494384765, 0.07196002960205078, 0.07161090850830078, 0.07111996459960937, 0.07117302703857421, 0.07116185760498046, 0.07120063781738281, 0.07127664184570312, 0.07120486450195312, 0.0712704315185547, 0.07129702758789062, 0.07121715545654297]",tokens/s,14.017603387632029,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 422.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 328.12 MiB is free. Process 210385 has 14.42 GiB memory in use. Of the allocated memory 14.22 GiB is allocated by PyTorch, and 91.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,819.757056,4900.913152,0.0,4498.39104,4467.073024,s,1,14.0958232421875,14.0958232421875,0.0,14.0958232421875,14.0958232421875,14.0958232421875,14.0958232421875,[14.0958232421875],,kWh,0.00020411292603334912,2.2507998501803273e-05,6.653088655800277e-05,0.00029315181109315516,,MB,1349.705728,5469.241344,0.0,5054.13632,4891.887616,s,10,10.266162475585936,1.0266162475585936,0.0054737403646174495,1.026439270019531,1.0335439208984376,1.0346160766601562,1.0354738012695313,"[1.0164664916992188, 1.0203831176757812, 1.023156982421875, 1.02528271484375, 1.0264105224609374, 1.026468017578125, 1.035688232421875, 1.02985693359375, 1.029143798828125, 1.0333056640625]",tokens/s,249.3628954429624,kWh,2.9933798422496997e-05,3.300701058696321e-06,1.977212692879926e-05,5.3006626409992576e-05,tokens/kWh,4829584.852654195,MB,1374.334976,5483.921408,0.0,5068.816384,4891.890176,s,10,47.77382080078125,4.777382080078125,0.007082660639085872,4.775672851562501,4.786922509765625,4.788916088867187,4.790510952148437,"[4.76849755859375, 4.76989404296875, 4.77000732421875, 4.77542724609375, 4.77591845703125, 4.77417431640625, 4.78092724609375, 4.7864794921875, 4.78158544921875, 4.79090966796875]",tokens/s,13.187138676371005,kWh,0.00013992151799500409,1.5434918101586717e-05,9.285018539120227e-05,0.00024820662148779303,tokens/kWh,253820.78698129486,,s,630,47.77154670715332,0.07582785191611638,0.0014800845251917352,0.07554291152954101,0.07625243759155274,0.07681034545898438,0.08658680892944336,"[0.08657119750976562, 0.07677318572998047, 0.07611567687988281, 0.07578047943115235, 0.07525901031494141, 0.07526274871826172, 0.07530818939208984, 0.07532630157470703, 0.0753031997680664, 0.07532825469970703, 0.07527113342285156, 0.07528050994873046, 0.07531027221679687, 0.07537324523925781, 0.07544588470458985, 0.07536201477050782, 0.07531177520751953, 0.07532371520996094, 0.07540089416503906, 0.07544012451171875, 0.07540940856933594, 0.07539078521728515, 0.0754136962890625, 0.07537203216552735, 0.0753363494873047, 0.07540252685546875, 0.0754120330810547, 0.07551385498046875, 0.07540262603759766, 0.07538751983642578, 0.07535977935791016, 0.07542825317382812, 0.07540332794189453, 0.07556025695800782, 0.07554755401611328, 0.07559571075439453, 0.07549520111083985, 0.07553215789794922, 0.07545283508300782, 0.07554972839355469, 0.0754593276977539, 0.07553135681152344, 0.07556591796875, 0.07562246704101562, 0.07553228759765625, 0.07554048156738281, 0.07554252624511719, 0.07565676879882813, 0.07551840209960937, 0.07558348846435547, 0.07560192108154297, 0.07570419311523438, 0.075587646484375, 0.07556288146972656, 0.07561235046386719, 0.07565068817138672, 
0.07551219177246093, 0.07559327697753906, 0.07558399963378906, 0.07573497772216797, 0.07566767883300782, 0.07566928100585937, 0.07573299407958985, 0.08659318542480468, 0.0768186264038086, 0.07613346862792969, 0.0757293472290039, 0.07526457977294922, 0.07538050842285156, 0.07523971557617187, 0.07527184295654298, 0.07526624298095703, 0.07532546997070312, 0.07530883026123047, 0.07535430145263672, 0.0752166748046875, 0.0753438720703125, 0.07534941101074219, 0.0754490203857422, 0.07530003356933594, 0.07532025909423828, 0.07529494476318359, 0.07536822509765626, 0.07532157135009766, 0.07545629119873047, 0.07537049865722656, 0.07544630432128906, 0.07529878234863281, 0.07538620758056641, 0.07544911956787109, 0.07553353881835938, 0.07542982482910156, 0.07541532897949219, 0.07548432159423828, 0.07548825836181641, 0.0754448013305664, 0.07548457336425782, 0.0754716796875, 0.07557734680175782, 0.07549132537841798, 0.0754764175415039, 0.07549174499511718, 0.07549759674072265, 0.07545040130615234, 0.07552003479003906, 0.07554444885253907, 0.07565106964111327, 0.07552812957763672, 0.07554473876953124, 0.07555260467529297, 0.07558364868164062, 0.07545622253417969, 0.07553052520751953, 0.07561939239501952, 0.07568275451660156, 0.07560749053955078, 0.07565574645996094, 0.07561830139160156, 0.07570947265625, 0.07698473358154297, 0.07563731384277343, 0.075720703125, 0.07579798126220703, 0.07564348602294922, 0.07563667297363282, 0.07565119934082032, 0.0863581085205078, 0.07697138977050781, 0.07623308563232421, 0.07581712341308594, 0.07527542114257812, 0.07534883117675781, 0.07529267120361328, 0.07525545501708984, 0.07520905303955078, 0.07528857421875, 0.07524352264404296, 0.07528966522216797, 0.07528463745117188, 0.07521564483642579, 0.07529782104492187, 0.07535062408447266, 0.07536466979980469, 0.07534579467773438, 0.07539116668701172, 0.07531311798095704, 0.07526982116699218, 0.07537849426269531, 0.0755074234008789, 0.07546144104003906, 0.07536348724365234, 0.0753897933959961, 0.07535001373291016, 0.07544563293457031, 0.07539161682128906, 0.0754892807006836, 0.07561727905273438, 0.07553692626953125, 0.07543196868896485, 0.07542195129394531, 0.07545388793945312, 0.07555763244628906, 0.07547698974609375, 0.07555593872070313, 0.07553049468994141, 0.07565379333496093, 0.07550486755371094, 0.07549542236328124, 0.07565155029296874, 0.07569039916992187, 0.07566944122314453, 0.07558755493164063, 0.07555072021484376, 0.07577375793457031, 0.07556934356689453, 0.0756467514038086, 0.0755642852783203, 0.07565122985839844, 0.07549120330810546, 0.07564588928222657, 0.07576274871826172, 0.07635977935791016, 0.07567967987060546, 0.07566223907470702, 0.07568771362304688, 0.07573474884033203, 0.0756495361328125, 0.07563875579833984, 0.07568796539306641, 0.08697212982177735, 0.07685558319091797, 0.07616659545898438, 0.07573741149902344, 0.07527568054199218, 0.07538470458984375, 0.0753930892944336, 0.07633164978027343, 0.07575068664550781, 0.07545132446289063, 0.0752845458984375, 0.07536844635009765, 0.07532921600341796, 0.07537519836425781, 0.07547670745849609, 0.07540531158447265, 0.07524556732177734, 0.07529235076904296, 0.07536172485351562, 0.07546562957763672, 0.07542189025878906, 0.07547865295410157, 0.07600553894042969, 0.07543318176269531, 0.07532227325439453, 0.07542156982421876, 0.07551590728759766, 0.07550361633300781, 0.07542784118652343, 0.07545584106445312, 0.07544898986816406, 0.07542704010009765, 0.07540611267089843, 0.075480224609375, 0.07549021148681641, 0.07716876983642579, 0.07553123474121094, 
0.07563145446777343, 0.0755937271118164, 0.07561420440673829, 0.07548722839355469, 0.07550361633300781, 0.07555481719970703, 0.07562393951416016, 0.07550950622558594, 0.07563913726806641, 0.07556752014160156, 0.07567900848388671, 0.0762740478515625, 0.07562684631347656, 0.0756162567138672, 0.07572854614257812, 0.07557129669189454, 0.07561756896972656, 0.07564796447753906, 0.07572876739501953, 0.07560128021240234, 0.07566732788085938, 0.07569245147705078, 0.07580921936035157, 0.07571177673339843, 0.07576255798339844, 0.07585273742675781, 0.08651814270019531, 0.07677049255371093, 0.07604726409912109, 0.07563878631591797, 0.07533331298828125, 0.0753760986328125, 0.07537340545654297, 0.07535353851318359, 0.07535465240478516, 0.07567974090576172, 0.0753152313232422, 0.07543603515625, 0.07541468811035157, 0.07549935913085938, 0.07539158630371094, 0.07577641296386718, 0.07577804565429687, 0.07553404998779296, 0.07546697235107422, 0.07545043182373047, 0.07537664031982422, 0.07550482940673828, 0.075498046875, 0.0758397445678711, 0.07543193817138671, 0.07542765045166015, 0.0761995849609375, 0.07552668762207031, 0.07540697479248047, 0.07543385314941406, 0.07583999633789062, 0.07556710052490234, 0.0756490249633789, 0.07554268646240235, 0.07551983642578125, 0.07558348846435547, 0.07547917175292969, 0.07556492614746094, 0.07554790496826172, 0.07556172943115234, 0.07557132720947266, 0.07566236877441407, 0.07551817321777343, 0.07559846496582032, 0.07555276489257813, 0.07552108764648438, 0.07548028564453126, 0.07566307067871093, 0.07553158569335937, 0.07557599639892579, 0.07567276763916016, 0.07576044464111328, 0.07562620544433593, 0.07562799835205078, 0.07571334075927734, 0.0770191650390625, 0.07636124420166016, 0.07599305725097656, 0.07566329956054688, 0.07571715545654296, 0.07560153961181641, 0.07562630462646484, 0.07563116455078125, 0.08808444976806641, 0.07705801391601562, 0.07633574676513671, 0.07580825805664063, 0.07523407745361328, 0.07528828430175781, 0.07531139373779297, 0.0753497314453125, 0.0753121566772461, 0.0752649917602539, 0.07525775909423828, 0.07530300903320312, 0.07533324432373047, 0.0754916763305664, 0.0754217300415039, 0.0753748779296875, 0.0754846420288086, 0.07536870574951172, 0.07544947052001953, 0.07540825653076172, 0.07536224365234374, 0.07531501007080078, 0.07531273651123047, 0.0753568344116211, 0.07656140899658204, 0.07601443481445312, 0.07566678619384766, 0.07541228485107422, 0.07534182739257812, 0.07546675109863281, 0.07539884948730469, 0.0754527359008789, 0.07539859008789063, 0.07536819458007812, 0.07543462371826172, 0.07548899078369141, 0.07544175720214844, 0.07546096038818359, 0.07545299530029297, 0.07548937225341797, 0.07542169952392579, 0.07545433807373046, 0.07547277069091797, 0.07563868713378906, 0.07553676605224609, 0.07610111999511719, 0.07664387512207031, 0.07626009368896484, 0.07553644561767578, 0.0755814437866211, 0.0756654052734375, 0.07564854431152344, 0.07552480316162109, 0.07557097625732422, 0.07551171112060547, 0.07566960144042968, 0.07553561401367187, 0.07560902404785157, 0.07560787200927735, 0.07582006072998047, 0.0756410903930664, 0.0756575698852539, 0.07570259094238281, 0.08696959686279297, 0.0768519058227539, 0.07722096252441406, 0.07566025543212891, 0.07526399993896485, 0.075275390625, 0.0752540512084961, 0.07524617767333984, 0.07525772857666016, 0.07525910186767579, 0.07529564666748047, 0.07526412963867188, 0.0752204818725586, 0.07529046630859375, 0.07532972717285157, 0.07676892852783203, 0.07610643005371094, 0.07562035369873046, 0.07546288299560547, 
0.07540243530273437, 0.07529682922363282, 0.07528707122802734, 0.07536358642578125, 0.07551411437988281, 0.07543145751953124, 0.07541871643066406, 0.0753870391845703, 0.07545622253417969, 0.07532355499267578, 0.07545021057128906, 0.0758661117553711, 0.07663206481933593, 0.07609139251708984, 0.07561011505126954, 0.07545670318603516, 0.07557689666748046, 0.07551631927490235, 0.07556285095214844, 0.07552188873291016, 0.07557955169677734, 0.07543807983398437, 0.07551590728759766, 0.0755445785522461, 0.07571177673339843, 0.07599343872070312, 0.07694579315185547, 0.07621222686767579, 0.07587840270996093, 0.07553596496582031, 0.07558956909179687, 0.07565555572509766, 0.07577308654785156, 0.0756534423828125, 0.07561190032958984, 0.0756473617553711, 0.07573964691162109, 0.07562601470947265, 0.07560620880126953, 0.07591903686523438, 0.07703376007080077, 0.07632086181640625, 0.07635785675048828, 0.07602806091308593, 0.0868249282836914, 0.0769249267578125, 0.07608464050292969, 0.07562646484375, 0.0752786865234375, 0.0753663330078125, 0.07536457824707031, 0.07534185791015625, 0.07527843475341797, 0.0753090591430664, 0.07533356475830078, 0.0754483871459961, 0.07538687896728516, 0.0753617935180664, 0.07537260437011718, 0.07546870422363282, 0.07686921691894531, 0.07632969665527344, 0.07598054504394532, 0.0755381088256836, 0.0753938217163086, 0.07546409606933593, 0.0754900131225586, 0.07553948974609374, 0.07553724670410156, 0.07553363037109374, 0.07554505920410157, 0.07556281280517578, 0.07541011047363282, 0.07574681854248047, 0.07680022430419922, 0.07621616363525391, 0.07562815856933594, 0.07552873229980468, 0.07556095886230468, 0.07562649536132812, 0.07558553314208985, 0.07559590148925781, 0.0756447982788086, 0.07569510650634766, 0.07558029174804687, 0.07562457275390624, 0.07569436645507813, 0.07566130828857422, 0.07565503692626953, 0.07695362854003907, 0.07636946868896484, 0.0760005111694336, 0.075615234375, 0.07576332855224609, 0.07616486358642578, 0.07597443389892578, 0.07569414520263672, 0.07568217468261719, 0.07574729919433594, 0.07583686065673828, 0.0756803207397461, 0.07591766357421875, 0.07906018829345703, 0.07648960113525391, 0.07600943756103516, 0.07570845031738281, 0.07577801513671875, 0.08679631805419921, 0.07692489624023438, 0.07623452758789062, 0.07577606201171876, 0.07527756500244141, 0.07534275054931641, 0.07536383819580078, 0.07536691284179688, 0.07540354919433594, 0.07530585479736328, 0.07536287689208984, 0.07537673950195313, 0.07537664031982422, 0.07542569732666016, 0.07535234832763672, 0.07534719848632812, 0.07533606719970704, 0.07541193389892578, 0.07544198608398438, 0.07544226837158204, 0.07537049865722656, 0.07540656280517578, 0.0760389404296875, 0.07675289916992188, 0.0760791015625, 0.07558732604980468, 0.07543424224853515, 0.0754521255493164, 0.07543836975097656, 0.07544019317626953, 0.07547897338867188, 0.07549235534667968, 0.07543910217285156, 0.07542578887939454, 0.07548271942138672, 0.07605699157714843, 0.07671305847167968, 0.07606166076660156, 0.07566483306884765, 0.07558121490478516, 0.075491455078125, 0.07554313659667969, 0.07557881927490234, 0.07571513366699219, 0.07555276489257813, 0.0755772476196289, 0.07557708740234376, 0.07562854766845703, 0.0761098861694336, 0.0768361587524414, 0.07652655792236328, 0.07593312072753906, 0.0756803207397461, 0.07690793609619141, 0.07641741180419923, 0.0762515869140625, 0.07562754821777344, 0.07565494537353516, 0.07568035125732422, 0.07578768157958984, 0.07563785552978515, 0.07568482971191406, 0.07589087677001953, 0.0885060806274414, 
0.07679414367675781, 0.07611939239501953, 0.075669921875, 0.07525759887695313, 0.07533824157714844, 0.07532681274414063, 0.07537088012695313, 0.07525843048095703, 0.07527804565429687, 0.07528857421875, 0.0754155502319336, 0.07538278198242188, 0.07573670196533203, 0.07651068878173828, 0.07606774139404297, 0.07598432159423828, 0.07538249969482422, 0.07540758514404297, 0.0754012451171875, 0.07662652587890625, 0.07610121917724609, 0.07573136138916016, 0.07541484832763672, 0.07535481262207032, 0.07546272277832031, 0.07551993560791016, 0.0755051498413086, 0.0754417953491211, 0.07546546936035156, 0.07544640350341797, 0.07551494598388672, 0.07610873413085938, 0.076712158203125, 0.07617104339599609, 0.07586969757080078, 0.07545613098144531, 0.0755516128540039, 0.07555174255371094, 0.07570738983154297, 0.07568998718261719, 0.07565878295898437, 0.07688854217529296, 0.0763984603881836, 0.07585529327392578, 0.07561491394042968, 0.07583660888671875, 0.07717705535888672, 0.07642960357666016, 0.07648416137695313, 0.0756579818725586, 0.07567155456542969, 0.07561593627929687, 0.07557561492919922, 0.07565312194824218, 0.07573680114746094, 0.0770032958984375, 0.07638601684570312, 0.07592736053466796, 0.07574918365478515, 0.07622492980957031, 0.07685225677490234, 0.07637474822998047]",tokens/s,13.187766430548997,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 184500 has 14.73 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 137.12 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1071.198208,9784.19712,0.0,9388.949504,9304.608768,s,1,33.34423828125,33.34423828125,0.0,33.34423828125,33.34423828125,33.34423828125,33.34423828125,[33.34423828125],,kWh,0.0007540017895958953,8.316458329058557e-05,0.00027355355217595356,0.0011107199250624345,,MB,1440.055296,10197.336064,0.0,9789.505536,9597.896704,s,10,8.040341491699218,0.8040341491699218,0.0023356114356497845,0.8030022888183594,0.8071809997558594,0.8079320526123047,0.8085328948974609,"[0.8070140991210938, 0.80868310546875, 0.8029677734375, 0.8014375610351563, 0.8019359741210937, 0.8016148071289062, 0.8054298095703125, 0.8030368041992187, 0.8028924560546875, 0.8053291015625]",tokens/s,318.39443668442726,kWh,2.351742092180127e-05,2.59274873552298e-06,1.414095148369321e-05,4.025112114101746e-05,tokens/kWh,6360071.291011222,MB,1457.840128,10197.336064,0.0,9789.505536,9597.899264,s,10,376.00018359375,37.600018359375,0.16895826576110604,37.603142578125,37.6943734375,37.853501171875,37.980803359375,"[38.01262890625, 37.4249765625, 37.36190625, 37.58154296875, 37.6554140625, 37.65901171875, 37.64826953125, 37.55991796875, 37.4717734375, 37.6247421875]",tokens/s,1.6755310967631987,kWh,0.001098004854404441,0.0001211190368270474,0.0004170829212729071,0.0016362068125043952,tokens/kWh,38503.69007055504,,s,630,375.9972374267582,0.5968210117885044,0.00443225393698296,0.5958622131347656,0.6028890075683593,0.6050607788085938,0.6108657287597656,"[0.6028455810546876, 0.601858154296875, 0.6067220458984375, 0.6018051147460938, 0.6022013549804688, 0.6019547119140625, 0.6029561157226563, 0.6032117919921876, 0.6029594116210938, 0.612116943359375, 0.606504150390625, 0.6009823608398438, 0.6021836547851562, 0.6019022827148437, 0.6039021606445313, 0.604809814453125, 0.6030663452148437, 0.6019942626953125, 0.5999908447265625, 0.6002979125976563, 0.6002705688476563, 0.6017738037109375, 0.6031939086914062, 0.6015057983398437, 0.602144775390625, 0.6057083129882812, 0.6109083862304687, 0.60127978515625, 0.603673095703125, 0.6017578735351562, 0.6011526489257812, 0.6061841430664062, 0.6056959838867187, 0.6005821533203125, 0.6010084228515625, 0.60104052734375, 0.6021704711914062, 0.6035027465820313, 0.6000682983398438, 0.60494677734375, 0.6022489624023437, 0.6039393310546874, 0.6041251220703125, 0.6077747192382813, 0.6067715454101562, 0.6014808349609375, 0.6006743774414063, 0.6013679809570313, 0.604412353515625, 0.6046351928710938, 0.6008729858398437, 0.6072293701171875, 0.603181640625, 0.6019993896484375, 0.6030438232421875, 0.6034349975585938, 0.6034082641601562, 0.6035252685546875, 0.6033141479492188, 0.6079754028320312, 0.6067548217773437, 0.6035963745117188, 0.6037651977539062, 
0.6079428100585937, 0.6043832397460938, 0.6001909790039063, 0.5939834594726563, 0.5917960205078125, 0.5906411743164063, 0.591706298828125, 0.5910667114257813, 0.593160888671875, 0.59062841796875, 0.5940695190429688, 0.5983685302734375, 0.611418212890625, 0.5901310424804688, 0.5917617797851562, 0.59225, 0.5902669067382813, 0.5958062133789063, 0.59508935546875, 0.5886234741210937, 0.5887841796875, 0.5903662109375, 0.5908258056640625, 0.588355224609375, 0.5906476440429688, 0.5884783935546875, 0.5889412231445312, 0.589370849609375, 0.596641845703125, 0.5964757080078125, 0.5919793090820312, 0.5924515991210938, 0.5938770141601563, 0.5919756469726563, 0.605154052734375, 0.6060114135742187, 0.60096923828125, 0.6042640991210938, 0.597992919921875, 0.5971519775390625, 0.597608642578125, 0.5909672241210937, 0.5906780395507812, 0.592594482421875, 0.5959049682617188, 0.5965209350585937, 0.592761962890625, 0.5915247802734375, 0.5908375854492187, 0.5888574829101563, 0.5906227416992188, 0.5963468627929688, 0.5918330688476563, 0.5916201171875, 0.590329833984375, 0.5922201538085937, 0.591213623046875, 0.5905967407226562, 0.5925728759765625, 0.5887221069335937, 0.5880332641601562, 0.5918829956054688, 0.6064515380859375, 0.5910056762695313, 0.5916113891601562, 0.5899713134765625, 0.5907954711914063, 0.5892399291992187, 0.5882429809570312, 0.590924072265625, 0.5881221923828125, 0.5944566040039062, 0.592416748046875, 0.594651123046875, 0.594651123046875, 0.592395751953125, 0.5933265991210938, 0.5949963989257813, 0.6025059204101563, 0.594542724609375, 0.5944998779296875, 0.5925673217773437, 0.5933776245117187, 0.5965089721679687, 0.5941063842773437, 0.59082666015625, 0.5932439575195313, 0.5892044677734375, 0.5920706787109375, 0.5915607299804687, 0.5894164428710937, 0.5919284057617188, 0.5921057739257812, 0.5938305053710937, 0.5996436767578125, 0.5973121337890624, 0.5914080200195313, 0.5909532470703125, 0.5937902221679687, 0.59068212890625, 0.60174609375, 0.59428271484375, 0.588859375, 0.588945068359375, 0.5886996459960937, 0.591849853515625, 0.5910482177734375, 0.5970559692382813, 0.591372314453125, 0.589613037109375, 0.591730712890625, 0.5980591430664063, 0.5909022216796875, 0.590512451171875, 0.589815673828125, 0.5902853393554688, 0.5953883666992188, 0.6008665771484375, 0.5984281616210938, 0.5926400146484375, 0.5929915771484375, 0.5961532592773438, 0.5939130859375, 0.595165283203125, 0.5936787719726563, 0.59468798828125, 0.5955399780273437, 0.6051920166015625, 0.5947493286132812, 0.59442724609375, 0.5943694458007812, 0.5935206298828125, 0.5939766845703125, 0.6018453979492188, 0.5960242919921875, 0.6012723388671875, 0.594763427734375, 0.5954083862304688, 0.596875732421875, 0.5931905517578125, 0.5947828369140625, 0.5952688598632813, 0.5937939453125, 0.5983102416992188, 0.5983355712890625, 0.5950941162109376, 0.5954183349609375, 0.5935399780273437, 0.5976902465820313, 0.5952139892578125, 0.602274169921875, 0.5936903686523437, 0.5936167602539062, 0.59613232421875, 0.5942803344726563, 0.5962844848632812, 0.595685302734375, 0.5948220825195313, 0.5952529907226562, 0.5961361083984374, 0.5995029907226562, 0.6011207885742188, 0.5961404418945313, 0.5949779052734375, 0.5952394409179688, 0.5943582763671875, 0.5994179077148437, 0.5971793823242187, 0.5935636596679688, 0.594302978515625, 0.5938565063476563, 0.5945220947265625, 0.5976268920898438, 0.6006149291992188, 0.5948243408203125, 0.5963108520507813, 0.5984461059570313, 0.601712646484375, 0.5953228759765625, 0.5968281860351563, 0.5951876831054688, 0.5951051025390625, 
0.5932755737304688, 0.6020219116210938, 0.5925532836914063, 0.5928407592773437, 0.5972486572265625, 0.6136115112304688, 0.5967626342773438, 0.5968455810546875, 0.5948057861328125, 0.5957645874023437, 0.6007316284179688, 0.59920263671875, 0.5974547119140625, 0.5957315673828125, 0.5976392822265625, 0.5954722900390625, 0.5965398559570313, 0.6025751342773438, 0.5965640869140625, 0.595261474609375, 0.594862060546875, 0.5949419555664063, 0.595453125, 0.59492626953125, 0.5956987915039063, 0.595679931640625, 0.5981223754882813, 0.6001539306640625, 0.6035088500976562, 0.5981088256835938, 0.5971773681640625, 0.5991137084960938, 0.597984130859375, 0.607182861328125, 0.5975752563476563, 0.5976677856445313, 0.5981531982421875, 0.59568896484375, 0.594882568359375, 0.5987255859375, 0.5973217163085938, 0.596738037109375, 0.5974507446289062, 0.6019256591796875, 0.6013419799804688, 0.5962445068359375, 0.599704833984375, 0.5971033325195313, 0.594956298828125, 0.5952719116210937, 0.608171630859375, 0.5966090087890625, 0.5966104736328125, 0.6002491455078125, 0.5949972534179687, 0.5944392700195312, 0.5959771118164062, 0.5941405639648437, 0.5949609985351563, 0.5957724609375, 0.601207763671875, 0.601726806640625, 0.5959757690429688, 0.596114013671875, 0.5957713623046875, 0.5965967407226562, 0.5979279174804687, 0.603367431640625, 0.60029541015625, 0.59593115234375, 0.5955235595703126, 0.5981327514648438, 0.5978749389648438, 0.5946727294921875, 0.5946375122070312, 0.5988843383789062, 0.6046651611328125, 0.595911376953125, 0.5954580688476563, 0.5949931640625, 0.5955930786132813, 0.5955933227539062, 0.6029557495117187, 0.5952073974609375, 0.5954889526367187, 0.59537060546875, 0.6116347045898437, 0.5965767211914063, 0.595527587890625, 0.6024244995117187, 0.5977281494140625, 0.598308837890625, 0.599946533203125, 0.6005275268554687, 0.5966800537109375, 0.5993351440429687, 0.5958189086914063, 0.5957488403320312, 0.5959925537109375, 0.6074736938476563, 0.595789306640625, 0.6001536254882812, 0.5938220825195313, 0.5955078125, 0.5955349731445313, 0.5965604248046875, 0.5952570190429688, 0.5971381225585938, 0.5971549072265625, 0.60133056640625, 0.6001909790039063, 0.5976555786132812, 0.5961093139648438, 0.59944140625, 0.6011473999023438, 0.5951934814453125, 0.6015812377929688, 0.5974590454101563, 0.5977605590820313, 0.5963837890625, 0.5938460693359375, 0.5969553833007812, 0.5939158935546875, 0.5954846801757813, 0.5952341918945312, 0.5960772094726563, 0.6067825317382812, 0.5993645629882812, 0.596664306640625, 0.5983659057617188, 0.5962693481445313, 0.5970942993164062, 0.59681396484375, 0.5955235595703126, 0.5929987182617188, 0.593972900390625, 0.592578125, 0.5972463989257812, 0.5952061157226562, 0.5933255615234375, 0.596800048828125, 0.5977803955078125, 0.6018182373046875, 0.5944452514648437, 0.5963675537109375, 0.595975830078125, 0.5949536743164062, 0.595263427734375, 0.6091591186523437, 0.595114501953125, 0.60036328125, 0.594218994140625, 0.5989412231445312, 0.5965144653320312, 0.5959823608398438, 0.596597412109375, 0.5954786376953125, 0.595578857421875, 0.6014299926757812, 0.6001397705078125, 0.5972869262695313, 0.5978328247070313, 0.5955162963867188, 0.59441943359375, 0.5973363037109375, 0.607594482421875, 0.5962998046875, 0.5958758544921875, 0.6099005737304688, 0.5968058471679687, 0.5973866577148438, 0.5946961669921875, 0.5964100341796875, 0.5956954956054688, 0.6040125732421875, 0.6104871215820312, 0.5966166381835938, 0.5963967895507812, 0.5998364868164062, 0.5974837036132813, 0.5974556884765625, 0.5977908325195312, 
0.6004901733398438, 0.5943525390625, 0.596400390625, 0.5953576049804687, 0.597854248046875, 0.5953843383789063, 0.5959700317382812, 0.5951561889648438, 0.594907958984375, 0.6006824951171875, 0.6024642333984375, 0.5962158203125, 0.6019788818359375, 0.5938500366210937, 0.5992626342773437, 0.6107612915039062, 0.5971549072265625, 0.6003631591796875, 0.59579296875, 0.5981795654296875, 0.5943314819335938, 0.59375634765625, 0.5951851196289063, 0.5953706665039062, 0.5994371337890625, 0.59782763671875, 0.5985177612304687, 0.5939118041992187, 0.5950873413085938, 0.5945466918945312, 0.5935913696289062, 0.5980929565429688, 0.5980690307617188, 0.5955616455078125, 0.5945672607421875, 0.5931753540039062, 0.5958485717773437, 0.5952047119140625, 0.5952163696289062, 0.5934772338867188, 0.5939346923828125, 0.5987833251953125, 0.6131842651367188, 0.594884521484375, 0.5942286987304688, 0.596968017578125, 0.594314453125, 0.5932551879882813, 0.5972001953125, 0.5957782592773437, 0.5934410400390625, 0.5956968994140625, 0.592969482421875, 0.5955541381835937, 0.5919118041992187, 0.5964756469726562, 0.593744140625, 0.5946019897460938, 0.6026619262695313, 0.5984078979492188, 0.594002197265625, 0.5937841796875, 0.5958027954101562, 0.5939199829101562, 0.5926154174804688, 0.5971343383789063, 0.596537353515625, 0.5970851440429688, 0.5933087158203125, 0.6032945556640625, 0.5925457153320313, 0.5930682373046875, 0.59221337890625, 0.5930604248046875, 0.5945692138671875, 0.5978451538085937, 0.598553466796875, 0.5981306762695312, 0.59144189453125, 0.5925986938476563, 0.5965909423828125, 0.5966292724609376, 0.5924080200195313, 0.5907730712890625, 0.5921033935546876, 0.5935547485351562, 0.59438720703125, 0.5947315673828125, 0.5929389038085937, 0.5950689086914063, 0.5974876098632812, 0.6009405517578125, 0.5931389770507812, 0.59338232421875, 0.5940979614257812, 0.5931397094726563, 0.5954005737304687, 0.5951715087890626, 0.5949296875, 0.5940667724609375, 0.5914609985351562, 0.5911316528320313, 0.5908694458007813, 0.5943107299804687, 0.6021800537109375, 0.5940326538085937, 0.5967232055664062, 0.5942800903320312, 0.6022614135742187, 0.5944718627929687, 0.5990830078125, 0.5948395385742188, 0.5916785888671875, 0.5934662475585938, 0.5949556884765625, 0.5983237915039062, 0.5984083251953125, 0.5919404296875, 0.5921072387695312, 0.5899962768554687, 0.5920009155273438, 0.590489501953125, 0.5917916259765625, 0.5956400146484375, 0.5975126953125, 0.5985182495117187, 0.5944566040039062, 0.594249755859375, 0.594356201171875, 0.5968773193359375, 0.593915771484375, 0.5965591430664062, 0.5984838256835937, 0.5934976806640625, 0.5949558715820312, 0.5969436645507813, 0.59708349609375, 0.5944019165039063, 0.5960962524414063, 0.5939883422851563, 0.6109265747070313, 0.5965557861328125, 0.596955078125, 0.6001993408203125, 0.5978842163085938, 0.5951463623046875, 0.5976583251953125, 0.5990177001953125, 0.5944686279296875, 0.5951160278320312, 0.6075435791015625, 0.5938401489257813, 0.5951651611328125, 0.5972459716796875, 0.5951201171875, 0.5962383422851563, 0.5981963500976563, 0.6022985229492187, 0.595721923828125, 0.5977508544921875, 0.5967196655273438, 0.5956064453125, 0.5976572265625, 0.6013710327148437, 0.6000189208984374, 0.598857666015625, 0.5942784423828125, 0.5940072021484375, 0.594172119140625, 0.5938895874023438, 0.5953923950195312, 0.5947453002929688, 0.5956060180664062, 0.60209765625, 0.602881591796875, 0.596602783203125, 0.5950305786132812, 0.5968320922851562, 0.5974058837890625, 0.5957222290039063, 0.6005103759765625, 0.5988222045898437, 
0.5954149169921875, 0.5952000732421875, 0.5938410034179687, 0.5949718017578125, 0.592651123046875, 0.5953099975585937, 0.5948351440429688, 0.594010986328125, 0.604080078125, 0.5999165649414062, 0.5941574096679687, 0.5981466064453125, 0.5956267700195312, 0.595819580078125, 0.5965194091796875, 0.5995903930664063, 0.60062109375, 0.5965291748046875, 0.5959049682617188, 0.5951348876953125, 0.5948922729492188]",tokens/s,1.6755442255682014,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1029.533696,1613.692928,0.0,1218.445312,1206.173696,s,1,9.286396484375,9.286396484375,0.0,9.286396484375,9.286396484375,9.286396484375,9.286396484375,[9.286396484375],,kWh,6.412064128747564e-05,7.065838385538754e-06,2.272446262401029e-05,9.391094229702468e-05,,MB,1264.78336,1911.488512,0.0,1503.657984,1463.228416,s,10,1.9235433197021485,0.19235433197021484,0.0011884075023120932,0.19304808044433594,0.19332053985595704,0.19348340530395508,0.1936136976623535,"[0.19133229064941407, 0.1917051544189453, 0.19364627075195312, 0.19322828674316406, 0.19320034790039062, 0.19289581298828126, 0.1932348175048828, 0.1898927001953125, 0.191123291015625, 0.1932843475341797]",tokens/s,1330.8772273433406,kWh,5.887339673249622e-06,6.492632882767605e-07,3.903458678319871e-06,1.0440061639846254e-05,tokens/kWh,24520928.02047575,MB,1282.797568,1911.488512,0.0,1503.657984,1463.230976,s,10,19.328538940429688,1.9328538940429687,0.014617236633712626,1.9286599731445313,1.950318566894531,1.9520762145996093,1.953482332763672,"[1.9538338623046876, 1.948485595703125, 1.949927978515625, 1.9435340576171876, 1.9235828857421875, 1.9234945068359375, 1.932079833984375, 1.90653173828125, 1.921828369140625, 1.9252401123046874]",tokens/s,32.59428981888657,kWh,5.5863144591333356e-05,6.161526894123156e-06,3.236473700287956e-05,9.438940848833609e-05,tokens/kWh,667447.767805273,,s,630,19.324281274795513,0.030673462340945288,0.0006182519787459518,0.03063547134399414,0.03119975719451904,0.03146232471466064,0.033014284744262705,"[0.03193935966491699, 0.031149696350097657, 0.031248767852783202, 0.031381376266479494, 0.031077791213989257, 0.030824480056762697, 0.030999040603637694, 0.03083692741394043, 0.031040639877319337, 0.030936031341552733, 0.031209375381469725, 0.03102505683898926, 0.0310784969329834, 0.03102083206176758, 0.03119740867614746, 0.031133184432983397, 0.0311234245300293, 0.03129769515991211, 0.03108697509765625, 0.03099465560913086, 0.031038528442382814, 0.031042272567749024, 0.030817792892456054, 0.030840959548950195, 0.03092313575744629, 0.030785535812377928, 0.03071753692626953, 0.031951263427734376, 0.03122380828857422, 0.03105295944213867, 0.030931808471679687, 0.030823711395263673, 0.031011552810668946, 0.03097577667236328, 0.030938623428344726, 0.030558015823364256, 0.030684032440185548, 0.03052864074707031, 0.03101081657409668, 0.03107904052734375, 0.030738719940185545, 0.030607551574707032, 
0.030889791488647463, 0.030642175674438478, 0.030662656784057617, 0.03060326385498047, 0.03059868812561035, 0.030370271682739258, 0.03100054359436035, 0.03077903938293457, 0.03085919952392578, 0.03078764724731445, 0.03053401565551758, 0.030683135986328124, 0.030838783264160157, 0.030596416473388673, 0.03049235153198242, 0.03050547218322754, 0.030487039566040038, 0.030818304061889647, 0.03225395202636719, 0.03427072143554687, 0.03180390357971191, 0.03107766342163086, 0.03127964782714844, 0.03068671989440918, 0.030993375778198242, 0.03072991943359375, 0.031406080245971676, 0.030779424667358397, 0.030735519409179686, 0.030576864242553712, 0.030505664825439455, 0.03088755226135254, 0.03079315185546875, 0.030740608215332033, 0.031095136642456056, 0.030859487533569336, 0.031346752166748044, 0.03075209617614746, 0.030596992492675782, 0.030626783370971678, 0.030613439559936523, 0.03049251174926758, 0.030511135101318358, 0.030498912811279297, 0.031700544357299805, 0.030793792724609376, 0.03130204772949219, 0.031089920043945313, 0.030810432434082033, 0.030942880630493164, 0.03152518463134766, 0.030717824935913084, 0.03071343994140625, 0.030714879989624022, 0.03059507179260254, 0.03078144073486328, 0.03062716865539551, 0.03125545692443848, 0.031219455718994142, 0.030937183380126954, 0.030879648208618164, 0.030899232864379882, 0.03108963203430176, 0.031246335983276367, 0.03103116798400879, 0.030793855667114258, 0.030855167388916017, 0.030772960662841797, 0.03127734375, 0.0312476806640625, 0.031133951187133788, 0.030928991317749024, 0.031011167526245116, 0.03131999969482422, 0.030591039657592772, 0.03068454360961914, 0.030521440505981445, 0.03163190460205078, 0.030510368347167968, 0.030865888595581054, 0.031190912246704103, 0.031409727096557614, 0.030780223846435546, 0.03100057601928711, 0.03170259284973145, 0.030836511611938476, 0.030798175811767577, 0.030865568161010742, 0.030814079284667967, 0.031085983276367187, 0.03545161437988281, 0.030899744033813476, 0.030393856048583984, 0.030634975433349608, 0.030341119766235353, 0.030765087127685546, 0.030860448837280275, 0.03066143989562988, 0.03079167938232422, 0.03061939239501953, 0.030599424362182617, 0.030365695953369142, 0.030588159561157225, 0.03169356727600098, 0.03065353584289551, 0.030489471435546874, 0.030730272293090822, 0.031198688507080078, 0.03110966491699219, 0.030736576080322264, 0.03064575958251953, 0.030663007736206054, 0.0305511360168457, 0.03038912010192871, 0.030332927703857423, 0.03041279983520508, 0.03041814422607422, 0.03163593673706055, 0.030933311462402344, 0.030840831756591795, 0.03213312149047851, 0.030825792312622072, 0.03113772773742676, 0.030685184478759765, 0.030700288772583007, 0.030644256591796874, 0.030558176040649414, 0.03090185546875, 0.0314619197845459, 0.031014432907104494, 0.030826751708984374, 0.03099068832397461, 0.030715648651123046, 0.03088380813598633, 0.030816064834594727, 0.032008480072021485, 0.030816192626953124, 0.03093231964111328, 0.030886240005493164, 0.031324480056762694, 0.03160857582092285, 0.03093120002746582, 0.030855167388916017, 0.030926847457885744, 0.030863359451293947, 0.030705663681030275, 0.030793727874755858, 0.031332319259643554, 0.031185792922973632, 0.030764671325683595, 0.030676767349243163, 0.030568479537963867, 0.030425664901733398, 0.030551679611206056, 0.03044940757751465, 0.03057491111755371, 0.03210886383056641, 0.03291340637207031, 0.03057663917541504, 0.03140518379211426, 0.030505056381225585, 0.03152060890197754, 0.030688095092773437, 0.03045315170288086, 0.03052409553527832, 
0.03036345672607422, 0.030603456497192382, 0.031088479995727537, 0.030588960647583006, 0.030861440658569335, 0.030713855743408205, 0.03041004753112793, 0.030870208740234373, 0.030443584442138672, 0.030262975692749022, 0.03010380744934082, 0.030627840042114256, 0.030467584609985353, 0.030845024108886718, 0.030916351318359375, 0.031265663146972655, 0.03058870315551758, 0.030965375900268554, 0.030585056304931642, 0.03066080093383789, 0.03046806335449219, 0.030812192916870117, 0.03047216033935547, 0.030414848327636718, 0.030830591201782227, 0.03146265602111816, 0.033055488586425784, 0.030476287841796876, 0.030723487854003906, 0.03152144050598144, 0.030500799179077148, 0.030589088439941407, 0.03032249641418457, 0.03044937515258789, 0.030777664184570314, 0.030623424530029298, 0.030951744079589845, 0.03070358467102051, 0.030611488342285158, 0.030500864028930662, 0.030414848327636718, 0.030332191467285156, 0.034527713775634766, 0.03066320037841797, 0.030447328567504883, 0.03129855918884277, 0.030702592849731446, 0.030822336196899416, 0.030776735305786132, 0.03064694404602051, 0.030756095886230468, 0.030618368148803712, 0.03074483108520508, 0.030727935791015626, 0.030702720642089842, 0.03173465538024902, 0.030705055236816405, 0.030895999908447266, 0.030634687423706054, 0.030855199813842774, 0.031153535842895506, 0.03101145553588867, 0.030814207077026368, 0.03095347213745117, 0.030904319763183592, 0.030494207382202147, 0.030544256210327147, 0.030516544342041017, 0.030260032653808593, 0.030406656265258788, 0.03037942314147949, 0.030209823608398436, 0.030255327224731444, 0.030333248138427735, 0.0306977596282959, 0.03033497619628906, 0.030494720458984374, 0.03036524772644043, 0.030357152938842773, 0.030101728439331055, 0.030089759826660158, 0.02990492820739746, 0.03043078422546387, 0.030320671081542967, 0.030384544372558595, 0.03012124824523926, 0.030194047927856446, 0.030441791534423827, 0.03126601600646973, 0.03012620735168457, 0.030314464569091797, 0.030141151428222657, 0.030060543060302734, 0.030625343322753906, 0.030757312774658204, 0.030480384826660156, 0.03041436767578125, 0.03031907272338867, 0.032696319580078126, 0.030203903198242187, 0.029980031967163086, 0.029765344619750975, 0.02974742317199707, 0.029896928787231446, 0.030038496017456055, 0.030400320053100584, 0.030422880172729493, 0.03044576072692871, 0.031105024337768555, 0.03103664016723633, 0.030429855346679687, 0.030050207138061523, 0.030120159149169923, 0.0303570556640625, 0.030480255126953126, 0.030326335906982423, 0.03037696075439453, 0.030736032485961913, 0.03075107192993164, 0.031100639343261717, 0.030785951614379883, 0.0306112003326416, 0.030468223571777343, 0.030449663162231445, 0.030232479095458984, 0.03036739158630371, 0.030532032012939452, 0.03058687973022461, 0.03107427215576172, 0.030882976531982423, 0.030563199996948242, 0.030829952239990233, 0.030816415786743163, 0.030998239517211913, 0.030880159378051757, 0.030681440353393555, 0.030646272659301758, 0.030676128387451172, 0.030503776550292967, 0.030681087493896485, 0.030472192764282226, 0.03054204750061035, 0.030822175979614258, 0.03057868766784668, 0.030535680770874023, 0.030627840042114256, 0.03066499137878418, 0.03187235260009766, 0.03351590347290039, 0.030826496124267577, 0.031231231689453125, 0.031242528915405274, 0.030554399490356446, 0.03014214324951172, 0.029775999069213868, 0.029433280944824218, 0.029615039825439452, 0.029750783920288085, 0.02986240005493164, 0.02972159957885742, 0.02958639907836914, 0.029600799560546873, 0.02950003242492676, 0.029663616180419922, 
0.030043167114257814, 0.030362592697143555, 0.030826303482055666, 0.030253440856933593, 0.03036518478393555, 0.03053932762145996, 0.03043609619140625, 0.030784448623657226, 0.030197471618652345, 0.029794944763183593, 0.029920927047729494, 0.029939775466918946, 0.030177215576171874, 0.03015577507019043, 0.03039743995666504, 0.030629888534545898, 0.030117727279663085, 0.030763168334960938, 0.030037567138671874, 0.02983286476135254, 0.03019968032836914, 0.030153568267822266, 0.03004627227783203, 0.029945024490356444, 0.03035219192504883, 0.030412063598632813, 0.03049977684020996, 0.030524255752563477, 0.030616704940795898, 0.030946432113647462, 0.030443935394287108, 0.030280191421508788, 0.03020572853088379, 0.030048255920410157, 0.030500255584716796, 0.030558176040649414, 0.03028396797180176, 0.03013033676147461, 0.030271167755126952, 0.030124639511108397, 0.030676992416381835, 0.030352800369262696, 0.030300096511840822, 0.030536352157592775, 0.030543680191040038, 0.03068854331970215, 0.03095427131652832, 0.030979936599731445, 0.03088934326171875, 0.031060895919799804, 0.030901344299316406, 0.03096259117126465, 0.03079987144470215, 0.030883520126342774, 0.030957887649536133, 0.030775039672851563, 0.03091996765136719, 0.03082748794555664, 0.030964960098266603, 0.03117136001586914, 0.030974143981933593, 0.03108768081665039, 0.032895679473876956, 0.03597654342651367, 0.03103379249572754, 0.03098624038696289, 0.03071824073791504, 0.030818304061889647, 0.030806272506713868, 0.03085081672668457, 0.03130844879150391, 0.030642208099365235, 0.030466047286987305, 0.030930944442749023, 0.030590400695800782, 0.03060793685913086, 0.030473472595214844, 0.030313056945800783, 0.029954208374023437, 0.029782175064086914, 0.029740896224975586, 0.029689855575561523, 0.029909023284912108, 0.02999497604370117, 0.029714431762695313, 0.029794111251831054, 0.030002880096435546, 0.030128416061401368, 0.03032713508605957, 0.030076799392700197, 0.03013222312927246, 0.030103551864624024, 0.030089216232299806, 0.029828895568847658, 0.0299399356842041, 0.030065887451171874, 0.030010143280029298, 0.030585983276367187, 0.030186368942260743, 0.030663007736206054, 0.03018502426147461, 0.030046304702758788, 0.029968351364135743, 0.02994588851928711, 0.030472095489501954, 0.030543487548828126, 0.03049295997619629, 0.030257343292236328, 0.03031449508666992, 0.030281728744506835, 0.030097408294677733, 0.03009231948852539, 0.030200864791870115, 0.029879648208618163, 0.030091840744018553, 0.030234207153320314, 0.030261695861816405, 0.030213727951049804, 0.029981088638305665, 0.030068960189819336, 0.03038800048828125, 0.030717248916625976, 0.030468799591064452, 0.030419231414794922, 0.030553823471069337, 0.030470144271850585, 0.030453760147094725, 0.030537952423095704, 0.03038800048828125, 0.030226272583007814, 0.03125811195373535, 0.030377952575683594, 0.0303351993560791, 0.03139225578308105, 0.030742624282836913, 0.030685087203979493, 0.030742528915405274, 0.030652671813964843, 0.030658208847045898, 0.03081020736694336, 0.030519296646118164, 0.030504959106445313, 0.03047395133972168, 0.030502239227294923, 0.03048748779296875, 0.031145984649658204, 0.03058687973022461, 0.03061667251586914, 0.03079020881652832, 0.03052275276184082, 0.030554719924926758, 0.03053545570373535, 0.030403167724609374, 0.030318592071533205, 0.030392416000366212, 0.030620607376098632, 0.03066364860534668, 0.03172147178649903, 0.032428192138671874, 0.030704896926879884, 0.030882495880126953, 0.03051532745361328, 0.030471967697143554, 0.03038003158569336, 
0.030367935180664062, 0.0306658878326416, 0.030306976318359376, 0.03003392028808594, 0.029822975158691405, 0.029728191375732422, 0.029669631958007814, 0.029907264709472657, 0.02980659294128418, 0.030216192245483397, 0.030208000183105467, 0.03017103958129883, 0.03005459213256836, 0.030293888092041015, 0.030795808792114257, 0.03196272087097168, 0.031117727279663086, 0.030910463333129884, 0.03073843193054199, 0.030533632278442382, 0.030208000183105467, 0.030002239227294922, 0.02990995216369629, 0.03014860725402832, 0.029949119567871094, 0.02993849563598633, 0.02993382453918457, 0.03029737663269043, 0.030556415557861327, 0.03039254379272461, 0.03008438491821289, 0.030268096923828126, 0.030947359085083007, 0.030005088806152345, 0.029749568939208985, 0.029777055740356446, 0.02991584014892578, 0.029818336486816407, 0.03317334365844726, 0.030820512771606447, 0.030173728942871094, 0.030269439697265626, 0.030316287994384766, 0.03057651138305664, 0.030595455169677734, 0.030626943588256836, 0.03063596725463867, 0.030720064163208008, 0.030573440551757813, 0.030288160324096678, 0.029955808639526366, 0.03073036766052246, 0.030771072387695313, 0.030856447219848634, 0.030895135879516603, 0.03068441581726074, 0.030853343963623048, 0.03089638328552246, 0.03083798408508301, 0.03072492790222168, 0.03081827163696289, 0.031329408645629886, 0.030720895767211914, 0.030898239135742186, 0.030657920837402344, 0.030904895782470704, 0.03077529525756836, 0.030906368255615234, 0.030883136749267577, 0.03075334358215332, 0.031346815109252926, 0.030863231658935546, 0.03088140869140625, 0.030924896240234374, 0.030691072463989257, 0.03130844879150391, 0.031180799484252928, 0.030699520111083983, 0.030435327529907227, 0.030918655395507814, 0.030045440673828125, 0.029753311157226563, 0.02957187271118164, 0.029708288192749024, 0.029659135818481445, 0.030083072662353515, 0.030528608322143554, 0.030221504211425783, 0.029923168182373047, 0.029823936462402344, 0.029893760681152345, 0.02994361686706543, 0.03020582389831543, 0.030463743209838866, 0.030859104156494142]",tokens/s,32.601471228930144,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1033.510912,1613.692928,0.0,1218.445312,1206.173696,s,1,9.1831015625,9.1831015625,0.0,9.1831015625,9.1831015625,9.1831015625,9.1831015625,[9.1831015625],,kWh,6.382324057082845e-05,7.032595791535425e-06,2.3776685687981747e-05,9.463252205034562e-05,,MB,1275.94496,1911.488512,0.0,1503.657984,1463.228416,s,10,1.9248706817626953,0.19248706817626954,0.0011163075534855215,0.19229170989990235,0.19386943054199218,0.19389991760253908,0.19392430725097656,"[0.19239820861816406, 0.19070640563964844, 0.1913957061767578, 0.1932269744873047, 0.19386265563964844, 0.19218521118164061, 0.1920773468017578, 0.1912587127685547, 0.19382905578613283, 
0.19393040466308595]",tokens/s,1329.9594742934557,kWh,5.888236950166705e-06,6.493722790158906e-07,3.927919809000313e-06,1.0465529038182906e-05,tokens/kWh,24461257.43533825,MB,1293.959168,1911.488512,0.0,1503.657984,1463.230976,s,10,19.02480383300781,1.9024803833007808,0.010420759895332518,1.9060280151367186,1.91608125,1.916356787109375,1.9165772167968749,"[1.91602001953125, 1.907142822265625, 1.90618115234375, 1.8814990234375, 1.91663232421875, 1.894983154296875, 1.9058748779296875, 1.8914891357421875, 1.8981241455078126, 1.906857177734375]",tokens/s,33.114664704555715,kWh,5.504454917149943e-05,6.071256374725795e-06,3.197691447039852e-05,9.309272001662375e-05,tokens/kWh,676744.6475809276,,s,630,19.021390398025527,0.030192683171469073,0.0006366162060178063,0.030160079956054686,0.030726429557800295,0.03112152633666992,0.03256156150817872,"[0.03147190475463867, 0.030670848846435547, 0.03013222312927246, 0.03253247833251953, 0.031373311996459964, 0.030482080459594725, 0.030052703857421877, 0.03110403251647949, 0.03020899200439453, 0.030287872314453124, 0.030420799255371094, 0.030612960815429687, 0.03062652778625488, 0.030502912521362304, 0.030594335556030274, 0.030865280151367188, 0.030816703796386718, 0.0306911678314209, 0.03205366516113281, 0.03079311943054199, 0.030898944854736328, 0.0305860481262207, 0.030468927383422852, 0.030503999710083007, 0.03061199951171875, 0.03057411193847656, 0.030427520751953124, 0.030842527389526368, 0.03051532745361328, 0.03050569534301758, 0.030340576171875, 0.030360095977783202, 0.03041279983520508, 0.030324735641479493, 0.030393632888793945, 0.030544416427612305, 0.030306495666503907, 0.03021824073791504, 0.029959552764892577, 0.02966387176513672, 0.030558048248291017, 0.030363487243652343, 0.030124256134033203, 0.030185567855834962, 0.03015872001647949, 0.030052480697631837, 0.03001046371459961, 0.02985580825805664, 0.029909343719482423, 0.03005900764465332, 0.03001747131347656, 0.02985580825805664, 0.02969183921813965, 0.02971564865112305, 0.029700479507446288, 0.03053366470336914, 0.029576992034912108, 0.029780288696289063, 0.03010345649719238, 0.030109535217285155, 0.03006224060058594, 0.03016783905029297, 0.03027168083190918, 0.03059030342102051, 0.029733535766601562, 0.029560287475585936, 0.02962486457824707, 0.02972585678100586, 0.029810592651367186, 0.029748416900634764, 0.030097152709960936, 0.03027078437805176, 0.03016160011291504, 0.029750463485717773, 0.03001545524597168, 0.029565759658813476, 0.029846656799316407, 0.030018367767333985, 0.030181024551391603, 0.030147008895874024, 0.030040063858032227, 0.029724672317504884, 0.029503488540649415, 0.02959974479675293, 0.029951391220092775, 0.030121919631958007, 0.029962656021118163, 0.030168544769287108, 0.030272287368774416, 0.030215648651123046, 0.030045824050903322, 0.030067615509033203, 0.03006185531616211, 0.030034656524658202, 0.0300214729309082, 0.030553375244140625, 0.03001638412475586, 0.030294015884399415, 0.0302608642578125, 0.03039244842529297, 0.030893951416015624, 0.03100454330444336, 0.03044937515258789, 0.03053443145751953, 0.030562271118164064, 0.030666784286499025, 0.031053823471069338, 0.030410463333129883, 0.030341279983520507, 0.030695552825927733, 0.03063155174255371, 0.030656896591186523, 0.030756864547729492, 0.03167027282714844, 0.030574464797973634, 0.03072012710571289, 0.03052463912963867, 0.030728992462158204, 0.031059455871582032, 0.030797536849975587, 0.030519872665405273, 0.0305828800201416, 0.030630016326904298, 0.030220287322998047, 0.030007295608520508, 
0.030038015365600586, 0.0328135986328125, 0.03022233581542969, 0.02997983932495117, 0.030430015563964845, 0.03312844848632813, 0.03140950393676758, 0.030319263458251953, 0.030099456787109374, 0.029825023651123047, 0.02958950424194336, 0.029716480255126954, 0.03003523254394531, 0.02948579216003418, 0.02978553581237793, 0.029833408355712892, 0.029782400131225586, 0.029634559631347656, 0.029586559295654298, 0.02984659194946289, 0.03013612747192383, 0.03012531280517578, 0.030110176086425782, 0.03011612892150879, 0.03003392028808594, 0.029754623413085938, 0.02961859130859375, 0.031641120910644534, 0.03035219192504883, 0.029744384765625, 0.0300053768157959, 0.029936384201049805, 0.02981670379638672, 0.029741056442260744, 0.029849311828613282, 0.030029727935791017, 0.029796415328979493, 0.029763391494750976, 0.029919647216796876, 0.030354816436767577, 0.030196319580078124, 0.02982310485839844, 0.02981260871887207, 0.029968095779418946, 0.03043951988220215, 0.029894975662231444, 0.02999622344970703, 0.029813568115234376, 0.030148672103881835, 0.030187456130981446, 0.030225887298583984, 0.03003878402709961, 0.03015452766418457, 0.03017932891845703, 0.030459552764892577, 0.030379648208618163, 0.030444255828857424, 0.030461759567260743, 0.030400608062744142, 0.03146275138854981, 0.032117694854736326, 0.030817888259887696, 0.03131619262695313, 0.03060918426513672, 0.031135839462280275, 0.03052947235107422, 0.030322751998901366, 0.030492671966552733, 0.030754175186157227, 0.03046272087097168, 0.033539104461669925, 0.030597984313964845, 0.03172761535644531, 0.03074662399291992, 0.031956703186035156, 0.031353120803833005, 0.030352928161621093, 0.0298951358795166, 0.02976153564453125, 0.031053823471069338, 0.029845279693603517, 0.02982464027404785, 0.029753952026367186, 0.029446144104003907, 0.029144512176513673, 0.029117151260375975, 0.029109407424926757, 0.02908844757080078, 0.029038591384887694, 0.029066911697387697, 0.029027999877929686, 0.02897737693786621, 0.02913942337036133, 0.029352960586547853, 0.029248287200927734, 0.03015497589111328, 0.030650367736816408, 0.030121984481811522, 0.02994611167907715, 0.02981452751159668, 0.029454336166381836, 0.029177087783813477, 0.029346559524536135, 0.029274112701416017, 0.029747200012207032, 0.029337600708007814, 0.02939673614501953, 0.029191936492919922, 0.02941798400878906, 0.029132352828979493, 0.029137344360351564, 0.029148160934448244, 0.029645471572875975, 0.030146848678588866, 0.03027078437805176, 0.0303721923828125, 0.03037161636352539, 0.029995647430419922, 0.030144512176513674, 0.029380607604980468, 0.02953990364074707, 0.02925312042236328, 0.029225536346435547, 0.02908812713623047, 0.029171072006225585, 0.02915977668762207, 0.029172000885009767, 0.03607721710205078, 0.031161344528198243, 0.030402559280395508, 0.030726144790649414, 0.030373888015747072, 0.03018547248840332, 0.031059839248657226, 0.0296646728515625, 0.030405344009399413, 0.030150432586669922, 0.030270944595336913, 0.03034601593017578, 0.03042505645751953, 0.03062918472290039, 0.03074892807006836, 0.030724544525146485, 0.030691551208496093, 0.030666528701782228, 0.030697471618652345, 0.030633983612060548, 0.030451711654663087, 0.030605024337768554, 0.030593311309814453, 0.03044175910949707, 0.030468864440917967, 0.03056662368774414, 0.030532352447509764, 0.03060940742492676, 0.03096985626220703, 0.03058483123779297, 0.03120742416381836, 0.030673952102661134, 0.030624736785888673, 0.030513151168823242, 0.0304167366027832, 0.0302675838470459, 0.029951423645019532, 0.030043872833251953, 
0.0298536319732666, 0.029813631057739258, 0.029740991592407225, 0.030238143920898436, 0.029778560638427733, 0.029693952560424806, 0.030902271270751954, 0.030255104064941408, 0.030251007080078125, 0.03014633560180664, 0.030308223724365233, 0.029878591537475584, 0.029650976181030273, 0.029572479248046873, 0.02969254493713379, 0.02955580711364746, 0.030058496475219725, 0.029608991622924803, 0.030372831344604494, 0.031525152206420895, 0.030296607971191405, 0.029773920059204102, 0.030176799774169923, 0.02967190361022949, 0.029900800704956054, 0.030562559127807618, 0.02984934425354004, 0.029457887649536132, 0.029769535064697265, 0.029928159713745118, 0.030232288360595702, 0.029323551177978517, 0.029793664932250975, 0.02940787124633789, 0.029423519134521483, 0.029452384948730467, 0.030261247634887696, 0.0305930233001709, 0.030457855224609375, 0.03030966377258301, 0.03032143974304199, 0.029996992111206055, 0.0295731201171875, 0.029466623306274413, 0.029237152099609375, 0.0294421443939209, 0.02977791976928711, 0.029549760818481444, 0.029331584930419922, 0.029870784759521485, 0.030295391082763672, 0.029911712646484376, 0.03063596725463867, 0.03024492835998535, 0.03029769515991211, 0.03010806465148926, 0.030234624862670898, 0.03023244857788086, 0.03024028778076172, 0.03069615936279297, 0.030556032180786133, 0.03025833511352539, 0.030364511489868164, 0.03032268714904785, 0.030289920806884765, 0.0300579833984375, 0.03014713668823242, 0.030193599700927734, 0.030340896606445313, 0.030306528091430664, 0.030437376022338865, 0.030220287322998047, 0.030422336578369142, 0.030309055328369142, 0.030322656631469727, 0.030199840545654298, 0.03033420753479004, 0.03025814437866211, 0.030395296096801756, 0.030511615753173828, 0.030287456512451173, 0.02955753517150879, 0.030259199142456054, 0.03011337661743164, 0.030341535568237304, 0.029748319625854492, 0.03011062431335449, 0.02978201675415039, 0.030707231521606447, 0.02985603141784668, 0.030085823059082032, 0.029706239700317383, 0.029693952560424806, 0.02991923141479492, 0.02981180763244629, 0.029794912338256836, 0.029935039520263673, 0.030290815353393556, 0.029920448303222658, 0.029885248184204103, 0.030122112274169922, 0.029937088012695314, 0.02961862373352051, 0.02953625679016113, 0.029621984481811522, 0.029270303726196288, 0.029302783966064453, 0.02951372718811035, 0.029501279830932616, 0.029347999572753906, 0.030097183227539064, 0.02998089599609375, 0.030412607192993164, 0.03051247978210449, 0.030390911102294922, 0.030271455764770507, 0.030892288208007813, 0.030330495834350588, 0.030062976837158202, 0.030293088912963867, 0.030241695404052735, 0.03014588737487793, 0.030069408416748048, 0.030640127182006836, 0.030138368606567382, 0.02999091148376465, 0.030154367446899415, 0.030277055740356447, 0.029973440170288086, 0.02996544075012207, 0.03018227195739746, 0.03034316825866699, 0.030504543304443358, 0.030414848327636718, 0.030081600189208985, 0.029887712478637696, 0.029780607223510742, 0.029892608642578124, 0.03032678413391113, 0.030405792236328125, 0.03063075256347656, 0.03067193603515625, 0.0313306884765625, 0.03149676895141602, 0.030713855743408205, 0.03196723175048828, 0.030928415298461916, 0.030880224227905272, 0.03334944152832031, 0.030935232162475585, 0.030488576889038086, 0.031073888778686522, 0.030202272415161133, 0.03042508888244629, 0.030476287841796876, 0.030324575424194335, 0.03048464012145996, 0.031204479217529297, 0.030380287170410157, 0.030325376510620117, 0.030095392227172852, 0.030054527282714842, 0.03214102554321289, 0.030268831253051756, 
0.029962976455688475, 0.02958470344543457, 0.02954924774169922, 0.029471872329711914, 0.029723007202148436, 0.02965318489074707, 0.029503807067871094, 0.029632511138916014, 0.02959974479675293, 0.0297042236328125, 0.029775840759277344, 0.029822975158691405, 0.02982707214355469, 0.02958745574951172, 0.02966067123413086, 0.029731327056884766, 0.029639904022216796, 0.03023072052001953, 0.02969455909729004, 0.029650943756103516, 0.029388799667358398, 0.029288448333740235, 0.029300159454345703, 0.02973548889160156, 0.030031871795654298, 0.029841407775878907, 0.030092832565307617, 0.030171104431152344, 0.03017900848388672, 0.030012224197387697, 0.03137273597717285, 0.03243244934082031, 0.029967744827270507, 0.02961417579650879, 0.02957391929626465, 0.029663232803344725, 0.02960588836669922, 0.029711456298828126, 0.03009222412109375, 0.029931488037109374, 0.029850976943969727, 0.030073503494262695, 0.030000768661499023, 0.03001350402832031, 0.02994550323486328, 0.029940383911132813, 0.029755392074584962, 0.029646848678588866, 0.03025263977050781, 0.030159263610839843, 0.030766304016113282, 0.030427520751953124, 0.03034169578552246, 0.030438655853271483, 0.03043609619140625, 0.030310144424438478, 0.030457151412963866, 0.03025948715209961, 0.03032534408569336, 0.030351295471191406, 0.030457984924316405, 0.030357183456420897, 0.030318912506103517, 0.030263296127319338, 0.030308351516723633, 0.03027939224243164, 0.030592607498168944, 0.030286495208740234, 0.03029609680175781, 0.03030191993713379, 0.030442848205566406, 0.03040287971496582, 0.030372480392456054, 0.03034854316711426, 0.030376096725463868, 0.0326907844543457, 0.030641216278076172, 0.029911231994628907, 0.029751903533935548, 0.0295118408203125, 0.029531776428222658, 0.029945663452148438, 0.029612607955932617, 0.029659135818481445, 0.030253055572509766, 0.02998681640625, 0.029995008468627928, 0.030125280380249024, 0.030415647506713866, 0.02988595199584961, 0.03000275230407715, 0.029937728881835938, 0.030157247543334962, 0.029712831497192383, 0.029604032516479493, 0.02938230323791504, 0.02953353691101074, 0.02948080062866211, 0.03003443145751953, 0.029743263244628906, 0.029941856384277345, 0.03022870445251465, 0.030332927703857423, 0.030121984481811522, 0.0301496639251709, 0.029737951278686524, 0.029608991622924803, 0.029509695053100585, 0.029567903518676757, 0.02953625679016113, 0.029878047943115233, 0.030072256088256834, 0.030046367645263673, 0.03093286323547363, 0.030395584106445314, 0.030153663635253906, 0.03016089630126953, 0.030114015579223632, 0.029929248809814454, 0.029468704223632812, 0.029554176330566406, 0.029436447143554687, 0.029460416793823243, 0.029993120193481444, 0.030185312271118165, 0.030063711166381835, 0.030452543258666993, 0.030347360610961913, 0.030359424591064454, 0.030214271545410155, 0.030494527816772463, 0.03057254409790039, 0.03004640007019043, 0.030040063858032227, 0.030523359298706056, 0.030780736923217773, 0.030468671798706055, 0.030453439712524413, 0.030441951751708985, 0.030451711654663087, 0.030410751342773438, 0.03057459259033203, 0.030395967483520508, 0.030413248062133788, 0.030379167556762697, 0.030392768859863282, 0.030478559494018554, 0.030447391510009764, 0.030369407653808595, 0.030289983749389647, 0.03035580825805664, 0.030615936279296874, 0.030416576385498047, 0.03034876823425293, 0.031877983093261716, 0.032573440551757815, 0.030557439804077147, 0.03061222457885742, 0.031016960144042968, 0.030611455917358397, 0.030390207290649413, 0.03079583930969238, 0.030473312377929686, 0.030247840881347656, 
0.030003200531005858, 0.029865983963012696, 0.029671424865722655, 0.029715904235839842, 0.02972060775756836, 0.029571647644042968, 0.02958460807800293, 0.029520639419555662, 0.029822399139404297, 0.02959008026123047, 0.029546432495117188, 0.029445375442504883]",tokens/s,33.120607212046714,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 22770 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1085.898752,8106.47552,0.0,7711.227904,7603.953664,s,1,18.641384765625,18.641384765625,0.0,18.641384765625,18.641384765625,18.641384765625,18.641384765625,[18.641384765625],,kWh,0.00033724906177082703,3.719273954371149e-05,0.0001315656608080057,0.0005060074621225442,,MB,1238.069248,9870.180352,0.0,9462.349824,8756.504576,s,10,16.571733886718746,1.657173388671875,0.005695760134307078,1.6594805297851563,1.6617247680664062,1.6619040100097655,1.662047403564453,"[1.642461181640625, 1.65444482421875, 1.6555794677734375, 1.6585115966796875, 1.6541756591796875, 1.6610345458984375, 1.660449462890625, 1.662083251953125, 1.6616849365234374, 1.6613089599609374]",tokens/s,154.4799124521114,kWh,4.8202024260833316e-05,5.316259229355053e-06,3.2103109015799156e-05,8.562139250598752e-05,tokens/kWh,2989906.990616835,MB,1256.022016,9870.180352,0.0,9462.349824,8756.507136,s,10,82.34577734374999,8.234577734375,0.016255357341610887,8.241509277343749,8.25075615234375,8.251183740234374,8.251525810546875,"[8.2009775390625, 8.2143671875, 8.223310546875, 8.228544921875, 8.2426123046875, 8.24040625, 8.2453994140625, 8.24788671875, 8.251611328125, 8.2506611328125]",tokens/s,7.650665526783283,kWh,0.0002409144947095827,2.657342281018311e-05,0.0001601282392136,0.0004276161567333656,tokens/kWh,147328.39021160468,,s,630,82.34075679016118,0.13069961395263674,0.0019530654863721322,0.13057984161376954,0.13235903625488282,0.1331479217529297,0.1400793441772461,"[0.13893551635742188, 0.12739881896972657, 0.1281716766357422, 0.1281764831542969, 0.12808396911621095, 0.12900732421875, 0.12900901794433595, 0.13369232177734375, 0.13002336120605468, 0.12795699310302736, 0.1282478790283203, 0.12825497436523436, 0.129184326171875, 0.12880531311035157, 0.13193011474609376, 
0.13184819030761719, 0.13043276977539062, 0.12846925354003907, 0.12811241149902344, 0.1283865966796875, 0.1292335968017578, 0.13049856567382812, 0.13147955322265625, 0.13099417114257814, 0.12988134765625, 0.1286023406982422, 0.12846339416503907, 0.12916841125488282, 0.1301554870605469, 0.13091839599609376, 0.13138729858398437, 0.1306288604736328, 0.12922966003417968, 0.12963623046875, 0.12919171142578126, 0.1294476776123047, 0.13091845703125, 0.1312957763671875, 0.13156556701660158, 0.13001231384277342, 0.12954217529296874, 0.12889170837402344, 0.13035519409179688, 0.12991693115234376, 0.13176422119140624, 0.13158108520507814, 0.1312407989501953, 0.1301724853515625, 0.12928025817871094, 0.13052044677734376, 0.13092332458496095, 0.13003366088867188, 0.13144985961914063, 0.13050064086914062, 0.1318668212890625, 0.12899346923828126, 0.13061897277832032, 0.13022227478027343, 0.13028025817871094, 0.13159014892578125, 0.132210693359375, 0.13058805847167967, 0.13121315002441405, 0.13991322326660155, 0.1275998077392578, 0.1280213165283203, 0.12786447906494142, 0.12774230194091796, 0.1277675552368164, 0.13016575622558593, 0.13545875549316405, 0.1306542663574219, 0.1288970184326172, 0.12824986267089844, 0.12756537628173828, 0.12829945373535157, 0.12905062866210937, 0.13305145263671875, 0.13252703857421874, 0.1309327392578125, 0.12860826110839843, 0.12896617126464843, 0.1283973388671875, 0.1281212158203125, 0.13095698547363283, 0.132129150390625, 0.13204249572753907, 0.12965846252441407, 0.12837135314941406, 0.1293758087158203, 0.12868858337402345, 0.1301436767578125, 0.13119340515136718, 0.13201773071289064, 0.13140013122558594, 0.12929776000976562, 0.12912503051757812, 0.12901376342773438, 0.13061695861816405, 0.1302408905029297, 0.13229055786132812, 0.13236972045898437, 0.13076141357421875, 0.12902787780761718, 0.1294844512939453, 0.12975765991210939, 0.13076693725585936, 0.1304470977783203, 0.13214288330078125, 0.13153660583496093, 0.12992530822753906, 0.13021650695800782, 0.13024575805664063, 0.13055442810058593, 0.131830078125, 0.13075421142578125, 0.13242594909667968, 0.13120729064941405, 0.1299488983154297, 0.13001808166503906, 0.1299571228027344, 0.1305771484375, 0.13168412780761718, 0.1315616912841797, 0.13172735595703125, 0.13051235961914062, 0.14113037109375, 0.12770079803466797, 0.1281025848388672, 0.12817768859863282, 0.12807420349121093, 0.12819046020507813, 0.13012991333007812, 0.13605679321289063, 0.1302139129638672, 0.12912435913085937, 0.12855705261230468, 0.1283108215332031, 0.12955081176757813, 0.12886416625976563, 0.1329439697265625, 0.13226393127441408, 0.13094912719726562, 0.12882534790039063, 0.12912640380859375, 0.12847718811035155, 0.12873274230957032, 0.13159059143066407, 0.13233561706542968, 0.13138893127441406, 0.12994309997558592, 0.1294775695800781, 0.12870761108398437, 0.1293076171875, 0.13037132263183593, 0.13155743408203124, 0.13206716918945313, 0.13080812072753906, 0.12963145446777344, 0.12936402893066407, 0.12912669372558594, 0.1300955810546875, 0.13120716857910156, 0.13225091552734375, 0.13093244934082032, 0.13088870239257813, 0.12988211059570312, 0.12900146484375, 0.12990821838378908, 0.1315527648925781, 0.13109075927734376, 0.1322421112060547, 0.13127430725097655, 0.1305072021484375, 0.12969334411621095, 0.12982716369628905, 0.13123583984375, 0.1306480712890625, 0.1312522277832031, 0.1315635223388672, 0.13078732299804688, 0.13080986022949218, 0.12949913024902343, 0.13116578674316406, 0.13121168518066406, 0.13136607360839844, 0.13177305603027345, 
0.13169273376464843, 0.13022207641601563, 0.14015536499023437, 0.12814131164550782, 0.12783926391601563, 0.12821160888671876, 0.12814118957519532, 0.12914837646484376, 0.13038485717773438, 0.13575932312011718, 0.13007516479492187, 0.12905673217773436, 0.12838899230957032, 0.12839543151855468, 0.12823904418945312, 0.1293679656982422, 0.13350775146484375, 0.13243382263183595, 0.12989244079589843, 0.1287118377685547, 0.1287560272216797, 0.12811318969726562, 0.12890521240234376, 0.13169218444824218, 0.13325045776367186, 0.13136370849609375, 0.1295311737060547, 0.12929922485351564, 0.128901123046875, 0.1284931182861328, 0.13039231872558593, 0.13224159240722655, 0.1323702392578125, 0.1308605499267578, 0.13014291381835938, 0.1295064392089844, 0.1300693817138672, 0.12980224609375, 0.1313804473876953, 0.13241629028320312, 0.13214720153808593, 0.13099203491210937, 0.1301973114013672, 0.1295870361328125, 0.13003411865234374, 0.13066444396972657, 0.13156556701660158, 0.13164544677734374, 0.132347900390625, 0.13004595947265624, 0.13087129211425783, 0.12948069763183595, 0.13137510681152345, 0.13122122192382812, 0.13173788452148438, 0.13170384216308595, 0.1305425567626953, 0.13078486633300782, 0.1300413818359375, 0.1302959747314453, 0.13059756469726563, 0.1317069091796875, 0.13183180236816405, 0.13174374389648438, 0.13154917907714844, 0.14049481201171876, 0.1283507843017578, 0.12919430541992188, 0.1283706817626953, 0.12837673950195314, 0.12830915832519532, 0.13027468872070314, 0.13690963745117188, 0.13102284240722656, 0.1295626220703125, 0.12902400207519532, 0.12853363037109375, 0.12835311889648438, 0.13054556274414061, 0.13333721923828126, 0.13229055786132812, 0.13028457641601562, 0.13049728393554688, 0.1287006378173828, 0.12937152099609375, 0.12869007873535157, 0.131152099609375, 0.13243843078613282, 0.131874755859375, 0.1308625030517578, 0.13033139038085936, 0.12958924865722657, 0.13054566955566407, 0.12919923400878905, 0.13162342834472657, 0.13164991760253905, 0.13192752075195313, 0.13001373291015625, 0.13120863342285155, 0.13006402587890625, 0.1314231414794922, 0.12982272338867187, 0.13268569946289063, 0.13119850158691407, 0.13076130676269532, 0.13126783752441407, 0.13021852111816407, 0.13056431579589844, 0.13036338806152345, 0.13152870178222656, 0.13144009399414064, 0.13100086975097655, 0.13126246643066405, 0.130616455078125, 0.13119541931152343, 0.13120547485351564, 0.13072515869140625, 0.13122396850585938, 0.1311808624267578, 0.13150822448730468, 0.1304289245605469, 0.13058253479003906, 0.1306409912109375, 0.13108901977539061, 0.13035871887207032, 0.131402587890625, 0.13177037048339843, 0.13162413024902345, 0.13996461486816406, 0.12824986267089844, 0.12899690246582032, 0.12820938110351562, 0.1282027587890625, 0.12900338745117187, 0.13072543334960937, 0.135719482421875, 0.13090815734863281, 0.12924844360351562, 0.12854150390625, 0.12843008422851562, 0.12855705261230468, 0.13024870300292968, 0.13279641723632812, 0.1326755828857422, 0.13027122497558594, 0.12992889404296876, 0.1286961212158203, 0.1288605194091797, 0.12978378295898438, 0.13254060363769532, 0.1318338623046875, 0.13180928039550782, 0.1308112030029297, 0.1294015350341797, 0.12876153564453124, 0.12974432373046876, 0.13192076110839843, 0.1318666229248047, 0.13188710021972655, 0.13182917785644532, 0.13013241577148438, 0.1293429718017578, 0.1296328887939453, 0.13080166625976564, 0.13147048950195311, 0.13216444396972657, 0.1321697235107422, 0.13107814025878906, 0.12951513671875, 0.12986105346679688, 0.1301329345703125, 0.13050221252441407, 
0.13167864990234374, 0.13249932861328126, 0.131217529296875, 0.1313846435546875, 0.12980653381347657, 0.1308134765625, 0.12995606994628905, 0.13157452392578126, 0.13173146057128907, 0.13190943908691405, 0.13118278503417968, 0.13089727783203126, 0.1306956787109375, 0.13019354248046874, 0.13020159912109375, 0.13052291870117189, 0.13164361572265626, 0.1316532440185547, 0.13308735656738283, 0.14012620544433593, 0.12829696655273437, 0.12820457458496093, 0.12826751708984374, 0.12816073608398437, 0.12899737548828125, 0.13085081481933594, 0.13553567504882813, 0.1309697265625, 0.12925340270996094, 0.12850051879882812, 0.12850994873046875, 0.12881846618652343, 0.13050726318359376, 0.13317747497558594, 0.13320101928710937, 0.13059170532226563, 0.130110595703125, 0.12919692993164061, 0.1286264953613281, 0.12942970275878907, 0.13166160583496095, 0.1332943115234375, 0.131778564453125, 0.13052674865722655, 0.1299471435546875, 0.12895443725585937, 0.12917648315429686, 0.13096754455566406, 0.1318968963623047, 0.13240777587890626, 0.13143650817871094, 0.13108837890625, 0.129544189453125, 0.1289581756591797, 0.13014653015136718, 0.13153286743164064, 0.13238636779785157, 0.13170938110351563, 0.13111488342285157, 0.13149606323242188, 0.12948419189453125, 0.13031024169921876, 0.13073049926757813, 0.132927490234375, 0.13182879638671874, 0.13113235473632812, 0.13153689575195313, 0.13080342102050782, 0.12925570678710938, 0.1304289245605469, 0.13183193969726562, 0.13213848876953124, 0.13127308654785155, 0.13165568542480469, 0.13163427734375, 0.13061827087402345, 0.13018464660644533, 0.13065887451171876, 0.131842041015625, 0.13173554992675782, 0.13281607055664063, 0.13068780517578124, 0.14088627624511718, 0.12847657775878907, 0.12828726196289061, 0.12817864990234376, 0.1282721252441406, 0.13027101135253907, 0.13066493225097656, 0.13762098693847657, 0.13073458862304688, 0.12884378051757814, 0.12850143432617187, 0.1285246124267578, 0.12947251892089845, 0.13043302917480468, 0.1336376953125, 0.13255520629882814, 0.13028483581542968, 0.1293585205078125, 0.1285509490966797, 0.13010943603515626, 0.1297097930908203, 0.13242396545410157, 0.13242274475097657, 0.13157635498046874, 0.12990882873535156, 0.12923695373535157, 0.12992953491210937, 0.1295474548339844, 0.13100099182128908, 0.13325328063964845, 0.13212608337402343, 0.13156005859375, 0.12980429077148437, 0.12916940307617186, 0.12946636962890626, 0.13043096923828126, 0.13245216369628907, 0.13192965698242187, 0.13177468872070314, 0.13058908081054688, 0.13014630126953125, 0.1291673583984375, 0.1298303680419922, 0.13090255737304687, 0.13227801513671875, 0.13193855285644532, 0.13237452697753907, 0.13075833129882813, 0.13074464416503906, 0.12919375610351563, 0.1314959411621094, 0.13160418701171875, 0.13327615356445313, 0.13225779724121095, 0.13110067749023438, 0.13025074768066405, 0.13086309814453126, 0.13005413818359374, 0.13144677734375, 0.131693603515625, 0.13102998352050782, 0.1325137939453125, 0.1304718780517578, 0.1415303955078125, 0.1285926055908203, 0.12825190734863282, 0.1283656005859375, 0.12828361511230468, 0.1293097229003906, 0.13108460998535157, 0.13580560302734376, 0.13170396423339845, 0.12993417358398437, 0.12859187316894533, 0.12940083312988282, 0.12825907897949218, 0.12942437744140625, 0.13311180114746093, 0.1330708465576172, 0.13054937744140624, 0.12996237182617187, 0.12910797119140624, 0.12851405334472657, 0.12912828063964843, 0.1317111358642578, 0.1323970489501953, 0.13160652160644531, 0.13123487854003907, 0.12992813110351562, 0.12900965881347656, 
0.12895436096191407, 0.13127679443359375, 0.13221612548828124, 0.131932861328125, 0.13253631591796874, 0.13065414428710936, 0.13003372192382812, 0.12951962280273438, 0.130702392578125, 0.13098080444335938, 0.13345587158203126, 0.13215335083007812, 0.13170687866210937, 0.1302650909423828, 0.13012991333007812, 0.12970188903808594, 0.13027122497558594, 0.1325972442626953, 0.13206723022460937, 0.1317255096435547, 0.1314349060058594, 0.13033401489257812, 0.13015315246582032, 0.13065362548828124, 0.13179058837890625, 0.13291807556152344, 0.1315450897216797, 0.1315691223144531, 0.13074858093261718, 0.13038812255859375, 0.12999293518066407, 0.13138534545898437, 0.13204071044921875, 0.1320028533935547, 0.1316414031982422, 0.13171580505371094, 0.14180744934082032, 0.12851321411132813, 0.12808647155761718, 0.12903887939453124, 0.12826214599609376, 0.12901951599121095, 0.13128533935546874, 0.13619609069824218, 0.13124610900878905, 0.1289318389892578, 0.129225830078125, 0.12940176391601563, 0.12878224182128906, 0.1296315155029297, 0.1337425537109375, 0.1322852783203125, 0.13041664123535157, 0.12938380432128907, 0.12946905517578125, 0.12852345275878907, 0.1294569549560547, 0.13196493530273437, 0.13230047607421874, 0.1317952575683594, 0.13015449523925782, 0.12972393798828125, 0.12955859375, 0.12914073181152344, 0.13074269104003905, 0.1320939483642578, 0.1321553955078125, 0.13203660583496094, 0.13028099060058593, 0.12952943420410157, 0.13014309692382814, 0.13009642028808593, 0.13128572082519532, 0.132115966796875, 0.13234768676757813, 0.1316829833984375, 0.12997843933105468, 0.13084262084960938, 0.13017219543457031, 0.13082083129882813, 0.13194444274902345, 0.1323534698486328, 0.13208018493652343, 0.13037158203125, 0.13044940185546874, 0.13119488525390624, 0.1305432891845703, 0.13164166259765625, 0.13235784912109375, 0.13192218017578125, 0.13110176086425782, 0.13091119384765626, 0.13022547912597657, 0.13100898742675782, 0.13138665771484376, 0.1316790466308594, 0.13319180297851563, 0.1308078155517578, 0.1314009246826172]",tokens/s,7.651132009941381,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,2132.529152,11826.823168,0.0,11431.575552,10953.091072,s,1,22.00828515625,22.00828515625,0.0,22.00828515625,22.00828515625,22.00828515625,22.00828515625,[22.00828515625],,kWh,0.0004223264299625043,4.657848849772365e-05,0.0001591851273479994,0.0006280900458082273,,MB,1606.619136,12722.307072,0.0,12314.476544,11624.128512,s,10,18.991810424804687,1.899181042480469,0.006285078080570184,1.9012791137695313,1.9044025756835938,1.9050386291503907,1.9055474719238281,"[1.883064453125, 1.8956903076171876, 1.8957227783203126, 1.8987581787109375, 1.89982373046875, 1.9027657470703125, 1.9033148193359375, 1.90426123046875, 1.9027344970703124, 1.9056746826171875]",tokens/s,134.79494280631894,kWh,5.527992636249715e-05,6.09700754509649e-06,3.678297387080254e-05,9.815990777839619e-05,tokens/kWh,2607989.410278791,MB,1610.563584,12724.404224,0.0,12316.573696,11624.131072,s,10,93.7228251953125,9.37228251953125,0.025483996830139362,9.3828662109375,9.3948287109375,9.39677373046875,9.39832974609375,"[9.3171259765625, 9.34112109375, 9.3562275390625, 9.366119140625, 9.38015625, 9.385576171875, 9.390072265625, 9.394396484375, 9.3933115234375, 9.39871875]",tokens/s,6.721948454788035,kWh,0.0002742070814204203,3.024713545027438e-05,0.00018208203455439553,0.00048653625142509014,tokens/kWh,129486.75420478886,,s,630,93.71858575439465,0.14875965992761037,0.0018670747989621259,0.14857625579833983,0.15037186431884766,0.1509479766845703,0.15800266555786133,"[0.1578668212890625, 0.14541209411621095, 0.1457986602783203, 0.14605926513671874, 0.1453059539794922, 0.1458253173828125, 0.15310092163085937, 0.14805197143554688, 0.14697065734863282, 0.14716067504882813, 0.1450806427001953, 0.14627027893066405, 0.14753695678710937, 0.1493697967529297, 0.14764236450195312, 0.1489152069091797, 0.14611293029785155, 0.14620063781738282, 0.14700703430175782, 0.14798915100097657, 0.14824479675292968, 0.14863360595703126, 0.14670236206054688, 0.1462451171875, 0.14616213989257812, 0.1480745849609375, 0.1482296600341797, 0.1492954864501953, 0.1476485137939453, 0.14700953674316405, 0.14799667358398438, 0.1458524169921875, 0.14820358276367188, 0.14860841369628905, 0.1489126739501953, 0.14828134155273437, 0.14825471496582032, 0.14572134399414063, 0.14747238159179688, 0.1499832305908203, 0.14793516540527343, 0.14889718627929688, 0.1484554901123047, 0.1467065887451172, 0.14656761169433594, 0.14847109985351561, 0.1483701171875, 0.1493987274169922, 0.14844192504882814, 0.14801837158203124, 0.14762471008300782, 0.14702188110351563, 0.14848345947265626, 0.14958409118652344, 0.14734915161132814, 0.14943206787109375, 0.14713548278808594, 0.14738394165039062, 0.1484449920654297, 0.14857887268066405, 
0.14798460388183593, 0.1500403594970703, 0.1471443786621094, 0.157739013671875, 0.14625526428222657, 0.14730096435546874, 0.14523155212402344, 0.14586297607421875, 0.14646885681152344, 0.15389430236816407, 0.14810812377929689, 0.14865177917480468, 0.14545510864257813, 0.1459568634033203, 0.14678118896484374, 0.14951321411132812, 0.14969378662109376, 0.14967196655273438, 0.14652275085449218, 0.146429443359375, 0.14754412841796874, 0.14606150817871094, 0.14944248962402343, 0.15077203369140624, 0.14768975830078124, 0.14713536071777344, 0.14652297973632813, 0.1462025604248047, 0.1475850830078125, 0.14964041137695314, 0.14925868225097655, 0.14944496154785156, 0.14662185668945313, 0.1462395477294922, 0.14750338745117186, 0.1484638671875, 0.1490513916015625, 0.15031826782226562, 0.14733544921875, 0.14775144958496095, 0.14702386474609375, 0.14782669067382812, 0.148791015625, 0.14926876831054686, 0.14798643493652344, 0.1500037078857422, 0.14678016662597657, 0.14770314025878906, 0.14914802551269532, 0.1483410186767578, 0.14852272033691405, 0.14906600952148438, 0.14852854919433595, 0.14907656860351562, 0.14708122253417968, 0.14872735595703124, 0.1490979766845703, 0.1485731201171875, 0.14914968872070314, 0.14837554931640626, 0.14685600280761718, 0.14935443115234376, 0.14855081176757812, 0.1491198425292969, 0.14914764404296876, 0.1484083251953125, 0.15698739624023436, 0.1463190155029297, 0.14816015625, 0.1452280578613281, 0.1454698486328125, 0.14778080749511718, 0.15338783264160155, 0.148872314453125, 0.14789926147460938, 0.14662246704101561, 0.14626815795898437, 0.14814002990722655, 0.14752153015136718, 0.15078604125976564, 0.1497046661376953, 0.14700108337402343, 0.14695452880859375, 0.14835098266601562, 0.1465528259277344, 0.14892031860351562, 0.15001365661621094, 0.148453125, 0.14939535522460937, 0.1458305206298828, 0.147615234375, 0.1497912292480469, 0.14783247375488281, 0.1486422119140625, 0.15010809326171876, 0.14765802001953124, 0.14784707641601563, 0.14703085327148438, 0.14873190307617187, 0.15023922729492187, 0.1482073211669922, 0.14931936645507812, 0.14695074462890625, 0.1481719055175781, 0.14858438110351563, 0.14817170715332031, 0.14839132690429688, 0.15061024475097656, 0.14844342041015626, 0.14807472229003907, 0.14878448486328125, 0.147706298828125, 0.14908546447753906, 0.1482872314453125, 0.1488506622314453, 0.15016563415527343, 0.14749990844726563, 0.14795529174804686, 0.14851461791992188, 0.1486037139892578, 0.14991500854492187, 0.14820147705078124, 0.1487445068359375, 0.14731590270996095, 0.147917724609375, 0.14872108459472655, 0.14986502075195313, 0.1490370635986328, 0.1496309814453125, 0.15805815124511718, 0.14560710144042968, 0.1467632598876953, 0.14735002136230468, 0.14718975830078124, 0.146513916015625, 0.15251251220703124, 0.14778976440429686, 0.1474512023925781, 0.14939622497558594, 0.14654197692871093, 0.14752616882324218, 0.1492071075439453, 0.14929075622558594, 0.1484126739501953, 0.1469706268310547, 0.14818861389160157, 0.14991593933105468, 0.147013916015625, 0.1480990753173828, 0.15048854064941405, 0.14776963806152343, 0.14847821044921874, 0.1482629089355469, 0.14788995361328125, 0.14904920959472656, 0.14798883056640624, 0.14853453063964844, 0.15028915405273438, 0.14759730529785156, 0.14849842834472657, 0.14829977416992188, 0.14836531066894532, 0.1494530487060547, 0.14814183044433593, 0.1485455322265625, 0.14985011291503905, 0.1477181396484375, 0.14847795104980469, 0.14866998291015626, 0.14788861083984375, 0.14911692810058594, 0.14912307739257813, 0.1486840057373047, 
0.14943650817871093, 0.14737274169921874, 0.14937692260742189, 0.14788009643554687, 0.14844511413574218, 0.15067123413085937, 0.148555908203125, 0.14955711364746094, 0.1474397430419922, 0.14872157287597657, 0.15056291198730468, 0.14832025146484376, 0.1486929931640625, 0.14895513916015626, 0.1483014678955078, 0.15027235412597656, 0.14828924560546874, 0.14915525817871095, 0.14872198486328125, 0.15993215942382813, 0.14751708984375, 0.14737619018554687, 0.14643898010253906, 0.14692965698242189, 0.14870527648925783, 0.15146803283691407, 0.1508106231689453, 0.14653599548339843, 0.14792678833007813, 0.14833261108398438, 0.14645660400390625, 0.1486231689453125, 0.15122511291503907, 0.14828544616699219, 0.14971856689453125, 0.14693621826171874, 0.14740486145019532, 0.14841036987304687, 0.1483357391357422, 0.15048361206054686, 0.14838829040527343, 0.1484613494873047, 0.14987858581542968, 0.14722682189941405, 0.1478544616699219, 0.1494486083984375, 0.1489008331298828, 0.15049955749511718, 0.14792291259765625, 0.14841976928710937, 0.1482860870361328, 0.1481871337890625, 0.15021670532226564, 0.14863526916503905, 0.14864012145996094, 0.14857420349121095, 0.14819532775878907, 0.14851890563964842, 0.1487337646484375, 0.1484781494140625, 0.1506299591064453, 0.14853575134277344, 0.14922752380371093, 0.14804893493652344, 0.14816458129882812, 0.15020541381835936, 0.1488690185546875, 0.14906381225585938, 0.14823628234863281, 0.14857830810546874, 0.1500584259033203, 0.1488143310546875, 0.14919686889648437, 0.14810931396484375, 0.1489304656982422, 0.14990911865234374, 0.1488056640625, 0.14905699157714844, 0.14861817932128907, 0.14947535705566406, 0.14953855895996093, 0.14837910461425782, 0.16095333862304687, 0.14748570251464843, 0.14649932861328124, 0.14717747497558595, 0.14834857177734376, 0.1456494445800781, 0.15342060852050782, 0.14872166442871093, 0.1476091766357422, 0.14962693786621092, 0.14657160949707032, 0.1472184295654297, 0.14970675659179689, 0.14991352844238282, 0.1497290496826172, 0.14698687744140626, 0.14824412536621093, 0.1495392608642578, 0.14683782958984376, 0.14890412902832031, 0.14968608093261718, 0.14815635681152345, 0.15051712036132814, 0.147901123046875, 0.14726499938964843, 0.14922921752929688, 0.14900070190429687, 0.14986483764648437, 0.14855760192871093, 0.14835952758789062, 0.14913690185546874, 0.14779592895507812, 0.1493282928466797, 0.14874188232421875, 0.1486168975830078, 0.14991007995605468, 0.14843904113769532, 0.1491183624267578, 0.14833721923828125, 0.1489775390625, 0.14949392700195313, 0.14821171569824218, 0.14970841979980468, 0.1482710418701172, 0.1488501739501953, 0.14969952392578126, 0.14888345336914063, 0.14876057434082032, 0.14870527648925783, 0.1490145263671875, 0.1499129638671875, 0.14834136962890626, 0.14980709838867187, 0.1486069793701172, 0.14851887512207032, 0.14970474243164061, 0.14883634948730468, 0.15015936279296874, 0.14893670654296876, 0.1485701141357422, 0.14867625427246095, 0.14870358276367188, 0.15068365478515625, 0.15983421325683594, 0.1475067901611328, 0.14824703979492188, 0.14649754333496093, 0.14741299438476563, 0.14839590454101562, 0.15120375061035157, 0.15081289672851564, 0.14701158142089843, 0.14820565795898438, 0.14740879821777345, 0.14737788391113282, 0.14829977416992188, 0.1518011474609375, 0.1477840576171875, 0.14971731567382812, 0.14720643615722656, 0.14742323303222657, 0.14916812133789062, 0.14943846130371094, 0.1512489013671875, 0.14784259033203126, 0.14839447021484375, 0.14763357543945313, 0.14719418334960938, 0.14937635803222657, 
0.15040777587890625, 0.14934255981445313, 0.1489320068359375, 0.14788652038574218, 0.1487787170410156, 0.14705094909667968, 0.14965475463867187, 0.15120005798339844, 0.14869961547851562, 0.15063040161132812, 0.1474949188232422, 0.14834072875976562, 0.14789427185058593, 0.150466552734375, 0.15071846008300782, 0.1488343048095703, 0.14881129455566405, 0.1471453399658203, 0.14837539672851563, 0.1502578582763672, 0.14976937866210938, 0.14887388610839844, 0.14923139953613282, 0.14886729431152343, 0.14908758544921874, 0.14841923522949219, 0.1494950714111328, 0.14941462707519532, 0.1492295684814453, 0.14889503479003907, 0.14904595947265625, 0.14950726318359375, 0.14831494140625, 0.14960435485839843, 0.14936679077148438, 0.14997299194335936, 0.14920399475097657, 0.15953305053710937, 0.14723043823242188, 0.14750968933105468, 0.14654812622070312, 0.14725692749023436, 0.148378662109375, 0.15187120056152345, 0.1507197723388672, 0.14664691162109375, 0.14829779052734374, 0.14833689880371093, 0.1466343994140625, 0.14940269470214843, 0.1504930877685547, 0.14905958557128907, 0.14892236328125, 0.14779705810546875, 0.14837446594238282, 0.1477242889404297, 0.14979644775390624, 0.15104365539550782, 0.14844195556640624, 0.1482395477294922, 0.14779061889648437, 0.14775299072265624, 0.14894688415527343, 0.1495347900390625, 0.1491763916015625, 0.14946870422363281, 0.147685791015625, 0.14830738830566406, 0.1484125061035156, 0.14931709289550782, 0.15089881896972657, 0.14942912292480467, 0.14970675659179689, 0.146998779296875, 0.14823065185546874, 0.15011839294433593, 0.1492991943359375, 0.14949334716796875, 0.14953712463378907, 0.14853330993652344, 0.14871128845214843, 0.1482254333496094, 0.14880216979980468, 0.14944674682617187, 0.1495451202392578, 0.150596923828125, 0.14859494018554686, 0.150186279296875, 0.14766831970214844, 0.14924656677246093, 0.14919480895996093, 0.1499402313232422, 0.15055258178710937, 0.14867251586914063, 0.14881190490722657, 0.14751731872558593, 0.14920256042480468, 0.15068588256835938, 0.15008607482910155, 0.15136093139648438, 0.15862098693847657, 0.14686630249023438, 0.1481856689453125, 0.14656716918945312, 0.1464617919921875, 0.14816569519042969, 0.1531820526123047, 0.14896307373046874, 0.1480133056640625, 0.14709735107421876, 0.1475536346435547, 0.14741334533691405, 0.14935478210449218, 0.1525148468017578, 0.14903910827636718, 0.14906777954101563, 0.14587289428710937, 0.14798841857910155, 0.14929046630859374, 0.1497379913330078, 0.14989935302734375, 0.149032958984375, 0.14734693908691407, 0.14878115844726564, 0.14779023742675781, 0.14837350463867188, 0.1515166473388672, 0.14926626586914063, 0.1508338623046875, 0.1469246368408203, 0.14834918212890624, 0.14894966125488282, 0.14850662231445313, 0.1506078643798828, 0.1495548553466797, 0.14880546569824218, 0.14855833435058594, 0.14803135681152343, 0.1485489959716797, 0.14964198303222656, 0.14907725524902343, 0.15048069763183594, 0.14822898864746092, 0.14866233825683595, 0.1474140167236328, 0.14947021484375, 0.15194090270996094, 0.14911509704589843, 0.14987059020996094, 0.14783238220214845, 0.1486299591064453, 0.1496678466796875, 0.14896258544921875, 0.15022265625, 0.14943904113769532, 0.14874453735351562, 0.14895513916015626, 0.1489469451904297, 0.14966989135742187, 0.14976141357421874, 0.1493059844970703, 0.15016978454589844, 0.14903482055664063, 0.15973487854003907, 0.14637324523925782, 0.14639094543457032, 0.14741127014160157, 0.14830918884277344, 0.14672773742675782, 0.15436618041992187, 0.14774864196777343, 0.14762745666503907, 
0.14828729248046876, 0.1473115234375, 0.14725436401367187, 0.15095639038085937, 0.1503678741455078, 0.15055474853515624, 0.14770399475097656, 0.1484169921875, 0.14728707885742187, 0.14742947387695313, 0.15051046752929687, 0.15043075561523436, 0.1493553924560547, 0.14874227905273438, 0.14767417907714844, 0.14769244384765626, 0.14814413452148437, 0.14986367797851563, 0.1510996551513672, 0.14797013854980468, 0.14934672546386718, 0.14725030517578125, 0.14796890258789064, 0.1509396514892578, 0.1497681884765625, 0.15033958435058595, 0.14846937561035156, 0.14851036071777343, 0.14906851196289062, 0.14833868408203124, 0.14983576965332032, 0.14883021545410155, 0.15006271362304688, 0.14914802551269532, 0.14860858154296874, 0.14947573852539062, 0.14853289794921876, 0.1492545623779297, 0.14992778015136718, 0.14949594116210937, 0.15028633117675783, 0.14834063720703125, 0.14954505920410155, 0.148279296875, 0.1499334716796875, 0.1506185302734375, 0.14935250854492188, 0.150136962890625, 0.14760557556152343, 0.14960633850097657, 0.14959222412109374, 0.1496629180908203, 0.1509547882080078, 0.14948953247070312]",tokens/s,6.7222525279139616,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 110.12 MiB is free. Process 169070 has 14.63 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 51.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 436.12 MiB is free. Process 175731 has 14.31 GiB memory in use. Of the allocated memory 14.14 GiB is allocated by PyTorch, and 61.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,1171.74272,5037.228032,0.0,4634.70592,4621.451776,s,1,14.017248046875,14.017248046875,0.0,14.017248046875,14.017248046875,14.017248046875,14.017248046875,[14.017248046875],,kWh,0.00019905916797079802,2.1950498733250368e-05,6.75647762739956e-05,0.000288574442978044,,MB,1344.139264,5911.740416,0.0,5496.635392,5336.871424,s,10,9.381483093261718,0.9381483093261718,0.0052608552739976085,0.9388148498535156,0.9424588684082031,0.9425970489501952,0.9427075933837891,"[0.9242491455078125, 0.9354083251953125, 0.9369375610351562, 0.94210546875, 0.937936279296875, 0.9377211303710937, 0.9427352294921875, 0.9422683715820312, 0.9396934204101562, 0.9424281616210938]",tokens/s,272.87796338286097,kWh,2.7226402124618733e-05,3.0025769358617312e-06,1.81140296427273e-05,4.834300870320775e-05,tokens/kWh,5295491.67226353,MB,1387.823104,5911.740416,0.0,5496.635392,5336.873984,s,10,44.90041455078125,4.490041455078126,0.02078786908101345,4.492143310546876,4.5145380859375,4.51455126953125,4.51456181640625,"[4.45762060546875, 4.46370556640625, 4.47185888671875, 4.4781611328125, 4.48135107421875, 4.502935546875, 4.50408837890625, 4.514564453125, 4.51159375, 
4.51453515625]",tokens/s,14.031050855610376,kWh,0.0001319513217583023,1.4555179242530698e-05,8.755458266987098e-05,0.00023406108367070397,tokens/kWh,269160.507214576,,s,630,44.897264289855926,0.07126549887278723,0.0016062593425575284,0.07099377822875977,0.07233937225341798,0.07277121505737305,0.08117747116088868,"[0.08214636993408203, 0.07056832122802735, 0.06992720031738281, 0.06955622100830078, 0.06930223846435547, 0.06932691192626952, 0.07043714904785156, 0.06996371459960937, 0.06969548797607422, 0.07021071624755859, 0.06988428497314453, 0.06931283569335937, 0.0699345932006836, 0.07270467376708985, 0.0723329620361328, 0.07126582336425781, 0.07043360137939453, 0.07017078399658203, 0.06964822387695313, 0.06995097351074218, 0.0706012191772461, 0.06988800048828125, 0.06964444732666016, 0.06980592346191407, 0.07027871704101563, 0.06983468627929687, 0.07130915069580078, 0.07179507446289063, 0.07138333129882812, 0.07097865295410156, 0.07072454071044922, 0.07038579559326172, 0.07043875122070313, 0.07036275482177734, 0.07013426971435546, 0.06976831817626954, 0.06990300750732421, 0.07062537384033203, 0.07014399719238282, 0.0708647689819336, 0.07128844451904297, 0.07132825469970704, 0.07147519683837891, 0.07083622741699219, 0.07126127624511719, 0.07061090850830078, 0.07077983856201171, 0.07093641662597656, 0.07054470062255859, 0.07043363189697266, 0.07023001861572266, 0.07124281311035156, 0.07052992248535156, 0.07170496368408204, 0.07093548583984376, 0.07118515014648437, 0.07137078094482421, 0.07116966247558594, 0.07073827362060547, 0.07096889495849609, 0.0709986572265625, 0.07060054779052734, 0.07054700469970702, 0.08119439697265625, 0.07060540771484375, 0.06990217590332032, 0.06957209777832031, 0.069493408203125, 0.0695173110961914, 0.06975897979736329, 0.07006208038330078, 0.06951936340332031, 0.06935475158691407, 0.07020211029052735, 0.06951321411132813, 0.07071324920654297, 0.07357449340820313, 0.07270333099365234, 0.07138575744628907, 0.07054252624511718, 0.07004780578613282, 0.06967295837402344, 0.06985616302490234, 0.07028508758544921, 0.06976313781738282, 0.0700047378540039, 0.07044271850585937, 0.07007395172119141, 0.06980473327636719, 0.07083401489257812, 0.07245155334472657, 0.07219884490966796, 0.07211203002929688, 0.0717674560546875, 0.07054592132568359, 0.07018515014648438, 0.07008048248291016, 0.07062735748291016, 0.07002025604248047, 0.07044950103759766, 0.07038963317871094, 0.07046617889404297, 0.07050870513916016, 0.07171379089355469, 0.07149040222167968, 0.07239190673828125, 0.0715865249633789, 0.07091584014892578, 0.07045318603515625, 0.07047615814208984, 0.07118409729003906, 0.07057027435302735, 0.06994944000244141, 0.07048799896240235, 0.07009506988525391, 0.0704776611328125, 0.070698974609375, 0.07184925079345703, 0.07185081481933593, 0.07206668853759765, 0.07136492919921875, 0.07069856262207032, 0.07045164489746093, 0.07117132568359374, 0.07077891540527344, 0.07053724670410157, 0.08113603210449219, 0.07055206298828125, 0.07010508728027344, 0.06952451324462891, 0.07062828826904297, 0.07012764739990235, 0.06956646728515625, 0.06957868957519531, 0.07009286499023437, 0.06981581115722656, 0.07028947448730469, 0.0695607681274414, 0.07073792266845703, 0.07359510040283203, 0.07241641235351562, 0.07120140838623047, 0.0709421157836914, 0.07030435180664063, 0.06977126312255859, 0.06964390563964844, 0.07114790344238281, 0.07020655822753906, 0.07044802856445312, 0.06997615814208985, 0.07006134033203125, 0.07058303833007812, 0.07171881866455078, 0.07226979064941406, 0.07175065612792969, 
0.07161913299560548, 0.07144662475585938, 0.0706949462890625, 0.07021417236328124, 0.07042845153808594, 0.0707192611694336, 0.07048623657226563, 0.07082109069824219, 0.07050873565673828, 0.07035327911376953, 0.07074428558349609, 0.07141785430908203, 0.07194409942626953, 0.07202323150634765, 0.07201270294189453, 0.07061113739013672, 0.07101945495605469, 0.07058303833007812, 0.07026028442382813, 0.07051526641845703, 0.0711436767578125, 0.07064351654052735, 0.07040755462646485, 0.07099974060058593, 0.07106870269775391, 0.07184349060058594, 0.0719832992553711, 0.07241276550292969, 0.07131523132324219, 0.07104166412353516, 0.07055350494384766, 0.07055165100097656, 0.07058822631835937, 0.0708650894165039, 0.08226515197753906, 0.071000732421875, 0.07042281341552735, 0.0697853775024414, 0.07019747161865235, 0.07010018920898438, 0.0697147216796875, 0.06992829132080078, 0.06964044952392578, 0.06964995574951172, 0.07002729797363282, 0.07084480285644532, 0.07100454711914063, 0.07351920318603515, 0.0727162857055664, 0.07164514923095704, 0.07092192077636719, 0.07034095764160156, 0.07044505310058594, 0.07016448211669922, 0.06971392059326172, 0.06978479766845704, 0.0708815689086914, 0.07023859405517578, 0.06974681854248047, 0.07064575958251954, 0.07191331481933594, 0.07231504058837891, 0.07221862030029297, 0.07201900482177734, 0.0711994857788086, 0.07069305419921874, 0.070508544921875, 0.07023616027832032, 0.07009254455566406, 0.06985958099365235, 0.07106073760986328, 0.07050342559814453, 0.07040150451660156, 0.0706728286743164, 0.07173001861572266, 0.07200601959228516, 0.07197145843505859, 0.07152611541748047, 0.07143443298339844, 0.07104694366455078, 0.07161885070800782, 0.07073590087890624, 0.07050351715087891, 0.07047670745849609, 0.0710843505859375, 0.07048105621337891, 0.07054137420654297, 0.07113168334960937, 0.07181622314453125, 0.0720208969116211, 0.07158930969238281, 0.07130361938476562, 0.0711943359375, 0.07193987274169922, 0.07143097686767579, 0.07068367767333984, 0.07060765075683594, 0.08222509002685546, 0.07104889678955079, 0.06995897674560547, 0.06984134674072266, 0.06998483276367187, 0.06953984069824219, 0.06980812835693359, 0.0701112289428711, 0.06982806396484376, 0.07022998046875, 0.06993977355957032, 0.07057965087890625, 0.071159423828125, 0.07402137756347656, 0.07232502746582031, 0.07119312286376953, 0.07070476531982423, 0.07024668884277344, 0.07129535675048829, 0.07042633819580078, 0.07019725036621094, 0.06995782470703125, 0.07122720336914062, 0.0702852783203125, 0.06999657440185547, 0.07072895812988281, 0.07226652526855469, 0.07207933044433594, 0.07203424072265625, 0.07121414184570313, 0.07080989074707031, 0.07113187408447266, 0.07138285064697265, 0.07034899139404296, 0.07009279632568359, 0.07053635406494141, 0.07045616149902344, 0.07014809417724609, 0.0706170883178711, 0.07180691528320313, 0.07170873260498047, 0.07239807891845704, 0.07176070404052734, 0.07104067230224609, 0.07157904052734375, 0.0712867202758789, 0.07093138885498047, 0.07046963500976562, 0.07049996948242188, 0.07084038543701172, 0.07121965026855469, 0.07048998260498048, 0.07201382446289062, 0.07142121887207031, 0.07181177520751954, 0.07205590057373047, 0.07187500762939453, 0.07111116790771485, 0.07116595458984375, 0.07128678131103515, 0.07072077178955079, 0.07039871978759765, 0.07118224334716797, 0.0809559326171875, 0.07100729370117187, 0.07051324462890625, 0.06985113525390625, 0.06994290924072266, 0.07004608154296875, 0.07039100646972657, 0.07075714874267579, 0.07047372436523437, 0.07054541015625, 
0.07009279632568359, 0.07064575958251954, 0.07183094024658203, 0.07430000305175781, 0.07283916473388671, 0.0714097900390625, 0.07092425537109374, 0.07046662139892577, 0.0703595199584961, 0.07079974365234375, 0.07055750274658203, 0.07057017517089843, 0.07148908996582032, 0.07051718139648437, 0.0709591064453125, 0.07168195343017578, 0.07273686218261718, 0.07254386901855468, 0.07210393524169922, 0.07172748565673828, 0.0712841567993164, 0.07068470764160156, 0.07196521759033203, 0.07076454162597656, 0.07050383758544922, 0.07054918670654296, 0.07058236694335937, 0.07128761291503906, 0.07136685180664062, 0.07219181060791016, 0.07286784362792968, 0.07173529815673828, 0.07162258911132813, 0.07227961730957032, 0.07123538970947266, 0.07149967956542969, 0.07108892822265625, 0.07093862152099609, 0.07114093017578126, 0.07074861145019531, 0.07076863861083985, 0.07240473937988282, 0.07188636779785157, 0.07304656219482422, 0.07235398101806641, 0.07165955352783203, 0.0723351058959961, 0.07139350128173828, 0.07176834869384766, 0.07216432189941406, 0.07124230194091796, 0.07092777252197266, 0.0712609634399414, 0.08091648101806641, 0.07091410827636718, 0.07054351806640625, 0.07002294158935547, 0.07017068481445313, 0.07004668426513672, 0.07011395263671875, 0.07010851287841798, 0.07084537506103515, 0.0704738540649414, 0.07062723541259766, 0.07074201965332032, 0.07184585571289062, 0.07399836730957031, 0.07312134552001953, 0.07206883239746094, 0.07091887664794921, 0.07055372619628907, 0.07026233673095703, 0.0709202880859375, 0.07057635498046876, 0.07041382598876954, 0.07080207824707031, 0.07050822448730469, 0.07055580902099609, 0.07150128173828126, 0.07345206451416016, 0.07291085052490234, 0.07235363006591797, 0.07154704284667969, 0.0711305923461914, 0.07081629180908203, 0.07076454162597656, 0.07158988952636719, 0.07087299346923828, 0.07141999816894531, 0.07101849365234375, 0.07069193267822266, 0.07100035095214843, 0.07217625427246094, 0.07267961883544923, 0.07262802886962891, 0.07275917053222657, 0.07165145874023438, 0.07134822082519532, 0.0710054702758789, 0.0708512954711914, 0.0712640609741211, 0.07116786956787109, 0.07081401824951172, 0.07154710388183594, 0.07094866943359375, 0.07143603515625, 0.07277133178710937, 0.07198767852783203, 0.07231283569335938, 0.07175904083251954, 0.07231520080566406, 0.07164979553222656, 0.07119657897949219, 0.07204386901855468, 0.07117286682128907, 0.07111804962158202, 0.0825099868774414, 0.07117753601074218, 0.07071971130371094, 0.07031874847412109, 0.07078460693359374, 0.07045903778076172, 0.07011382293701172, 0.07094480133056641, 0.07051673889160157, 0.07068262481689454, 0.0709265594482422, 0.07054694366455078, 0.07186255645751953, 0.07399382019042969, 0.07293788909912109, 0.07209149169921875, 0.0715777587890625, 0.07135350036621094, 0.0708878402709961, 0.07134838104248047, 0.07096348571777343, 0.07063334655761719, 0.07080973052978516, 0.07100621032714843, 0.07068409729003906, 0.07107231903076172, 0.07233526611328125, 0.07277107238769531, 0.0721945571899414, 0.07254761505126953, 0.07161529541015625, 0.07164934539794922, 0.0713604507446289, 0.07085584259033204, 0.07077958679199219, 0.07105782318115235, 0.07117990112304687, 0.07095708465576171, 0.07126640319824219, 0.07178854370117188, 0.07230226898193359, 0.07237036895751953, 0.07222198486328125, 0.0723683853149414, 0.07163164520263672, 0.07128656005859375, 0.07211551666259766, 0.07114115142822265, 0.07091705322265625, 0.07139328002929687, 0.07156326293945313, 0.07104467010498047, 0.07167635345458985, 0.071804931640625, 
0.07229609680175782, 0.073210205078125, 0.07190528106689453, 0.0723394546508789, 0.07141580963134765, 0.0717127685546875, 0.07112406158447265, 0.07168230438232422, 0.07135465240478515, 0.08324569702148438, 0.07128601837158204, 0.070662109375, 0.0702311019897461, 0.07076236724853516, 0.07041455841064453, 0.07012242889404297, 0.07036524963378907, 0.07075305938720704, 0.07056301116943359, 0.07076777648925782, 0.07050614166259765, 0.07185209655761719, 0.0748625259399414, 0.07318553924560547, 0.07165497589111328, 0.07150614166259765, 0.07072764587402344, 0.07113442993164062, 0.07059126281738282, 0.07065325164794922, 0.07039810943603515, 0.07073436737060547, 0.070204833984375, 0.07149622344970703, 0.07130461120605469, 0.07280502319335938, 0.07266508483886719, 0.07271424102783203, 0.07203868865966796, 0.0710994873046875, 0.07144322967529297, 0.07093222045898437, 0.07086294555664062, 0.0708629150390625, 0.07078701019287109, 0.0709365463256836, 0.07056492614746093, 0.07229519653320313, 0.0714651870727539, 0.07286780548095703, 0.07225363159179687, 0.07188614654541016, 0.0716746597290039, 0.07219094085693359, 0.07177471923828126, 0.07102259063720703, 0.07151427459716797, 0.07098287963867188, 0.07106854248046875, 0.07100214385986328, 0.0713338851928711, 0.07157689666748047, 0.07233936309814454, 0.07268428802490234, 0.07170480346679688, 0.07191324615478516, 0.07198515319824218, 0.07181520080566406, 0.07126934051513673, 0.07171155548095703, 0.0715265884399414, 0.0716759033203125, 0.08157004547119141, 0.07102457427978516, 0.070723388671875, 0.07035603332519531, 0.07088137817382813, 0.07047779083251954, 0.07085148620605469, 0.07056563568115234, 0.07066957092285156, 0.07085356903076172, 0.07045532989501953, 0.07049212646484375, 0.07219830322265625, 0.07426217651367187, 0.07274425506591797, 0.071480224609375, 0.07119033813476562, 0.07127468872070312, 0.07082189178466797, 0.07082592010498047, 0.07079347229003906, 0.0706819839477539, 0.07081619262695313, 0.0708136978149414, 0.0713543701171875, 0.07148544311523437, 0.07342694091796875, 0.0721981430053711, 0.07180902099609375, 0.07150118255615234, 0.07145116424560546, 0.072569091796875, 0.0712143325805664, 0.07145942687988281, 0.07093609619140626, 0.07081571197509766, 0.07096896362304687, 0.0711668472290039, 0.07110041809082031, 0.07235727691650391, 0.07255510711669921, 0.07271206665039062, 0.07198323059082032, 0.07178841400146484, 0.071823486328125, 0.07136870574951172, 0.07190528106689453, 0.07151628875732421, 0.0709468765258789, 0.07192095947265625, 0.07110707092285157, 0.0712499237060547, 0.07174505615234375, 0.07238639831542969, 0.07205919647216796, 0.07215545654296875, 0.07167747497558594, 0.07230716705322265, 0.071731201171875, 0.07166067504882813, 0.07251033782958985, 0.07140966033935547, 0.07098889923095703]",tokens/s,14.032035358161936,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 85929 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 90999 has 14.71 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 28.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,883.24096,1044.250624,0.0,641.728512,581.889536,s,1,8.9170947265625,8.9170947265625,0.0,8.9170947265625,8.9170947265625,8.9170947265625,8.9170947265625,[8.9170947265625],,kWh,4.472559825414161e-05,4.926348289683629e-06,1.365806648204182e-05,6.331001302586706e-05,,MB,1297.399808,1165.88544,0.0,750.780416,716.974592,s,10,1.111191452026367,0.1111191452026367,0.0004961361968309897,0.11105427169799806,0.11155762252807618,0.11188476448059081,0.11214647804260254,"[0.11089600372314454, 0.11100220489501954, 0.11119878387451172, 0.11110633850097656, 0.11093138885498047, 0.11221190643310547, 0.11148492431640625, 0.1113694076538086, 0.11080121612548828, 0.11018927764892578]",tokens/s,2303.833417123204,kWh,3.3652509350098575e-06,3.709206684147668e-07,2.2352795660001552e-06,5.97145116942478e-06,tokens/kWh,42870651.159433335,MB,1310.998528,1314.783232,0.0,899.678208,716.977152,s,10,25.552953857421876,2.5552953857421876,0.02358099213907067,2.5494432373046876,2.5858733154296876,2.589494519042969,2.5923914819335936,"[2.534916259765625, 2.59311572265625, 2.585068603515625, 2.58089453125, 2.566125244140625, 2.558412841796875, 2.539067138671875, 2.529815673828125, 2.525064208984375, 2.5404736328125]",tokens/s,24.654683897416266,kWh,7.363953531207328e-05,8.12257188611828e-06,3.0006607338596353e-05,0.0001117687145367879,tokens/kWh,563663.9936416553,,s,630,25.546632228851326,0.04055020988706559,0.0007195118308451369,0.04036038398742676,0.0415007926940918,0.04178554801940918,0.04327999259948731,"[0.04045849609375, 0.04036995315551758, 0.0401767692565918, 0.03993280029296875, 0.03995647811889649, 0.04266783905029297, 0.040359935760498046, 0.03991980743408203, 0.04005219268798828, 0.04000412750244141, 0.039929279327392576, 0.03994441604614258, 0.03975203323364258, 0.03994339370727539, 
0.04007196807861328, 0.04007955169677734, 0.04081439971923828, 0.04009164810180664, 0.03994809722900391, 0.03997510528564453, 0.04013260650634766, 0.0404106559753418, 0.04031660842895508, 0.04031296157836914, 0.04006550216674805, 0.03996652984619141, 0.039930240631103515, 0.039923713684082034, 0.03977142333984375, 0.040102046966552736, 0.03972902297973633, 0.03987286376953125, 0.03980892944335938, 0.040204734802246095, 0.040404705047607424, 0.04102787017822265, 0.03993600082397461, 0.03983491134643555, 0.039856864929199216, 0.03995964813232422, 0.03978947067260742, 0.03975897598266601, 0.04015951919555664, 0.039847969055175785, 0.03988947296142578, 0.03982342529296875, 0.040230846405029295, 0.040089599609375, 0.04006047821044922, 0.03993849563598633, 0.04049676895141602, 0.04028057479858398, 0.040040321350097656, 0.03993600082397461, 0.03995785522460937, 0.04005913543701172, 0.04124492645263672, 0.04120371246337891, 0.04119184112548828, 0.04097222518920898, 0.04166022491455078, 0.04078182220458984, 0.04074086380004883, 0.041371646881103515, 0.04100908660888672, 0.0403988151550293, 0.04039395141601562, 0.0404161262512207, 0.04031260681152344, 0.04025551986694336, 0.04013462448120117, 0.04015238571166992, 0.041027904510498044, 0.04145318222045898, 0.04124796676635742, 0.041301761627197266, 0.041783294677734374, 0.041528511047363284, 0.04257689666748047, 0.04307334518432617, 0.04175817489624024, 0.04082947158813476, 0.04110089492797851, 0.041260543823242186, 0.04068239974975586, 0.04094950485229492, 0.04153523254394531, 0.040624095916748044, 0.04146435165405273, 0.04064614486694336, 0.04041334533691406, 0.04049955368041992, 0.04096745681762695, 0.0418897590637207, 0.0416181755065918, 0.040979969024658204, 0.041121761322021486, 0.04098860931396484, 0.04358950424194336, 0.042963680267333985, 0.04186956787109375, 0.04114102554321289, 0.04105052947998047, 0.04128211212158203, 0.04154556655883789, 0.04128988647460938, 0.04070207977294922, 0.040796031951904295, 0.0408853759765625, 0.040796321868896486, 0.040721118927001955, 0.04066934585571289, 0.040759136199951175, 0.04063030242919922, 0.040564510345458986, 0.04104825592041016, 0.04088627243041992, 0.04093657684326172, 0.04138278579711914, 0.041176128387451175, 0.04151504135131836, 0.04108380889892578, 0.04119571304321289, 0.0411192626953125, 0.041606689453125, 0.04144614410400391, 0.04160700988769531, 0.0417380485534668, 0.04230899047851563, 0.041708160400390625, 0.04164396667480469, 0.04174182510375977, 0.04139235305786133, 0.041503326416015625, 0.04190972900390625, 0.041051456451416016, 0.041503807067871094, 0.04136748886108398, 0.04111561584472656, 0.041046241760253906, 0.0409876480102539, 0.04108303833007813, 0.04044416046142578, 0.04046499252319336, 0.040166751861572265, 0.041511646270751955, 0.04090185546875, 0.04141516876220703, 0.04127743911743164, 0.04131020736694336, 0.041323585510253905, 0.04099168014526367, 0.04157571029663086, 0.04137993621826172, 0.04093113708496094, 0.04104070281982422, 0.04082640075683594, 0.04049555206298828, 0.04062192153930664, 0.040392894744873044, 0.04025766372680664, 0.040533374786376954, 0.040571392059326174, 0.040752574920654296, 0.042117439270019534, 0.040608287811279294, 0.04029820632934571, 0.04033126449584961, 0.04052089691162109, 0.04037039947509766, 0.041793312072753906, 0.0410261116027832, 0.040638721466064454, 0.04043065643310547, 0.040511775970458984, 0.040452896118164064, 0.04053932952880859, 0.04062678527832031, 0.04029654312133789, 0.04037017440795899, 0.040275966644287106, 
0.040142528533935545, 0.039904926300048826, 0.04092614364624023, 0.04226387023925781, 0.041689502716064454, 0.041643489837646486, 0.041880096435546875, 0.04178739166259766, 0.04149459075927735, 0.04121846389770508, 0.04038975906372071, 0.043404064178466796, 0.04127660751342774, 0.040378559112548826, 0.04019878387451172, 0.04082886505126953, 0.040734783172607425, 0.040654624938964844, 0.04045606231689453, 0.04040963363647461, 0.04021052932739258, 0.04022857666015625, 0.04081190490722656, 0.04464915084838867, 0.041172927856445315, 0.040521728515625, 0.040336734771728514, 0.041025920867919924, 0.040804447174072264, 0.04118918228149414, 0.040958335876464844, 0.040785919189453124, 0.040681472778320314, 0.04087839889526367, 0.04127436828613281, 0.04134902572631836, 0.04174515151977539, 0.04220111846923828, 0.04183052825927734, 0.04162572860717773, 0.040675071716308596, 0.04118844985961914, 0.04027276611328125, 0.04045008087158203, 0.040311935424804685, 0.04033164978027344, 0.040892318725585936, 0.040251998901367186, 0.04147398376464844, 0.04041286468505859, 0.04055855941772461, 0.04038288116455078, 0.040431648254394534, 0.040603103637695315, 0.04057548904418945, 0.040688865661621096, 0.04029859161376953, 0.040402912139892576, 0.040525791168212894, 0.04023583984375, 0.04059686279296875, 0.040712223052978516, 0.04091059112548828, 0.041683456420898435, 0.041560001373291015, 0.0415522232055664, 0.04157235336303711, 0.04141881561279297, 0.04187334442138672, 0.04086723327636719, 0.04083158493041992, 0.04082099151611328, 0.04079999923706055, 0.04071247863769531, 0.041352577209472656, 0.04138336181640625, 0.04128374481201172, 0.04150758361816406, 0.04143718338012695, 0.040718143463134765, 0.040538272857666015, 0.04076752090454101, 0.04055449676513672, 0.04043977737426758, 0.04057500839233399, 0.04096361541748047, 0.041086814880371095, 0.04104211044311523, 0.04141100692749024, 0.04088419342041016, 0.04129571151733399, 0.04067756652832031, 0.04057241439819336, 0.041972225189208984, 0.04119327926635742, 0.04042361450195313, 0.040804351806640625, 0.040738815307617186, 0.04065184020996094, 0.04131702423095703, 0.04062236785888672, 0.040384159088134766, 0.0410074577331543, 0.04182022476196289, 0.04236076736450195, 0.04158687973022461, 0.04169504165649414, 0.04125408172607422, 0.040753982543945313, 0.040597503662109374, 0.04159660720825195, 0.040825153350830076, 0.040400894165039065, 0.04006252670288086, 0.03989958572387695, 0.040302593231201174, 0.04016742324829101, 0.04003132629394531, 0.04006390380859375, 0.03998054504394531, 0.040030017852783206, 0.0399031982421875, 0.03989168167114258, 0.039897087097167966, 0.039890815734863284, 0.03991487884521484, 0.03981593704223633, 0.039927806854248044, 0.0400076789855957, 0.039757823944091795, 0.03978403091430664, 0.04020470428466797, 0.04098252868652344, 0.04204512023925781, 0.04313840103149414, 0.04139187240600586, 0.04098345565795898, 0.04093302536010742, 0.042893150329589846, 0.041263614654541016, 0.040953857421875, 0.040095073699951175, 0.03976668930053711, 0.04001788711547852, 0.039952415466308594, 0.03980492782592773, 0.039720863342285154, 0.03989718246459961, 0.04003424072265625, 0.040496257781982424, 0.04052304077148437, 0.042355392456054686, 0.04027081680297852, 0.04131536102294922, 0.040217567443847656, 0.039982593536376954, 0.040452224731445316, 0.040419391632080075, 0.04053971099853516, 0.04065971374511719, 0.0401710090637207, 0.04031324768066406, 0.04015625762939453, 0.04018380737304687, 0.04025609588623047, 0.04011254501342774, 0.0398961296081543, 
0.0400516471862793, 0.04028006362915039, 0.04022195053100586, 0.04023270416259766, 0.040188926696777344, 0.04084870529174805, 0.04025619125366211, 0.04004767990112305, 0.040006591796875, 0.04396236801147461, 0.04029574584960938, 0.04104579162597656, 0.04036854553222656, 0.040056705474853516, 0.04042544174194336, 0.03997743988037109, 0.04115065765380859, 0.04080047988891602, 0.040336769104003904, 0.040264095306396484, 0.039946239471435545, 0.03992959976196289, 0.039981311798095706, 0.0404189453125, 0.040035774230957034, 0.040870849609375, 0.0400445442199707, 0.04066041564941406, 0.0414664306640625, 0.04440063858032227, 0.040241886138916015, 0.040255584716796876, 0.0400873908996582, 0.040661151885986326, 0.040921089172363284, 0.04035737609863281, 0.03995084762573242, 0.03990937423706055, 0.040583168029785156, 0.040515712738037106, 0.040837024688720705, 0.040429374694824216, 0.04064259338378906, 0.03993183898925781, 0.04030892944335938, 0.040281696319580076, 0.04015923309326172, 0.04003023910522461, 0.03990070343017578, 0.04057379150390625, 0.03993600082397461, 0.039831550598144534, 0.04007126235961914, 0.04048031997680664, 0.040673633575439454, 0.04051254272460938, 0.0401192626953125, 0.040323070526123043, 0.03984812927246094, 0.03992966461181641, 0.03985203170776367, 0.03999071884155273, 0.04050739288330078, 0.04076188659667969, 0.03983977508544922, 0.04028364944458008, 0.04035174560546875, 0.04000105667114258, 0.040581119537353515, 0.039916481018066406, 0.04024528121948242, 0.040054656982421874, 0.039886974334716795, 0.03985212707519531, 0.03989084625244141, 0.03995401763916016, 0.04043619155883789, 0.04130604934692383, 0.04092870330810547, 0.04113875198364258, 0.04036614227294922, 0.04055830383300781, 0.04199446487426758, 0.04023251342773437, 0.040015487670898436, 0.04020633697509766, 0.04060979080200195, 0.04026451110839844, 0.039839744567871094, 0.03988703918457031, 0.03973289489746094, 0.0408004150390625, 0.03993708801269531, 0.040526752471923826, 0.04032921600341797, 0.04007030487060547, 0.039873374938964846, 0.03990937423706055, 0.03978803253173828, 0.03983603286743164, 0.03968163299560547, 0.03990377426147461, 0.039736446380615235, 0.0400937614440918, 0.0402327995300293, 0.03963536071777344, 0.039921886444091795, 0.04064700698852539, 0.041009151458740234, 0.040459808349609376, 0.04056947326660156, 0.0401611213684082, 0.040877376556396484, 0.04036083221435547, 0.04056454467773438, 0.04053782272338867, 0.040722782135009766, 0.040148128509521486, 0.0400530891418457, 0.03991801452636719, 0.04002816009521484, 0.03981721496582031, 0.03998006439208984, 0.040131614685058596, 0.0402042236328125, 0.040072574615478515, 0.040187614440917965, 0.03991651153564453, 0.03992160034179688, 0.04001574325561524, 0.03988835144042969, 0.041509056091308595, 0.040104286193847656, 0.039800960540771486, 0.03988681411743164, 0.039868446350097654, 0.039923934936523436, 0.040013118743896486, 0.040034847259521486, 0.04000470352172852, 0.04023139190673828, 0.040034496307373046, 0.039966911315917966, 0.039880702972412106, 0.03993206405639648, 0.039829345703125, 0.03987251281738281, 0.040465503692626956, 0.04040284729003906, 0.04017772674560547, 0.040161502838134765, 0.040941761016845706, 0.040094337463378905, 0.04008086395263672, 0.04034400177001953, 0.03998720169067383, 0.040013248443603516, 0.04031903839111328, 0.04005337524414063, 0.03989894485473633, 0.03991759872436523, 0.039884063720703126, 0.03991628646850586, 0.04014518356323242, 0.039995105743408206, 0.03984588623046875, 0.03968345642089844, 
0.03970140838623047, 0.03971372985839844, 0.03985251235961914, 0.03992563247680664, 0.04013510513305664, 0.040382110595703125, 0.04028246307373047, 0.03986182403564453, 0.040599998474121095, 0.039847934722900394, 0.0397496337890625, 0.03981452941894531, 0.04008937454223633, 0.03996662521362305, 0.03986508941650391, 0.039678142547607424, 0.03972431945800781, 0.03988057708740234, 0.039666526794433596, 0.03973734283447266, 0.040016929626464845, 0.039717857360839846, 0.040033760070800783, 0.04, 0.039906494140625, 0.03978294372558594, 0.03981686401367188, 0.039759838104248046, 0.0398375358581543, 0.03975571060180664, 0.03979679870605469, 0.04068387222290039, 0.039843551635742186, 0.0400043830871582, 0.03986995315551758, 0.03984435272216797, 0.04017552185058594, 0.043337825775146485, 0.04083280181884766, 0.040139198303222656, 0.04037152099609375, 0.03986284637451172, 0.03994204711914062, 0.03977769470214844, 0.04072934341430664, 0.041500511169433596, 0.04041113662719727, 0.0399749755859375, 0.04022268676757813, 0.03993596649169922, 0.040562496185302735, 0.03993824005126953, 0.040057376861572264, 0.040169471740722655, 0.04205567932128906, 0.043640830993652346, 0.04045734405517578, 0.04037823867797852, 0.040151679992675784, 0.04002444839477539, 0.04030022430419922, 0.040329536437988284, 0.040079360961914064, 0.03985184097290039, 0.03987065505981445, 0.03982662582397461, 0.04011814498901367, 0.04015955352783203, 0.04034828948974609, 0.04010371017456055, 0.03997513580322266, 0.041373695373535156, 0.04019196701049805, 0.040153343200683596, 0.04005660629272461, 0.04005817413330078, 0.04022249603271484, 0.04014531326293945, 0.04025932693481445, 0.040274688720703125, 0.03982950210571289, 0.040135871887207034, 0.03998764801025391, 0.03990390396118164, 0.04007702255249023, 0.04009369659423828, 0.04011212921142578, 0.04062822341918945, 0.04012428665161133, 0.03998527908325195, 0.040683521270751956, 0.040130561828613284, 0.03987830352783203, 0.040212833404541015, 0.04002352142333984, 0.040329536437988284, 0.040355262756347654, 0.04030748748779297, 0.040390655517578124, 0.040630271911621094, 0.040381919860839846, 0.0403111686706543, 0.04019625473022461, 0.0401715202331543, 0.040054271697998044, 0.03993065643310547, 0.04039379119873047, 0.04086547088623047, 0.04066726303100586, 0.04031983947753906, 0.04115689468383789, 0.04027363204956055, 0.04037007904052734, 0.04017571258544922, 0.04023295974731445]",tokens/s,24.660784809377095,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch 
raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 302.12 MiB is free. Process 160432 has 14.44 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 132.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device 
new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 155342 has 14.69 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 203.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,822.530048,8535.277568,0.0,8132.755456,7824.681472,s,1,19.363349609375,19.363349609375,0.0,19.363349609375,19.363349609375,19.363349609375,19.363349609375,[19.363349609375],,kWh,0.00035991572860831034,3.9689742300941714e-05,0.0001184187058459879,0.00051802417675524,,MB,1317.277696,9539.813376,0.0,9124.708352,8500.500992,s,10,17.646905761718752,1.764690576171875,0.007826338434277208,1.766254577636719,1.7710271606445311,1.771575653076172,1.7720144470214845,"[1.744111328125, 1.758710693359375, 1.764703857421875, 1.7649429931640626, 1.764416015625, 1.767566162109375, 1.76920263671875, 1.77022265625, 1.7721241455078125, 1.7709052734375]",tokens/s,145.0679249136912,kWh,5.141618405000069e-05,5.67070418568476e-06,3.420905514500139e-05,9.129594338068683e-05,tokens/kWh,2804067.6345555508,MB,1341.882368,9541.910528,0.0,9126.805504,8500.503552,s,10,83.08388867187502,8.308388867187501,0.013627102147594676,8.31012353515625,8.32286650390625,8.324238916015625,8.325336845703125,"[8.2831162109375, 8.291119140625, 8.296341796875, 8.3096630859375, 8.305431640625, 8.318150390625, 8.310583984375, 8.3213095703125, 8.325611328125, 8.3225615234375]",tokens/s,7.582697561112871,kWh,0.00024312225124250062,2.6818406767784186e-05,0.00016131454571819833,0.00043125520372848305,tokens/kWh,146085.19376768984,,s,630,83.08128448486335,0.13187505473787822,0.0017931074401041948,0.13183060455322265,0.13309696197509766,0.133570947265625,0.1425301985168457,"[0.14336204528808594, 0.13153074645996093, 0.1298570556640625, 0.12978839111328125, 0.13047398376464844, 0.1301360626220703, 0.12976332092285156, 0.13173286437988282, 0.13229116821289064, 0.13057388305664064, 0.1300455322265625, 
0.13068582153320313, 0.13145497131347655, 0.13009446716308593, 0.1302487335205078, 0.13198602294921874, 0.13237615966796876, 0.13121133422851564, 0.1311130828857422, 0.13177436828613281, 0.13055007934570312, 0.13019664001464842, 0.13174844360351562, 0.13151068115234374, 0.13099810791015626, 0.13145021057128906, 0.13109111022949219, 0.13215948486328125, 0.13084669494628906, 0.12996528625488282, 0.13167709350585938, 0.13198326110839845, 0.13163725280761718, 0.13153280639648438, 0.13204031372070313, 0.13062384033203125, 0.1300951385498047, 0.13071907043457032, 0.1315681915283203, 0.1315998077392578, 0.13230464172363282, 0.13229557800292968, 0.13136300659179687, 0.13183570861816407, 0.1303166046142578, 0.13028457641601562, 0.13196354675292968, 0.13137100219726563, 0.13180117797851562, 0.13251779174804687, 0.13241661071777344, 0.1307058563232422, 0.1302034912109375, 0.13293411254882812, 0.13179241943359374, 0.1319983367919922, 0.131716796875, 0.1328172149658203, 0.1318154296875, 0.13043302917480468, 0.13119631958007813, 0.13226637268066407, 0.13200611877441407, 0.143204345703125, 0.1316884460449219, 0.12993276977539062, 0.12972067260742187, 0.1299027862548828, 0.13164054870605468, 0.13056224060058594, 0.13329866027832032, 0.13185142517089843, 0.1317809295654297, 0.13043510437011718, 0.12992063903808593, 0.1299640350341797, 0.1303822021484375, 0.13200796508789062, 0.1327744903564453, 0.13241754150390625, 0.13149501037597655, 0.13011651611328126, 0.1298695373535156, 0.12989468383789063, 0.13122969055175782, 0.1325194549560547, 0.13213945007324218, 0.13252511596679686, 0.13087849426269532, 0.13031826782226563, 0.13000090026855468, 0.1309020233154297, 0.13178598022460938, 0.13195062255859374, 0.13282701110839842, 0.13154106140136718, 0.13018540954589844, 0.1319508819580078, 0.13054124450683594, 0.1317091522216797, 0.13170524597167968, 0.13203456115722656, 0.1323469696044922, 0.13075753784179686, 0.13151846313476562, 0.13171705627441407, 0.13039418029785158, 0.13111820983886718, 0.13191871643066405, 0.13304550170898438, 0.1316800994873047, 0.13178562927246093, 0.13167820739746094, 0.13195468139648436, 0.1304637451171875, 0.1308625030517578, 0.13187747192382812, 0.1323520050048828, 0.13158195495605468, 0.1319403533935547, 0.13213081359863282, 0.1316884460449219, 0.1309165802001953, 0.13259344482421875, 0.1328037109375, 0.1321460418701172, 0.14190634155273438, 0.13134243774414062, 0.1298836212158203, 0.12979869079589842, 0.12982441711425782, 0.1298189697265625, 0.13039942932128906, 0.1342779541015625, 0.131917724609375, 0.13192195129394532, 0.1307213134765625, 0.12987242126464843, 0.12984906005859376, 0.13066677856445313, 0.13265087890625, 0.13286566162109376, 0.13177702331542968, 0.131845947265625, 0.13087875366210938, 0.12998133850097657, 0.13000630187988282, 0.13139222717285157, 0.13267088317871092, 0.132008544921875, 0.13263626098632814, 0.13088096618652345, 0.1305855712890625, 0.13189462280273437, 0.13117251586914064, 0.13186099243164062, 0.1329332733154297, 0.13181578063964844, 0.13154304504394532, 0.1301930847167969, 0.13188946533203125, 0.1305518035888672, 0.1319219207763672, 0.13300502014160157, 0.1322183380126953, 0.13236306762695313, 0.13065011596679688, 0.13077667236328125, 0.1317728271484375, 0.13066009521484376, 0.13306002807617187, 0.13196780395507812, 0.13321830749511718, 0.13182550048828126, 0.13065373229980468, 0.13204934692382814, 0.13060319519042968, 0.13144064331054686, 0.13185775756835938, 0.13299778747558594, 0.1323984375, 0.13216015625, 0.1313955841064453, 0.1317061767578125, 
0.1303948211669922, 0.13129318237304688, 0.13311180114746093, 0.132117919921875, 0.13223158264160156, 0.143544677734375, 0.1313217010498047, 0.12981468200683594, 0.13000895690917968, 0.12984947204589845, 0.12993280029296875, 0.13051910400390626, 0.1355200958251953, 0.132998046875, 0.1320813446044922, 0.13028108215332032, 0.12986344909667968, 0.12978031921386718, 0.13010105895996094, 0.13377247619628907, 0.13362640380859375, 0.13279075622558595, 0.13153805541992186, 0.1301246795654297, 0.12995123291015626, 0.1299311981201172, 0.13198176574707032, 0.13221490478515624, 0.1337159729003906, 0.13207962036132812, 0.1309388732910156, 0.13003366088867188, 0.13001139831542968, 0.13168336486816407, 0.1325003204345703, 0.13312806701660157, 0.13277743530273436, 0.1318507843017578, 0.13126422119140624, 0.13016444396972657, 0.13003628540039064, 0.13245234680175783, 0.13251484680175782, 0.13305340576171876, 0.13281893920898438, 0.13197500610351562, 0.13076495361328125, 0.13032383728027344, 0.13186111450195312, 0.13207347106933592, 0.13256105041503907, 0.13247811889648436, 0.13285446166992188, 0.13183590698242187, 0.13044122314453124, 0.1310834197998047, 0.13194940185546875, 0.13220672607421874, 0.13203648376464844, 0.13280487060546875, 0.13239033508300782, 0.1323726043701172, 0.1308323516845703, 0.13218380737304689, 0.13299533081054687, 0.13261846923828124, 0.13322409057617188, 0.13294435119628906, 0.14304421997070313, 0.13134477233886718, 0.13023794555664062, 0.1299113006591797, 0.12978346252441406, 0.1299019775390625, 0.13078009033203125, 0.1349376983642578, 0.13267216491699219, 0.13080982971191407, 0.12987411499023438, 0.13001449584960936, 0.12988088989257812, 0.13069517517089843, 0.13310771179199218, 0.1341441650390625, 0.1322125701904297, 0.13060096740722657, 0.12994345092773438, 0.13179498291015626, 0.13028153991699218, 0.13089170837402345, 0.13307090759277343, 0.13284573364257812, 0.13158297729492188, 0.13047439575195313, 0.13139398193359375, 0.13139762878417968, 0.13032652282714843, 0.132421630859375, 0.13237449645996094, 0.13267884826660156, 0.13083120727539063, 0.13158399963378906, 0.13083648681640625, 0.1320201873779297, 0.13136221313476562, 0.13190774536132813, 0.13297235107421876, 0.13188552856445312, 0.13194464111328125, 0.13141996765136718, 0.13190162658691407, 0.13233935546875, 0.1313723907470703, 0.13195376586914062, 0.13183753967285156, 0.1314981689453125, 0.13165904235839843, 0.13252899169921875, 0.13231103515625, 0.13177407836914062, 0.13224179077148437, 0.13288426208496093, 0.13220681762695313, 0.13076480102539062, 0.13314598083496093, 0.13139353942871093, 0.13193075561523437, 0.13202841186523437, 0.13261935424804688, 0.13261302185058593, 0.13199880981445314, 0.14318243408203124, 0.13144454956054688, 0.13009919738769532, 0.129778076171875, 0.13156947326660157, 0.13044956970214844, 0.13171282958984376, 0.13397943115234376, 0.13325946044921874, 0.13134083557128906, 0.1299148864746094, 0.13018931579589843, 0.1317184295654297, 0.1304071350097656, 0.131548828125, 0.13340403747558593, 0.13200236511230468, 0.13047235107421876, 0.13022207641601563, 0.13185420227050781, 0.13176812744140626, 0.13057049560546874, 0.13237254333496093, 0.13268377685546875, 0.13148159790039063, 0.13032585144042969, 0.1329670715332031, 0.13165180969238283, 0.13168966674804689, 0.13182217407226562, 0.13199974060058595, 0.13252931213378907, 0.13171388244628907, 0.1312761993408203, 0.13290147399902344, 0.1317264709472656, 0.13138624572753907, 0.13256454467773438, 0.1320821075439453, 0.13171440124511719, 
0.13170140075683595, 0.13277781677246095, 0.13264834594726563, 0.13107685852050782, 0.13311386108398438, 0.13189045715332032, 0.1330501708984375, 0.13226821899414062, 0.1315499267578125, 0.1336269073486328, 0.1316116180419922, 0.13197071838378907, 0.13181167602539062, 0.132834716796875, 0.13242839050292968, 0.13191302490234375, 0.13297254943847656, 0.1325198974609375, 0.13114151000976562, 0.13256358337402344, 0.13150405883789062, 0.13311318969726563, 0.13203756713867187, 0.14307554626464844, 0.13148374938964844, 0.130052001953125, 0.12977561950683594, 0.12978994750976564, 0.1297810516357422, 0.13145993041992188, 0.1358720703125, 0.13280863952636718, 0.13074978637695311, 0.12978012084960938, 0.12980899047851563, 0.12977516174316406, 0.13207391357421874, 0.13401695251464843, 0.13310581970214844, 0.13168374633789062, 0.13159043884277344, 0.13019711303710937, 0.13001280212402344, 0.12990264892578124, 0.1322382354736328, 0.13339033508300782, 0.1324522247314453, 0.13234188842773437, 0.1317560272216797, 0.13034895324707033, 0.1299737548828125, 0.13119754028320313, 0.13287014770507813, 0.13217304992675782, 0.13245516967773438, 0.13263381958007814, 0.13162985229492188, 0.13023638916015626, 0.1309163818359375, 0.13186770629882813, 0.13270710754394532, 0.1325897216796875, 0.1321158447265625, 0.13225798034667968, 0.13136326599121093, 0.13107609558105468, 0.1317417297363281, 0.13295849609375, 0.13201478576660156, 0.13309645080566407, 0.13194650268554686, 0.13230624389648438, 0.13124195861816407, 0.1321267852783203, 0.13171363830566407, 0.13184005737304688, 0.13245053100585938, 0.13272860717773438, 0.132140380859375, 0.13147203063964844, 0.13195161437988281, 0.13210520935058595, 0.13171717834472657, 0.1330298309326172, 0.13210806274414064, 0.13224368286132812, 0.14255532836914062, 0.13150198364257812, 0.1300581512451172, 0.1297407989501953, 0.12979811096191407, 0.129762939453125, 0.1313140869140625, 0.1354895324707031, 0.13285372924804686, 0.13116549682617187, 0.13005398559570314, 0.13001210021972656, 0.12991276550292968, 0.13153468322753906, 0.13397116088867186, 0.13379049682617186, 0.13211782836914063, 0.13096022033691407, 0.12997625732421875, 0.12997978210449218, 0.13207212829589843, 0.1324564514160156, 0.13347634887695312, 0.1323251495361328, 0.13227638244628906, 0.13064198303222657, 0.13011311340332032, 0.13117890930175782, 0.13267881774902343, 0.1340015106201172, 0.13238067626953126, 0.13272816467285156, 0.13203485107421875, 0.13102896118164062, 0.13016233825683593, 0.13136358642578125, 0.13353575134277343, 0.13325222778320311, 0.13269296264648436, 0.13205494689941405, 0.13176368713378905, 0.13218460083007813, 0.1310576629638672, 0.133093017578125, 0.13262217712402344, 0.13324954223632812, 0.13225100708007811, 0.1314370574951172, 0.13147520446777344, 0.13238674926757812, 0.13162950134277343, 0.13316242980957033, 0.1323643798828125, 0.13323721313476564, 0.13201408386230468, 0.13140534973144533, 0.13207600402832032, 0.13188710021972655, 0.1322001647949219, 0.13241343688964843, 0.13359951782226562, 0.13199942016601562, 0.13253453063964843, 0.14246867370605468, 0.13145494079589845, 0.13006463623046874, 0.12988858032226563, 0.12987942504882813, 0.1297681579589844, 0.13288249206542968, 0.13623699951171875, 0.1331955261230469, 0.13142796325683595, 0.13084022521972657, 0.12984214782714842, 0.12985139465332032, 0.13211366271972655, 0.1340648956298828, 0.13347196960449217, 0.1320020751953125, 0.13168435668945314, 0.13094297790527343, 0.13013754272460937, 0.1300198974609375, 0.13323263549804687, 
0.13358489990234376, 0.13228195190429687, 0.13265116882324218, 0.1308275909423828, 0.13006658935546875, 0.13043586730957032, 0.13207267761230468, 0.1328873291015625, 0.13237452697753907, 0.13350898742675782, 0.1323245086669922, 0.13124911499023437, 0.13036749267578124, 0.13162495422363282, 0.13256410217285156, 0.13307321166992186, 0.1330386505126953, 0.1322208709716797, 0.13198057556152343, 0.13064408874511718, 0.13152928161621094, 0.1317540740966797, 0.1331015625, 0.13238272094726564, 0.13343046569824218, 0.131887939453125, 0.13213900756835936, 0.13121945190429687, 0.13215904235839843, 0.132291015625, 0.13246975708007813, 0.1329792022705078, 0.1327229766845703, 0.13189126586914063, 0.1320592956542969, 0.1321942138671875, 0.13175613403320313, 0.13251513671875, 0.13355389404296875, 0.132567138671875, 0.133446533203125, 0.1422878723144531, 0.13119500732421874, 0.12993263244628905, 0.12983351135253907, 0.12989645385742188, 0.12986778259277343, 0.1321285400390625, 0.13679624938964843, 0.13272837829589842, 0.13104570007324218, 0.13024691772460936, 0.1298588409423828, 0.13105203247070313, 0.1312617950439453, 0.13388890075683593, 0.13333056640625, 0.13278399658203124, 0.13157017517089845, 0.13045350646972656, 0.13162495422363282, 0.1302118377685547, 0.13238412475585937, 0.13322093200683593, 0.1323868865966797, 0.13302989196777343, 0.1311846466064453, 0.13088307189941406, 0.13144729614257813, 0.13087344360351563, 0.13242767333984376, 0.13273843383789063, 0.13234034729003907, 0.13309295654296874, 0.1311664581298828, 0.13195036315917968, 0.13057472229003905, 0.13156556701660158, 0.13360537719726562, 0.1324416046142578, 0.1329402618408203, 0.13209324645996093, 0.1320885467529297, 0.13104537963867188, 0.13059686279296875, 0.13255474853515625, 0.13312527465820312, 0.13212144470214843, 0.1330931854248047, 0.13197273254394531, 0.13248159790039063, 0.13082009887695312, 0.13170278930664062, 0.13233766174316405, 0.132384765625, 0.13343257141113282, 0.1324297332763672, 0.13207638549804687, 0.13173504638671876, 0.13123635864257813, 0.13246054077148436, 0.13283731079101563, 0.13267359924316408, 0.1327626953125]",tokens/s,7.582935241146648,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,2191.224832,7355.695104,0.0,6960.447488,6722.822144,s,1,16.2267978515625,16.2267978515625,0.0,16.2267978515625,16.2267978515625,16.2267978515625,16.2267978515625,[16.2267978515625],,kWh,0.0002513102884541619,2.7712902526142117e-05,9.369896384800458e-05,0.0003727221548283086,,MB,1736.82688,7921.926144,0.0,7514.095616,7161.403392,s,10,10.635327636718749,1.063532763671875,0.0065886517062641575,1.065108154296875,1.0696147094726562,1.0700051086425781,1.0703174279785157,"[1.0485753173828125, 1.0566400146484376, 1.0595382080078124, 1.06260791015625, 1.06538623046875, 1.0703955078125, 1.064830078125, 1.068911865234375, 1.0695279541015625, 
1.06891455078125]",tokens/s,240.70720596905096,kWh,3.092942336749653e-05,3.410400895626253e-06,2.0545266436200472e-05,5.488509069932325e-05,tokens/kWh,4664290.369901066,MB,1740.906496,7924.023296,0.0,7516.192768,7161.405952,s,10,51.21002734375,5.121002734375,0.015476669790559302,5.1233251953125,5.136107666015625,5.138426879882812,5.140282250976562,"[5.08609130859375, 5.1046337890625, 5.11413916015625, 5.12413330078125, 5.1189990234375, 5.12251708984375, 5.134634765625, 5.12854052734375, 5.14074609375, 5.13559228515625]",tokens/s,12.302278141175202,kWh,0.00015038089679375162,1.6588737902042466e-05,9.990805214859788e-05,0.000266877686844392,tokens/kWh,236063.19713319952,,s,630,51.20619602966308,0.08127967623756045,0.0018860073454918297,0.08094657516479492,0.0823894271850586,0.08315366783142089,0.09381792877197266,"[0.0912479019165039, 0.07957987213134765, 0.07925555419921874, 0.07937433624267579, 0.08023040008544922, 0.08124620819091796, 0.08011135864257812, 0.08026547241210938, 0.08010137939453126, 0.08052531433105468, 0.08065638732910156, 0.08385330963134766, 0.0819240951538086, 0.08077295684814453, 0.08018905639648438, 0.07938921356201172, 0.0794227523803711, 0.079968994140625, 0.0804659194946289, 0.08001945495605468, 0.08020582580566406, 0.08038195037841797, 0.08086246490478516, 0.08194124603271484, 0.08153459167480469, 0.08074073791503907, 0.08018892669677734, 0.08021814727783202, 0.08018582153320312, 0.07973654174804687, 0.08006275177001954, 0.08021622467041016, 0.07965270233154297, 0.08005587005615235, 0.08111353302001953, 0.08077516937255859, 0.08155340576171875, 0.08121699523925781, 0.08037814331054688, 0.08100032043457031, 0.08027993774414062, 0.07969996643066406, 0.08012799835205078, 0.08027945709228515, 0.08017314910888672, 0.0802877426147461, 0.08030003356933593, 0.08144300842285156, 0.08111027526855469, 0.08107577514648437, 0.08076185607910157, 0.08099430084228515, 0.08072988891601562, 0.0804268798828125, 0.08029833221435546, 0.08038377380371094, 0.08031254577636719, 0.08053043365478516, 0.08080623626708984, 0.08154940795898437, 0.08113414764404298, 0.08108441925048829, 0.08129945373535157, 0.09435372924804687, 0.08006655883789063, 0.08025702667236329, 0.0800030746459961, 0.07945011138916015, 0.07933542633056641, 0.08026521301269532, 0.07999897766113281, 0.07947673797607421, 0.07996415710449219, 0.08011711883544922, 0.08263539123535156, 0.08238899230957031, 0.08151411437988282, 0.08077327728271484, 0.08043087768554688, 0.08017078399658203, 0.08007091522216797, 0.07976150512695312, 0.08062159729003907, 0.07995625305175781, 0.07945970916748046, 0.08088585662841796, 0.08200204467773438, 0.08191836547851562, 0.08275138854980468, 0.082753662109375, 0.08134393310546875, 0.08036822509765625, 0.08074233245849609, 0.08087347412109375, 0.08098127746582032, 0.08084963226318359, 0.08083251190185547, 0.08084889221191406, 0.08141619110107422, 0.08219033813476563, 0.08148786926269531, 0.08098611450195313, 0.08090764617919922, 0.08050342559814454, 0.08007065582275391, 0.08074797058105469, 0.08034499359130859, 0.07982147216796875, 0.08024269104003906, 0.08149833679199218, 0.08098918151855469, 0.08154000091552735, 0.08147545623779297, 0.08153702545166015, 0.08103695678710937, 0.08090876770019531, 0.08038326263427735, 0.08059555053710937, 0.07993344116210938, 0.08026525115966797, 0.08102051544189454, 0.08106768035888672, 0.08053628540039062, 0.08138883209228516, 0.08159715270996094, 0.0815308837890625, 0.09604431915283203, 0.07986659240722656, 0.07929776000976563, 0.0799813461303711, 
0.07997235107421875, 0.08001741027832031, 0.08000511932373047, 0.08023859405517578, 0.0799477767944336, 0.08005244445800781, 0.07999830627441407, 0.08309961700439453, 0.08236489868164062, 0.08140185546875, 0.08074034881591798, 0.08070931243896484, 0.08064851379394532, 0.080048095703125, 0.08004547119140625, 0.08055052947998047, 0.07995174407958984, 0.0802939224243164, 0.08087948608398438, 0.08234210968017579, 0.08290303802490234, 0.08256454467773437, 0.08187142181396484, 0.08080384063720703, 0.08158003234863281, 0.08092876434326172, 0.08075263977050781, 0.08085913848876954, 0.08078540802001953, 0.08086118316650391, 0.08097574615478516, 0.08206563568115234, 0.08169171142578124, 0.0811448974609375, 0.08075856018066406, 0.08094432067871093, 0.0808845443725586, 0.08079154968261719, 0.08033894348144531, 0.08024054718017579, 0.08033084869384766, 0.08025833892822265, 0.0807943344116211, 0.08167529296875, 0.08167318725585937, 0.0811599349975586, 0.0811087646484375, 0.08142281341552735, 0.08120524597167969, 0.08064205169677735, 0.08113094329833985, 0.0803702392578125, 0.08083455657958985, 0.08059903717041016, 0.081438720703125, 0.08112742614746093, 0.08179917144775391, 0.08218390655517578, 0.08176863861083984, 0.09386358642578126, 0.08034320068359375, 0.08071186828613282, 0.08060313415527344, 0.08064614105224609, 0.0806789093017578, 0.08063382720947265, 0.08141388702392578, 0.08049282836914062, 0.08069900512695312, 0.08066464233398438, 0.08392729949951172, 0.08181561279296876, 0.08127648162841797, 0.0806732177734375, 0.08010502624511719, 0.08056204986572266, 0.08040096282958985, 0.0801413116455078, 0.08054022216796874, 0.08021759796142579, 0.0800777587890625, 0.08030207824707031, 0.08222329711914063, 0.08170041656494141, 0.08155366516113281, 0.08152678680419922, 0.08099839782714843, 0.08121676635742188, 0.0801239013671875, 0.08035814666748047, 0.0809298553466797, 0.08049900817871093, 0.08088435363769532, 0.08118393707275391, 0.08090214538574218, 0.08159318542480469, 0.08166159820556641, 0.08193785858154297, 0.08154112243652344, 0.08229071807861328, 0.0824349136352539, 0.08188630676269532, 0.08117750549316406, 0.08098989105224609, 0.08193446350097656, 0.08127625274658203, 0.08158284759521485, 0.08118486022949219, 0.08104473876953125, 0.0820921630859375, 0.08099724578857422, 0.08199689483642578, 0.08092534637451172, 0.08036351776123046, 0.08081798553466797, 0.08098941040039062, 0.08083145904541016, 0.08105891418457031, 0.08137731170654297, 0.08109053039550781, 0.08191887664794922, 0.08187699127197266, 0.09404640197753907, 0.07928697967529297, 0.07999833679199218, 0.0803662109375, 0.08040447998046875, 0.0807383041381836, 0.08071939086914062, 0.08073574066162109, 0.08061846160888672, 0.0807356185913086, 0.08130214691162109, 0.0839527359008789, 0.08259196472167969, 0.08088355255126953, 0.08041353607177734, 0.08017839813232422, 0.08009315490722656, 0.0800889892578125, 0.0802514877319336, 0.08008889770507813, 0.07961446380615235, 0.07999839782714843, 0.08139817810058594, 0.08286796569824219, 0.08185488128662109, 0.08163766479492188, 0.08093052673339844, 0.08148947143554687, 0.0803741455078125, 0.08015641784667969, 0.08052345275878907, 0.08005830383300781, 0.0802220458984375, 0.0802360610961914, 0.08145798492431641, 0.08279158020019531, 0.08207154846191406, 0.0821277084350586, 0.0809697265625, 0.08168361663818359, 0.08302012634277343, 0.08106400299072265, 0.0809265899658203, 0.08097843170166015, 0.08104761505126953, 0.08058060455322266, 0.08158108520507812, 0.08114454650878906, 0.08198783874511718, 
0.08113561248779297, 0.08085298919677734, 0.08212207794189454, 0.08103734588623047, 0.08138921356201172, 0.08069529724121094, 0.08045053100585937, 0.08029698944091797, 0.08029827117919922, 0.08156671905517578, 0.08183702087402343, 0.08178556823730469, 0.08175122833251953, 0.08110572814941407, 0.09370614624023438, 0.0801632308959961, 0.07929241943359375, 0.07926579284667969, 0.08002355194091797, 0.08009449768066407, 0.07998111724853516, 0.07998876953125, 0.0799642562866211, 0.07994985961914063, 0.08021794891357421, 0.0842542724609375, 0.08363887786865234, 0.08105779266357421, 0.08039218902587891, 0.08009891510009766, 0.0801468505859375, 0.07946854400634766, 0.0804290542602539, 0.08014848327636719, 0.08015666961669922, 0.0804714584350586, 0.08099696350097656, 0.08280815887451172, 0.08282720184326171, 0.08182653045654296, 0.08161920166015625, 0.08140185546875, 0.0816394271850586, 0.08248079681396485, 0.08147138977050782, 0.08087776184082031, 0.08143462371826173, 0.08060253143310547, 0.08158035278320312, 0.08095568084716796, 0.08224944305419922, 0.08157129669189453, 0.08103404998779297, 0.08117190551757812, 0.08105769348144531, 0.08107433319091797, 0.080880126953125, 0.08102674865722656, 0.08062188720703126, 0.08083411407470703, 0.08096812438964844, 0.0809717788696289, 0.08224153900146484, 0.08102291107177734, 0.08228460693359375, 0.08112860870361328, 0.08231993865966797, 0.08285820770263672, 0.08117852783203125, 0.08146550750732422, 0.08096959686279297, 0.0809488296508789, 0.0810642547607422, 0.08105699157714844, 0.08156400299072265, 0.081572509765625, 0.08156489562988281, 0.0953056640625, 0.08049788665771485, 0.08119574737548828, 0.08082371520996094, 0.08141686248779297, 0.0819402847290039, 0.08168057250976563, 0.08127072143554688, 0.08120735931396485, 0.08073590087890625, 0.08126499176025391, 0.08307872009277344, 0.08157574462890625, 0.08087206268310547, 0.08043049621582031, 0.08100489807128906, 0.08065814208984375, 0.0807265625, 0.08051292419433594, 0.08069363403320312, 0.08039584350585938, 0.08049884796142578, 0.08065350341796874, 0.08160348510742188, 0.08153282928466797, 0.08117967987060547, 0.08124924468994141, 0.08089395141601563, 0.08121548461914062, 0.08085443115234375, 0.08053311920166016, 0.08105878448486328, 0.08021571350097656, 0.08121711730957032, 0.08159859466552734, 0.08120326232910156, 0.08275971221923828, 0.08342940521240234, 0.08084031677246094, 0.08166899108886719, 0.08103731536865234, 0.08099635314941406, 0.08088780975341797, 0.08082147216796876, 0.08049913787841798, 0.08091088104248047, 0.08100761413574219, 0.0810948486328125, 0.0820552978515625, 0.08169113922119141, 0.0815059814453125, 0.08112092590332032, 0.0813062744140625, 0.08131171417236328, 0.08157545471191406, 0.0815313949584961, 0.0807567367553711, 0.08158767700195313, 0.08153520202636719, 0.08273273468017578, 0.08381094360351563, 0.08147334289550781, 0.08150444793701171, 0.09472480010986328, 0.08027340698242187, 0.08063590240478516, 0.08000921630859376, 0.08041887664794922, 0.08012179565429688, 0.08010546875, 0.08000224304199219, 0.08015750122070313, 0.08090825653076172, 0.08039356994628906, 0.0837083511352539, 0.08537522888183594, 0.08152268981933594, 0.08050892639160157, 0.08006034851074219, 0.08004409790039063, 0.08005427551269531, 0.08009318542480469, 0.08002355194091797, 0.08007868957519532, 0.08067292785644531, 0.08107405090332032, 0.0823235855102539, 0.08257331085205079, 0.08160160064697265, 0.08126528167724609, 0.08100281524658202, 0.08048025512695313, 0.08116425323486329, 0.08036271667480469, 
0.08108934020996093, 0.08029727935791016, 0.0808414077758789, 0.08177641296386719, 0.08232895660400391, 0.08279926300048829, 0.08319789123535157, 0.08264320373535156, 0.08063919830322265, 0.08031263732910156, 0.0806503677368164, 0.08034544372558594, 0.08067481231689454, 0.0803470687866211, 0.08063699340820313, 0.08168141174316407, 0.08181894683837891, 0.08228463745117187, 0.08218889617919922, 0.08184207916259766, 0.08105101013183594, 0.08111135864257812, 0.08075100708007812, 0.08096518707275391, 0.08033932495117188, 0.08104557037353516, 0.08110899353027344, 0.08192733001708985, 0.08211952209472656, 0.08325325012207031, 0.0829676513671875, 0.08136704254150391, 0.09417731475830078, 0.08028665924072266, 0.08058486175537109, 0.08067056274414063, 0.0805580825805664, 0.08056217956542969, 0.08008649444580078, 0.08005241394042968, 0.08062806701660157, 0.08008294677734375, 0.08078931427001954, 0.08509664154052735, 0.08450374603271485, 0.08171196746826172, 0.08073382568359375, 0.080176513671875, 0.08070777893066407, 0.0805296630859375, 0.0800445098876953, 0.0800474853515625, 0.08010310363769531, 0.08099244689941407, 0.0815006103515625, 0.08292390441894532, 0.08347647857666016, 0.0823377914428711, 0.08121753692626953, 0.08085068511962891, 0.08082867431640625, 0.0803594207763672, 0.08116223907470703, 0.08022589111328125, 0.08075714874267578, 0.08137718200683594, 0.08126268768310548, 0.0844432601928711, 0.08291110229492188, 0.08164505767822265, 0.081244384765625, 0.0814551010131836, 0.08079718780517578, 0.08087123107910156, 0.08142332458496093, 0.0809079360961914, 0.08085858917236328, 0.08068390655517578, 0.08192819213867188, 0.08243405151367188, 0.08217705535888672, 0.0828712615966797, 0.08110489654541016, 0.08248870086669922, 0.08357129669189453, 0.0809653091430664, 0.08110115051269531, 0.08138925170898438, 0.08097148895263671, 0.08095750427246094, 0.08167068481445312, 0.08190156555175782, 0.08227779388427735, 0.08179180908203125, 0.08210819244384765, 0.0936655044555664, 0.07980662536621094, 0.07998982238769531, 0.08019039916992188, 0.08042086029052735, 0.08073414611816407, 0.08066259002685547, 0.08005203247070312, 0.08071183776855469, 0.08068099212646485, 0.08024269104003906, 0.0856657943725586, 0.08338835144042969, 0.0814817886352539, 0.0808058853149414, 0.08017919921875, 0.08106591796875, 0.08009939575195313, 0.08021401977539062, 0.08000511932373047, 0.0805125732421875, 0.0805370864868164, 0.0817142105102539, 0.08375868988037109, 0.08344179534912109, 0.08239334106445312, 0.08334553527832031, 0.08073811340332031, 0.08076649475097657, 0.08080230712890625, 0.08095702362060547, 0.08019347381591797, 0.08060972595214844, 0.08091970825195313, 0.08152690887451172, 0.08217215728759765, 0.08233769226074218, 0.08235897827148438, 0.08151248168945313, 0.08113136291503906, 0.08064205169677735, 0.0808526382446289, 0.08045807647705078, 0.08096669006347657, 0.08089289855957031, 0.08090009307861327, 0.08097586822509766, 0.08228431701660156, 0.0820547866821289, 0.0828946533203125, 0.08331539154052735, 0.08122601318359375, 0.08091958618164062, 0.08034384155273437, 0.08099366760253907, 0.0804010238647461, 0.08151449584960938, 0.08121497344970703, 0.08170492553710937, 0.08102543640136718, 0.08291248321533203, 0.08256336212158204, 0.08235395050048828]",tokens/s,12.30319861360233,, 
4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1555.116032,3332.243456,0.0,2929.721344,2929.394176,s,1,11.2540751953125,11.2540751953125,0.0,11.2540751953125,11.2540751953125,11.2540751953125,11.2540751953125,[11.2540751953125],,kWh,0.00012034527542922053,1.3267930774012753e-05,3.960336501598882e-05,0.0001732165712192221,,MB,1437.868032,4208.852992,0.0,3793.747968,3508.40064,s,10,4.576798187255859,0.4576798187255859,0.004653527442250004,0.45949142456054687,0.46078789062500003,0.46251365051269533,0.4638942584228516,"[0.4476086730957031, 0.45076229858398437, 0.4570346984863281, 0.45952081298828124, 0.45987518310546877, 0.46423941040039063, 0.4604043884277344, 0.45799215698242185, 0.4594620361328125, 0.4598985290527344]",tokens/s,559.3429937829345,kWh,1.3314679115342186e-05,1.4677687231489091e-06,8.811711594818407e-06,2.3594159433309502e-05,tokens/kWh,10850142.838256283,MB,1481.822208,4208.852992,0.0,3793.747968,3508.4032,s,10,22.805061523437498,2.28050615234375,0.002609818759577729,2.28113037109375,2.2835340576171874,2.284028991699219,2.2844249389648437,"[2.28452392578125, 2.27602392578125, 2.280455078125, 2.28189111328125, 2.277826171875, 2.2774296875, 2.283424072265625, 2.27964013671875, 2.282041748046875, 2.2818056640625]",tokens/s,27.62544619108037,kWh,6.685231513756813e-05,7.374542923311588e-06,4.4276386431183935e-05,0.00011850324449206367,tokens/kWh,531631.0137332923,,s,630,22.80319440841674,0.036195546680026586,0.0011436061204465457,0.035951423645019534,0.03644619827270508,0.03664069194793702,0.0441962140274048,"[0.043786239624023435, 0.03805388641357422, 0.036683200836181644, 0.03642771148681641, 0.03641408157348633, 0.036091201782226565, 0.03610265731811523, 0.03607129669189453, 0.03607379150390625, 0.036016128540039063, 0.03602227020263672, 0.036009822845458984, 0.03602025604248047, 0.036034561157226565, 0.036049022674560546, 0.03602022552490235, 0.03606428909301758, 0.036031455993652345, 0.03606528091430664, 0.03602022552490235, 0.036042015075683595, 0.036088542938232424, 0.036192256927490236, 0.0361267204284668, 0.03607961654663086, 0.03600384140014649, 0.036017566680908206, 0.03598531341552735, 0.03604908752441406, 0.03600576019287109, 0.036055679321289065, 0.036028289794921876, 0.036044830322265624, 0.036036705017089846, 0.03604684829711914, 0.03603801727294922, 0.03604134368896485, 0.03602841567993164, 0.03607574462890625, 0.03606095886230469, 0.03606265640258789, 0.03604956817626953, 0.03607052612304688, 0.03611286544799805, 0.036089984893798825, 0.03609823989868164, 0.03610208129882812, 0.036085025787353515, 0.03611523056030273, 0.036168704986572264, 0.03617446517944336, 0.036157470703125, 0.03619465637207031, 0.03615059280395508, 0.036182720184326174, 0.0361451530456543, 0.036171775817871094, 0.036163135528564455, 0.03617145538330078, 0.03617459106445312, 0.03622092819213867, 0.03617292785644531, 
0.03619673538208008, 0.044652801513671875, 0.03812076950073242, 0.03673302459716797, 0.036413185119628905, 0.036418399810791015, 0.03609600067138672, 0.03607551956176758, 0.036036449432373045, 0.0358067512512207, 0.035840641021728514, 0.03576956939697266, 0.03579964828491211, 0.03580499267578125, 0.03591206359863281, 0.035885055541992186, 0.035870113372802735, 0.03583603286743164, 0.03583843231201172, 0.03582156753540039, 0.035829761505126956, 0.035790561676025394, 0.03583824157714844, 0.03578396987915039, 0.035894142150878904, 0.035820960998535156, 0.035903934478759766, 0.035846145629882815, 0.03583180618286133, 0.03582064056396484, 0.035916641235351564, 0.0359035530090332, 0.03590057754516601, 0.03589628982543945, 0.03591136169433594, 0.03588662338256836, 0.035901439666748046, 0.03591836929321289, 0.035909759521484376, 0.035917823791503906, 0.03590553665161133, 0.035894817352294925, 0.035924446105957034, 0.036087806701660154, 0.036035873413085937, 0.036028961181640624, 0.03599292755126953, 0.036037281036376954, 0.035971263885498046, 0.03596214294433594, 0.03596156692504883, 0.035942401885986325, 0.03601593780517578, 0.03594211196899414, 0.03603299331665039, 0.03598163223266602, 0.0360090560913086, 0.03595894241333008, 0.03605753707885742, 0.03599555206298828, 0.03599318313598633, 0.03598387145996094, 0.03597926330566406, 0.03598735809326172, 0.0439090576171875, 0.03798348617553711, 0.036799072265625, 0.036477279663085935, 0.03649276733398438, 0.03611484909057617, 0.03614310455322266, 0.03611155319213867, 0.03585212707519531, 0.03584662246704102, 0.035880638122558595, 0.03599203109741211, 0.03583830261230469, 0.035837825775146485, 0.03591551971435547, 0.03595711898803711, 0.0359153938293457, 0.0358895378112793, 0.03584393692016601, 0.0358199348449707, 0.03635353469848633, 0.036319488525390624, 0.03633561706542969, 0.036327423095703124, 0.03633152008056641, 0.03618201446533203, 0.03602767944335938, 0.03593494415283203, 0.03621683120727539, 0.03589120101928711, 0.03588915252685547, 0.03586422348022461, 0.03586032104492187, 0.03586918258666992, 0.03585843276977539, 0.03588476943969727, 0.03589900970458984, 0.03591228866577149, 0.03594246292114258, 0.035932159423828124, 0.035934207916259765, 0.03589651107788086, 0.03593708801269531, 0.035937278747558594, 0.03592607879638672, 0.03646559906005859, 0.036534271240234374, 0.03636019134521484, 0.03636595153808594, 0.03622335815429688, 0.035969024658203126, 0.03592124938964844, 0.035963520050048825, 0.035958816528320316, 0.035952255249023436, 0.03590387344360352, 0.03594838333129883, 0.03594601440429687, 0.03590233612060547, 0.03592371368408203, 0.035950592041015625, 0.035933246612548826, 0.03596384048461914, 0.04651007843017578, 0.038391807556152346, 0.03688652801513672, 0.0364400634765625, 0.036431102752685546, 0.03623193740844727, 0.03606528091430664, 0.03611033630371094, 0.0358328971862793, 0.03580928039550781, 0.03579369735717773, 0.03583119964599609, 0.03599177551269531, 0.03586511993408203, 0.0358392333984375, 0.03585919952392578, 0.03586275100708008, 0.035870304107666014, 0.03585657501220703, 0.03583142471313477, 0.03580681610107422, 0.03582668685913086, 0.03582339096069336, 0.03582361602783203, 0.035816638946533204, 0.03581814575195313, 0.03643817520141602, 0.035915775299072264, 0.03584601593017578, 0.0358519058227539, 0.036039169311523435, 0.03599564743041992, 0.03588710403442383, 0.03589904022216797, 0.035877216339111326, 0.035899391174316404, 0.035917438507080075, 0.0359156494140625, 0.0359285774230957, 0.035950592041015625, 
0.03590758514404297, 0.03591689682006836, 0.03591465759277344, 0.035975166320800785, 0.03597449493408203, 0.036092575073242185, 0.03603222274780273, 0.0360401611328125, 0.03601859283447266, 0.036038814544677736, 0.03599996948242187, 0.03600566482543945, 0.03598137664794922, 0.036003902435302736, 0.03596012878417969, 0.03596550369262695, 0.03612457656860352, 0.03622537612915039, 0.03698675155639648, 0.036229248046875, 0.03624284744262695, 0.036256351470947266, 0.03619839859008789, 0.04454880142211914, 0.038145889282226564, 0.03678412628173828, 0.03635200119018555, 0.036370433807373044, 0.03609171295166016, 0.03605728149414063, 0.03604889678955078, 0.03582361602783203, 0.03581087875366211, 0.035797439575195315, 0.03575795364379883, 0.03577254486083985, 0.0357498893737793, 0.03575603103637695, 0.035762176513671876, 0.035794689178466794, 0.03577590560913086, 0.03580185699462891, 0.03579251098632812, 0.03583023834228516, 0.035829025268554686, 0.03583567810058594, 0.03581635284423828, 0.035846206665039064, 0.035829727172851565, 0.035897022247314454, 0.03587945556640625, 0.03586556625366211, 0.03583878326416016, 0.035907135009765626, 0.0364917106628418, 0.03649353790283203, 0.03620159912109375, 0.03616556930541992, 0.03616969680786133, 0.035916126251220704, 0.03587926483154297, 0.036459968566894534, 0.03647068786621094, 0.03627920150756836, 0.036155200958251955, 0.036169055938720704, 0.03591439819335938, 0.035899391174316404, 0.03586624145507813, 0.03588137435913086, 0.03589523315429687, 0.03588713455200195, 0.0358809585571289, 0.035942401885986325, 0.03591372680664062, 0.03595647811889648, 0.03593414306640625, 0.03593436813354492, 0.03595811080932617, 0.035967807769775394, 0.03594035339355469, 0.03597721481323242, 0.035983104705810544, 0.035955009460449217, 0.035960704803466796, 0.03597900772094727, 0.04363043212890625, 0.037910526275634765, 0.03664281463623047, 0.036421630859375, 0.03645455932617187, 0.03617792129516602, 0.036087615966796875, 0.036098175048828125, 0.035811233520507815, 0.035800640106201174, 0.03576671981811524, 0.03577577590942383, 0.0357977294921875, 0.03581542587280274, 0.03579289627075195, 0.03579904174804688, 0.03601129531860352, 0.03588496017456055, 0.03586358261108399, 0.035824607849121094, 0.03579372787475586, 0.03584592056274414, 0.035788673400878906, 0.03579257583618164, 0.0357935676574707, 0.03579046249389648, 0.03581353759765625, 0.03584431838989258, 0.035823486328125, 0.035874942779541015, 0.03585030364990234, 0.035856510162353514, 0.03585209655761719, 0.03653852844238281, 0.03646144104003906, 0.03620748901367187, 0.03664086532592774, 0.036274177551269535, 0.03604608154296875, 0.03593247985839844, 0.03586912155151367, 0.03588915252685547, 0.03585843276977539, 0.035890975952148435, 0.03588220977783203, 0.03593318557739258, 0.03588419342041015, 0.035899742126464844, 0.03588780975341797, 0.03591756820678711, 0.03590150451660156, 0.035937664031982425, 0.035928768157958986, 0.03591980743408203, 0.03651116943359375, 0.03651846313476562, 0.03629228973388672, 0.036208255767822266, 0.036199104309082034, 0.0359785270690918, 0.035893985748291016, 0.03592144012451172, 0.03592035293579102, 0.04647747039794922, 0.0384093132019043, 0.03690092849731445, 0.03644598388671875, 0.03636028671264648, 0.036141887664794925, 0.03606937789916992, 0.03601408004760742, 0.035792800903320314, 0.03574752044677734, 0.03574620819091797, 0.03577036666870117, 0.03582342529296875, 0.03579686355590821, 0.03584646224975586, 0.03580912017822266, 0.03582304000854492, 0.035834590911865236, 
0.035813377380371096, 0.03583001708984375, 0.03587692642211914, 0.036031936645507814, 0.035901695251464846, 0.035843265533447265, 0.03583059310913086, 0.035899295806884765, 0.03586841583251953, 0.03581126403808594, 0.03592655944824219, 0.03644812774658203, 0.03646054458618164, 0.03616531372070313, 0.036194625854492186, 0.03618815994262695, 0.03591987228393555, 0.03587686538696289, 0.03589724731445312, 0.03590572738647461, 0.03591088104248047, 0.03591241455078125, 0.03593318557739258, 0.03592086410522461, 0.035949665069580077, 0.035918750762939454, 0.03593830490112305, 0.035926143646240236, 0.03650137710571289, 0.03653196716308594, 0.036313343048095706, 0.03619839859008789, 0.03624534225463867, 0.03598556900024414, 0.035950592041015625, 0.035942401885986325, 0.035969310760498044, 0.03596480178833008, 0.0365300178527832, 0.03652556610107422, 0.03632588958740234, 0.03619430541992188, 0.036245536804199216, 0.03597740936279297, 0.03593593597412109, 0.044462078094482424, 0.03825020980834961, 0.036660640716552735, 0.03642867279052735, 0.03632281494140625, 0.03613091278076172, 0.03609027099609375, 0.035958782196044925, 0.03586867141723633, 0.03582467269897461, 0.03579091262817383, 0.035781246185302734, 0.03577897644042969, 0.0357720947265625, 0.035774143218994144, 0.03579955291748047, 0.03579497528076172, 0.03583382415771484, 0.03576847839355469, 0.035780448913574216, 0.03577017593383789, 0.03577056121826172, 0.03581542587280274, 0.03579289627075195, 0.03580080032348633, 0.03581484985351562, 0.03579580688476563, 0.03581727981567383, 0.035995838165283206, 0.03647881698608398, 0.03643203353881836, 0.036122623443603515, 0.03616153717041016, 0.03611164855957031, 0.03585507202148437, 0.03583990478515625, 0.03582166290283203, 0.03584204864501953, 0.03584185409545899, 0.03588934326171875, 0.03588726425170898, 0.035915454864501956, 0.03641769790649414, 0.036466686248779294, 0.03623535919189453, 0.03618396759033203, 0.03616057586669922, 0.03593926239013672, 0.035885055541992186, 0.035894657135009764, 0.03589174270629883, 0.035899486541748044, 0.03642192077636719, 0.036501182556152346, 0.03626611328125, 0.03624950408935547, 0.03626803207397461, 0.03602022552490235, 0.035952640533447267, 0.03594649505615234, 0.03594035339355469, 0.03598115158081055, 0.03649520111083984, 0.04492534255981445, 0.03821564865112305, 0.036781280517578126, 0.036409984588623046, 0.03638681411743164, 0.03607283020019531, 0.03605676651000977, 0.03604147338867188, 0.035805374145507815, 0.03578060913085938, 0.035796993255615236, 0.035762176513671876, 0.03580518341064453, 0.035811073303222654, 0.03583001708984375, 0.03581884765625, 0.0358818244934082, 0.03578553771972656, 0.035805374145507815, 0.03578144073486328, 0.035813377380371096, 0.03578003311157227, 0.0358221435546875, 0.035768318176269534, 0.03581327819824219, 0.03580640029907227, 0.03585424041748047, 0.03585734558105469, 0.03646879959106445, 0.03642800140380859, 0.03643369674682617, 0.03630899047851562, 0.03633110427856445, 0.03631919860839844, 0.03636064147949219, 0.036380672454833986, 0.03623526382446289, 0.036173534393310544, 0.03619651031494141, 0.035905342102050784, 0.0359153938293457, 0.03589187240600586, 0.03589734268188476, 0.03589257431030273, 0.035910335540771485, 0.03592806243896484, 0.035952640533447267, 0.035925537109375, 0.03593804931640625, 0.035934398651123044, 0.03594294357299805, 0.03592396926879883, 0.03596492767333984, 0.035923744201660154, 0.035967201232910154, 0.03592806243896484, 0.03595705413818359, 0.03670985412597656, 0.03657545471191406, 
0.036525665283203126, 0.036270336151123045, 0.03625795364379883, 0.03611859130859375, 0.04431350326538086, 0.03804159927368164, 0.03664048004150391, 0.036396800994873045, 0.036321983337402344, 0.03606240081787109, 0.03608643341064453, 0.035953662872314454, 0.035730430603027344, 0.03610966491699219, 0.03576079940795898, 0.03602227020263672, 0.035919231414794923, 0.03585260772705078, 0.035857761383056644, 0.03581753540039063, 0.03581222534179687, 0.03580521774291992, 0.03581568145751953, 0.0358476791381836, 0.03585036849975586, 0.035856510162353514, 0.035848384857177736, 0.03585823822021485, 0.035833854675292966, 0.03584435272216797, 0.035989185333251954, 0.0364442253112793, 0.03642748641967773, 0.03621878433227539, 0.03644025421142578, 0.03648044967651367, 0.03624448013305664, 0.036195297241210934, 0.03618643188476563, 0.03587120056152344, 0.03641958236694336, 0.03648227310180664, 0.036254432678222655, 0.03616732788085938, 0.03615776062011719, 0.035919136047363284, 0.035875648498535154, 0.03586870574951172, 0.03588633728027344, 0.035902175903320316, 0.035896446228027346, 0.03589209747314453, 0.03597532653808594, 0.03589718246459961, 0.03588211059570313, 0.035904544830322266, 0.03589718246459961, 0.03592393493652344, 0.03591689682006836, 0.036102558135986326, 0.036528350830078125, 0.03650182342529297, 0.036241409301757815, 0.036249088287353515, 0.03616614532470703, 0.03597721481323242, 0.035975166320800785]",tokens/s,27.62770814984871,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1281.196032,13202.55488,0.0,12807.307264,12661.927936,s,1,26.6992890625,26.6992890625,0.0,26.6992890625,26.6992890625,26.6992890625,26.6992890625,[26.6992890625],,kWh,0.0005687548774541673,6.273068353996872e-05,0.00021457433832600126,0.0008460598993201372,,MB,1247.15008,15727.525888,0.0,15319.69536,14319.896576,s,10,30.010053466796876,3.0010053466796878,0.008951754834617047,3.003243286132813,3.0097442138671875,3.0109288696289065,3.0118765942382812,"[2.980326416015625, 2.99000244140625, 2.999275146484375, 3.00148193359375, 3.001940673828125, 3.00460400390625, 3.00948095703125, 3.006282470703125, 3.0045458984375, 3.012113525390625]",tokens/s,85.30474638548658,kWh,8.732547033874993e-05,9.630788921964672e-06,5.8185046547999666e-05,0.00015514130580871426,tokens/kWh,1650108.581112771,MB,1264.951296,15727.525888,0.0,15319.69536,14319.899136,s,10,141.4810751953125,14.148107519531251,0.023793453007881927,14.15676513671875,14.1686619140625,14.1693158203125,14.1698389453125,"[14.0949990234375, 14.1190595703125, 14.1331025390625, 14.147650390625, 14.1553505859375, 14.1581796875, 14.167037109375, 14.1685166015625, 14.1672099609375, 
14.1699697265625]",tokens/s,4.452892368327668,kWh,0.00041360347847291745,4.562475451827259e-05,0.0002747863309400009,0.0007340145639311909,tokens/kWh,85829.3596554657,,s,630,141.47523474121104,0.22456386466858883,0.001889570810858079,0.2246788330078125,0.22589389953613281,0.22682214126586914,0.2319333972167969,"[0.2324471435546875, 0.2196405792236328, 0.219787353515625, 0.22076824951171875, 0.2283184356689453, 0.2221222381591797, 0.21976272583007814, 0.22031964111328126, 0.22681251525878907, 0.22372528076171874, 0.22182499694824218, 0.2200743103027344, 0.22421708679199218, 0.22470416259765624, 0.22300502014160156, 0.22120387268066405, 0.22276754760742187, 0.2246222686767578, 0.22377897644042968, 0.2221419219970703, 0.22340489196777344, 0.22354124450683593, 0.224395263671875, 0.22332826232910155, 0.22270565795898437, 0.22311322021484375, 0.22372880554199218, 0.22398037719726563, 0.2229917755126953, 0.2231711730957031, 0.2239036865234375, 0.22449360656738282, 0.2237662353515625, 0.22275065612792969, 0.2237783966064453, 0.22423324584960938, 0.22391871643066405, 0.22284042358398437, 0.22395094299316406, 0.22423782348632812, 0.224579833984375, 0.22318304443359374, 0.22387245178222656, 0.22389340209960937, 0.22454135131835937, 0.2239051513671875, 0.22337782287597657, 0.22498463439941407, 0.22399020385742188, 0.22456137084960937, 0.22343270874023438, 0.22483148193359376, 0.2247200927734375, 0.22466026306152342, 0.22391119384765626, 0.22427926635742187, 0.22459759521484374, 0.22507562255859376, 0.22501986694335938, 0.22428880310058594, 0.22444032287597657, 0.22502957153320313, 0.22499526977539064, 0.23144540405273437, 0.22001802062988282, 0.22074435424804686, 0.22213558959960938, 0.2287418518066406, 0.22117094421386718, 0.22103526306152343, 0.22161203002929689, 0.22724607849121095, 0.2250198974609375, 0.220727294921875, 0.22151980590820314, 0.22427244567871094, 0.22556057739257812, 0.22254182434082032, 0.21992988586425782, 0.22342886352539063, 0.22557743835449218, 0.22429490661621093, 0.22251686096191406, 0.22267462158203125, 0.22498374938964844, 0.22477133178710937, 0.22271160888671876, 0.22261241149902344, 0.22356378173828126, 0.22483558654785157, 0.22447514343261718, 0.22282444763183593, 0.22378445434570313, 0.22469068908691406, 0.22461427307128906, 0.22334994506835937, 0.2233189697265625, 0.22513253784179688, 0.22497894287109374, 0.22437274169921875, 0.2232340545654297, 0.224247802734375, 0.22429901123046875, 0.22535107421875, 0.22401699829101562, 0.2240491485595703, 0.22434144592285157, 0.2248709716796875, 0.2241903076171875, 0.22448541259765625, 0.2247467803955078, 0.2253173828125, 0.2234916534423828, 0.2248239288330078, 0.22452444458007811, 0.22557081604003906, 0.2240482177734375, 0.22503208923339843, 0.22499385070800781, 0.22450743103027343, 0.22586256408691407, 0.2245847625732422, 0.22550828552246094, 0.22479872131347656, 0.22526156616210938, 0.2250891876220703, 0.23152024841308594, 0.22093414306640624, 0.22102732849121093, 0.222308349609375, 0.22938111877441406, 0.22157926940917969, 0.22147891235351563, 0.2225145263671875, 0.22683001708984374, 0.22384739685058594, 0.22228582763671875, 0.2220789794921875, 0.225003173828125, 0.22556092834472657, 0.2225048370361328, 0.22222019958496095, 0.22343907165527344, 0.2248417205810547, 0.2246775665283203, 0.22250233459472657, 0.2222244873046875, 0.22530712890625, 0.22444876098632813, 0.22271142578125, 0.22270115661621093, 0.22557151794433594, 0.22450189208984375, 0.22414707946777343, 0.22291903686523437, 0.224310302734375, 0.22435939025878907, 
0.22449151611328125, 0.22320742797851562, 0.22443008422851562, 0.2249337615966797, 0.2246165771484375, 0.2238525390625, 0.22365129089355468, 0.2252799072265625, 0.22477180480957032, 0.2246090850830078, 0.2243230438232422, 0.22420069885253907, 0.2259539794921875, 0.22430764770507813, 0.22465536499023436, 0.22428671264648437, 0.2249947204589844, 0.2245629119873047, 0.22524957275390625, 0.22414796447753907, 0.22529852294921876, 0.22443212890625, 0.2251376953125, 0.22569879150390626, 0.2255319061279297, 0.22521766662597656, 0.22504031372070313, 0.22498912048339845, 0.22451478576660155, 0.2256796417236328, 0.2253844451904297, 0.22534553527832032, 0.23324053955078125, 0.22112393188476562, 0.22178431701660156, 0.22286787414550782, 0.22900531005859376, 0.22270565795898437, 0.22107673645019532, 0.22109599304199218, 0.22687405395507812, 0.2239930877685547, 0.22275149536132813, 0.22196421813964845, 0.2246636199951172, 0.22564659118652344, 0.22376573181152343, 0.22240130615234374, 0.22340336608886718, 0.22565341186523438, 0.22392547607421875, 0.2230955810546875, 0.22323587036132814, 0.22419683837890625, 0.22529638671875, 0.2239705352783203, 0.2235502471923828, 0.22418576049804687, 0.22466006469726563, 0.22449491882324218, 0.22421734619140626, 0.22416224670410156, 0.224810302734375, 0.22477008056640624, 0.22437510681152345, 0.2239184265136719, 0.2244071350097656, 0.2251976318359375, 0.22501589965820312, 0.22507798767089843, 0.22413725280761718, 0.22437408447265625, 0.22485801696777344, 0.22532115173339845, 0.2242525177001953, 0.224606201171875, 0.22522880554199218, 0.22497894287109374, 0.22443128967285156, 0.22531491088867187, 0.22527049255371093, 0.22495423889160157, 0.2251367645263672, 0.224257568359375, 0.2255385284423828, 0.22508543395996095, 0.22536175537109376, 0.22429302978515625, 0.2258534393310547, 0.22610099792480468, 0.22515327453613282, 0.2250260467529297, 0.22550909423828125, 0.22528598022460938, 0.22618489074707032, 0.23196646118164063, 0.22116860961914062, 0.2212575378417969, 0.22324652099609374, 0.23077069091796876, 0.22223052978515626, 0.22230221557617189, 0.2232580871582031, 0.22763165283203124, 0.22413107299804688, 0.2222344970703125, 0.2201719970703125, 0.22632899475097656, 0.22543331909179687, 0.22350624084472656, 0.22253334045410156, 0.22382601928710938, 0.22630262756347655, 0.22449130249023438, 0.22288978576660157, 0.22351907348632813, 0.22539884948730468, 0.22474137878417969, 0.2236600341796875, 0.22312675476074217, 0.22466435241699217, 0.22442095947265625, 0.22513346862792968, 0.22387303161621094, 0.22377186584472655, 0.2247196502685547, 0.22536601257324218, 0.22417543029785156, 0.22421119689941407, 0.22469635009765626, 0.22488278198242187, 0.2249566650390625, 0.2244524841308594, 0.22430943298339845, 0.2253367004394531, 0.2250916748046875, 0.2246614990234375, 0.2246661376953125, 0.22466061401367188, 0.22552790832519531, 0.22486819458007812, 0.2250467834472656, 0.22515939331054688, 0.22504701232910157, 0.22589768981933595, 0.22428953552246095, 0.2255462646484375, 0.22518783569335937, 0.22553919982910156, 0.22512728881835936, 0.22558889770507812, 0.22512879943847655, 0.22526771545410157, 0.22544178771972656, 0.22535935974121094, 0.22549264526367188, 0.22577853393554687, 0.2253020477294922, 0.23185244750976564, 0.22079283142089845, 0.22234072875976563, 0.22271629333496093, 0.23001863098144532, 0.22175379943847656, 0.22215475463867188, 0.2226175994873047, 0.2271285400390625, 0.22461247253417968, 0.22202024841308593, 0.22269541931152342, 0.22451814270019532, 0.22654348754882814, 
0.2229757080078125, 0.22352528381347656, 0.22396214294433595, 0.22581961059570313, 0.22453811645507812, 0.22352716064453124, 0.22333465576171874, 0.22426966857910155, 0.22529910278320311, 0.22449151611328125, 0.2228632049560547, 0.2246800994873047, 0.22535562133789064, 0.2245771484375, 0.22423193359375, 0.22447068786621094, 0.22503794860839843, 0.224827392578125, 0.22387583923339843, 0.2243170623779297, 0.22539651489257811, 0.2250020751953125, 0.22440304565429686, 0.2241084442138672, 0.22481765747070312, 0.22545753479003905, 0.22515699768066405, 0.2248383026123047, 0.2249276123046875, 0.22499069213867187, 0.2252991943359375, 0.2254043884277344, 0.2246661376953125, 0.2248308410644531, 0.22503631591796874, 0.22677471923828124, 0.2239510040283203, 0.22541798400878907, 0.2252554168701172, 0.22547817993164063, 0.2250379180908203, 0.22535983276367189, 0.2256290588378906, 0.22555445861816406, 0.22521446228027345, 0.2260061798095703, 0.22530706787109375, 0.22559490966796875, 0.22492620849609374, 0.23259120178222656, 0.21991014099121095, 0.2217697296142578, 0.22271772766113282, 0.23089993286132812, 0.2222469177246094, 0.22277638244628906, 0.2224055633544922, 0.2287840576171875, 0.22407994079589844, 0.22254591369628907, 0.22246604919433594, 0.22514688110351563, 0.22747750854492188, 0.2235146179199219, 0.22214451599121093, 0.22410182189941405, 0.22688825988769531, 0.22470831298828126, 0.22267727661132813, 0.22270976257324218, 0.22525132751464844, 0.2258934783935547, 0.22407267761230468, 0.22231033325195312, 0.224046142578125, 0.22664697265625, 0.22509085083007813, 0.22290505981445313, 0.22389144897460939, 0.22545730590820312, 0.2262327423095703, 0.2248667449951172, 0.2239180145263672, 0.22436582946777345, 0.2259443817138672, 0.22500265502929687, 0.22504124450683594, 0.22402662658691405, 0.2259578857421875, 0.22542279052734376, 0.22511782836914063, 0.22495309448242187, 0.22493775939941407, 0.22561782836914063, 0.22517768859863282, 0.22462506103515625, 0.225112060546875, 0.22542335510253905, 0.225291748046875, 0.2255235137939453, 0.22490390014648437, 0.22562611389160156, 0.2251138916015625, 0.22549478149414062, 0.2252518005371094, 0.22536582946777345, 0.22638584899902345, 0.22524134826660155, 0.22494569396972655, 0.22560963439941406, 0.22565330505371095, 0.226197509765625, 0.23421942138671875, 0.22039059448242188, 0.22210858154296875, 0.22327909851074218, 0.23018060302734375, 0.22202297973632812, 0.22196524047851562, 0.22236114501953125, 0.22815557861328126, 0.22447712707519532, 0.22192568969726562, 0.2222786865234375, 0.22530966186523438, 0.2262056884765625, 0.22341017150878906, 0.22300262451171876, 0.22377676391601561, 0.2258841552734375, 0.2245069122314453, 0.2237019805908203, 0.2238279724121094, 0.2247740173339844, 0.2249381103515625, 0.22417613220214844, 0.2231904296875, 0.2251260223388672, 0.22578070068359374, 0.22423545837402345, 0.2238239288330078, 0.22438835144042968, 0.22562185668945312, 0.22579405212402343, 0.22478839111328125, 0.2242827453613281, 0.2244259796142578, 0.22594239807128907, 0.22472291564941407, 0.22470863342285155, 0.22458323669433594, 0.22588665771484376, 0.22543974304199219, 0.22422518920898438, 0.2252857666015625, 0.2250695343017578, 0.22567695617675781, 0.224872802734375, 0.22512136840820313, 0.22593408203125, 0.22515728759765624, 0.22534941101074218, 0.2251589813232422, 0.2246475830078125, 0.225693603515625, 0.22559123229980468, 0.22547433471679687, 0.22512258911132813, 0.2254148864746094, 0.22606681823730468, 0.2258590393066406, 0.2257904968261719, 0.22487030029296876, 
0.22608079528808595, 0.225880126953125, 0.23330960083007812, 0.22114154052734375, 0.2202235565185547, 0.22274252319335938, 0.2309836730957031, 0.22202557373046874, 0.22145858764648438, 0.22221142578125, 0.22857356262207032, 0.22456349182128907, 0.22302105712890624, 0.22230213928222656, 0.22545155334472655, 0.2260731201171875, 0.22359654235839843, 0.22198655700683595, 0.223652099609375, 0.22699186706542968, 0.2241938934326172, 0.22370191955566407, 0.22386892700195313, 0.2251643829345703, 0.22475663757324219, 0.22426953125, 0.22363011169433594, 0.2242150421142578, 0.2255134735107422, 0.22458770751953125, 0.2243665008544922, 0.224655517578125, 0.2250198974609375, 0.22512435913085938, 0.22465922546386718, 0.2242798767089844, 0.22480169677734374, 0.22549655151367187, 0.2248934783935547, 0.22468569946289063, 0.22468031311035155, 0.22482330322265626, 0.2264915771484375, 0.22448416137695312, 0.22480870056152344, 0.2247088623046875, 0.22511529541015626, 0.2257293701171875, 0.22532293701171874, 0.22539065551757811, 0.2253834228515625, 0.22548329162597655, 0.225876220703125, 0.22532118225097655, 0.22504646301269532, 0.22627468872070314, 0.2252045440673828, 0.22542095947265625, 0.22495304870605468, 0.22564454650878907, 0.22573056030273436, 0.22612582397460937, 0.22438502502441407, 0.22556466674804687, 0.22642425537109376, 0.2348450927734375, 0.22040489196777344, 0.22131497192382812, 0.22313055419921876, 0.23079525756835936, 0.2227439727783203, 0.22160035705566405, 0.22258265686035156, 0.22779405212402343, 0.2258665008544922, 0.22231680297851564, 0.2226114501953125, 0.2258145294189453, 0.2262994842529297, 0.22331155395507812, 0.22269346618652344, 0.22327743530273436, 0.22597193908691407, 0.22574284362792968, 0.22327760314941406, 0.22271705627441407, 0.22527679443359375, 0.22582208251953126, 0.22337394714355469, 0.22437887573242188, 0.2248272705078125, 0.22492991638183593, 0.224719970703125, 0.22393670654296874, 0.2241616973876953, 0.2256468505859375, 0.2248294677734375, 0.22456101989746094, 0.2239698486328125, 0.22459193420410156, 0.22604393005371093, 0.22530458068847656, 0.22428671264648437, 0.2244850311279297, 0.22497520446777344, 0.2259661102294922, 0.22471267700195313, 0.22479872131347656, 0.2248371124267578, 0.2251146240234375, 0.225887451171875, 0.22505552673339843, 0.22526771545410157, 0.22529638671875, 0.22544998168945313, 0.22574867248535158, 0.22487196350097657, 0.2253168029785156, 0.22602383422851563, 0.22550706481933594, 0.22555308532714843, 0.22542745971679687, 0.22568048095703125, 0.22569804382324218, 0.22560015869140626, 0.22498240661621094, 0.22570457458496093, 0.22554566955566407]",tokens/s,4.45307619494258,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 48.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 189925 has 14.70 GiB memory in use. Of the allocated memory 14.42 GiB is allocated by PyTorch, and 176.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,807.604224,4683.923456,0.0,4288.67584,4213.842432,s,1,13.5787783203125,13.5787783203125,0.0,13.5787783203125,13.5787783203125,13.5787783203125,13.5787783203125,[13.5787783203125],,kWh,0.00018497083590416515,2.03963994005206e-05,6.22283831159991e-05,0.00026759561842068486,,MB,1192.230912,5107.54816,0.0,4699.717632,4535.11424,s,10,8.553767395019532,0.855376739501953,0.007683236508205055,0.8569793090820312,0.8610069885253906,0.8619868316650391,0.8627707061767578,"[0.8337443237304687, 0.852953857421875, 0.85672607421875, 0.8551661376953125, 0.8570130615234375, 0.8589288330078125, 0.8607892456054688, 0.8629666748046875, 0.856945556640625, 0.8585336303710938]",tokens/s,299.28333116593416,kWh,2.48531106444444e-05,2.7390895892942575e-06,1.6417744615666776e-05,4.400994484940543e-05,tokens/kWh,5816867.093925897,MB,1232.44544,5115.936768,0.0,4708.10624,4535.1168,s,10,40.23405615234375,4.023405615234375,0.0061507504575199655,4.025970825195312,4.029557641601563,4.030256481933594,4.030815554199219,"[4.015683837890625, 4.010197021484375, 4.019560546875, 4.02631640625, 4.025761962890625, 4.022799560546875, 4.0261796875, 4.027199462890625, 4.030955322265625, 4.02940234375]",tokens/s,15.658376516017778,kWh,0.0001180998546972224,1.3028841168202256e-05,7.851052577133302e-05,0.00020963922163675768,tokens/kWh,300516.2846347533,,s,630,40.23095680236817,0.06385866159106059,0.0015793124110772148,0.06369526290893554,0.06435266647338868,0.06462068290710449,0.07443461128234863,"[0.0795832290649414, 0.06496665954589843, 0.06382553482055664, 0.0633240966796875, 0.06302150344848632, 0.06267267227172851, 0.06260348892211914, 0.06262319946289062, 0.06255990219116211, 0.06251110458374023, 0.0625406723022461, 0.0626558723449707, 0.0636995849609375, 0.06333849716186524, 0.06316032028198242, 0.0640646743774414, 0.06386947250366211, 0.06376176071166992, 0.06457443237304687, 0.06376364898681641, 0.06333523178100586, 0.06309888076782226, 0.0625459213256836, 0.0625172462463379, 0.06259507369995117, 0.06265753555297851, 0.062499679565429685, 0.06303321456909179, 0.06385036849975587, 0.06337561416625977, 0.0632762565612793, 0.06387731170654297, 0.06397161483764649, 0.06438889312744141, 0.06388556671142578, 0.06335715103149414, 0.06384867095947265, 0.06398745727539062, 0.06370655822753907, 0.0633639030456543, 0.06284284973144531, 0.06338313674926757, 0.06284649658203124, 0.06317763137817382, 0.06418812561035156, 0.06393593597412109, 0.06363631820678711, 0.0637050895690918, 0.06414070129394531, 0.06332067108154296, 0.0645630111694336, 0.06414915466308593, 0.06368515014648438, 0.063246337890625, 0.0632828483581543, 0.06490048217773438, 0.06420988464355469, 
0.06366207885742188, 0.06340329742431641, 0.06301123046875, 0.06356784057617187, 0.06435033416748047, 0.06386483383178711, 0.07312252807617188, 0.06415724945068359, 0.0634764175415039, 0.06313958358764649, 0.06327705764770508, 0.06235340881347656, 0.06369859313964844, 0.06333190536499024, 0.06303577423095703, 0.06275542449951171, 0.062441280364990234, 0.06250700759887695, 0.062476287841796874, 0.0642231674194336, 0.0637768325805664, 0.06471459197998047, 0.06404045104980469, 0.06379996871948242, 0.06357401657104492, 0.06384569549560547, 0.06351119995117188, 0.06330972671508789, 0.0628040657043457, 0.06274051284790039, 0.06330368041992188, 0.06374560165405273, 0.06343718338012695, 0.06299619293212891, 0.06276287841796875, 0.06390422439575195, 0.06333440017700195, 0.0637248649597168, 0.0641173095703125, 0.06391158294677735, 0.06370352172851562, 0.06410854339599609, 0.0637393913269043, 0.06341856002807617, 0.0632111358642578, 0.06312825775146484, 0.06380915069580079, 0.0633081283569336, 0.06303696060180664, 0.0627143669128418, 0.06400780487060546, 0.06390412902832031, 0.0633733139038086, 0.06342009735107422, 0.06392863845825195, 0.0635431022644043, 0.06429920196533204, 0.06409830474853516, 0.06335279846191406, 0.06341020965576172, 0.06396627044677734, 0.06368966293334961, 0.06398102569580078, 0.06364543914794922, 0.06342086410522461, 0.06347398376464844, 0.06394473648071289, 0.06361280059814453, 0.06330364990234374, 0.07437910461425781, 0.06458464050292968, 0.06365695953369141, 0.06322102355957031, 0.06286921691894531, 0.0636409912109375, 0.06352137756347656, 0.06316646575927734, 0.06252665710449219, 0.06272697448730469, 0.06378086471557617, 0.0632845115661621, 0.06389014434814454, 0.06371680068969726, 0.06330182266235351, 0.06348393630981446, 0.06390819168090821, 0.06345523071289062, 0.06521616363525391, 0.06425122833251953, 0.06332115173339843, 0.06318073654174805, 0.06293289566040039, 0.06377251052856445, 0.0633182716369629, 0.06302505493164062, 0.06321734237670898, 0.06329385757446289, 0.06403616333007812, 0.0636110725402832, 0.06324684906005859, 0.06309478378295899, 0.06383206558227539, 0.0639360008239746, 0.06431385803222656, 0.06393446350097656, 0.0636467514038086, 0.06354838562011719, 0.06402047729492187, 0.06362931060791016, 0.06343270492553711, 0.0631596794128418, 0.06395967864990235, 0.063825439453125, 0.06346185684204102, 0.06345913696289063, 0.06400364685058593, 0.06367295837402344, 0.06465948486328126, 0.06430003356933593, 0.06346441650390625, 0.06420480346679687, 0.0636701774597168, 0.06337340927124023, 0.0633507843017578, 0.06420025634765625, 0.06356832122802734, 0.0633481903076172, 0.06321763229370117, 0.06413497924804687, 0.06366396713256836, 0.0634090576171875, 0.06420233917236329, 0.07695600128173828, 0.06464921569824218, 0.06372963333129883, 0.06340790557861328, 0.06316249465942383, 0.06255628967285157, 0.06267497634887695, 0.06326800155639649, 0.06375507354736328, 0.06320876693725586, 0.06297875213623047, 0.06274383926391601, 0.06377510452270507, 0.06346550369262695, 0.06382009506225586, 0.06616886138916016, 0.06432262420654297, 0.06374697494506835, 0.06332412719726563, 0.06311939239501953, 0.0638914566040039, 0.06340403366088868, 0.06329958343505859, 0.0628936653137207, 0.06392464065551758, 0.06333030319213867, 0.0631621437072754, 0.06397974395751953, 0.0635590705871582, 0.06332851028442382, 0.06389139175415039, 0.06386633682250976, 0.0639119987487793, 0.06394355010986329, 0.06377459335327149, 0.06328537750244141, 0.06301283264160157, 0.06409219360351562, 
0.06359775924682617, 0.06438790130615234, 0.06381568145751954, 0.0634511375427246, 0.06315766525268554, 0.06394326400756836, 0.06369894409179687, 0.06428444671630859, 0.06386825561523438, 0.06373260879516601, 0.06383580780029297, 0.06431308746337891, 0.06373430252075195, 0.06334265518188477, 0.06407782745361328, 0.0639815673828125, 0.06369865417480469, 0.06437328338623047, 0.0643663330078125, 0.06412249755859376, 0.06380992126464843, 0.06351033782958984, 0.06418450927734375, 0.06368438339233398, 0.06358780670166016, 0.07429385375976562, 0.06462054443359375, 0.06384022521972656, 0.06342838287353515, 0.06279193496704101, 0.06415468597412109, 0.06344595336914062, 0.06334156799316407, 0.06272905731201171, 0.06370470428466797, 0.06331856155395507, 0.06311731338500977, 0.0627341423034668, 0.06273763275146485, 0.06359139251708984, 0.06443993377685547, 0.06490528106689453, 0.06442208099365235, 0.06388953781127929, 0.06334627151489258, 0.0631624641418457, 0.06402285003662109, 0.06332956695556641, 0.0637993278503418, 0.06358291244506836, 0.0633133773803711, 0.06330217742919922, 0.0638579216003418, 0.06359116744995118, 0.06416588592529297, 0.06381510543823242, 0.06354585647583008, 0.06401958465576171, 0.06356268692016602, 0.06339142227172852, 0.06431161499023437, 0.06392012786865234, 0.0636701774597168, 0.063587646484375, 0.06413507080078125, 0.06363958358764649, 0.06346428680419922, 0.06417203521728515, 0.06385868835449218, 0.06366412734985352, 0.06375804901123047, 0.06428294372558593, 0.06409552001953125, 0.06460867309570313, 0.0641416015625, 0.06375116729736328, 0.06361804962158203, 0.06415071868896484, 0.06364652633666992, 0.06332387161254883, 0.06406172943115235, 0.06379520034790039, 0.06346108627319336, 0.06395318222045898, 0.06386198425292969, 0.06359939193725586, 0.06357932662963867, 0.06406358337402343, 0.07445728302001953, 0.06475315093994141, 0.06353561782836914, 0.06343024063110352, 0.06279529571533203, 0.06280928039550782, 0.06386246490478516, 0.06349619293212891, 0.06315145492553711, 0.06297868728637696, 0.06260678482055664, 0.06386134338378906, 0.06337068939208984, 0.06317091369628906, 0.06400994873046875, 0.06505027008056641, 0.06404787445068359, 0.06393836975097657, 0.06415302276611329, 0.06372438430786133, 0.06333440017700195, 0.06293724822998047, 0.06370902252197265, 0.0632176628112793, 0.06301907348632813, 0.0629554557800293, 0.06402566528320312, 0.06337631988525391, 0.06329344177246093, 0.06374115371704102, 0.06409072113037109, 0.06431254577636719, 0.06407881927490235, 0.06416793823242188, 0.06366207885742188, 0.06313478469848632, 0.06464752197265625, 0.06399814224243164, 0.06348144149780273, 0.06342851257324218, 0.06331836700439453, 0.06376710510253907, 0.06335283279418945, 0.06342860794067383, 0.06381523132324218, 0.0637014389038086, 0.06395302581787109, 0.06361619186401367, 0.06457414245605468, 0.06394684982299804, 0.06339369583129882, 0.06410610961914062, 0.06351248168945313, 0.0634150733947754, 0.06509331512451172, 0.0639870719909668, 0.06342105484008789, 0.06325411224365235, 0.06406934356689453, 0.06366073608398437, 0.06332527923583985, 0.0642159652709961, 0.0636948471069336, 0.07531222534179688, 0.06462079620361329, 0.06379996871948242, 0.06337696075439453, 0.06303801727294922, 0.0636102409362793, 0.06324070358276367, 0.06300991821289062, 0.06269583892822266, 0.06371737670898438, 0.06325500869750976, 0.06313926315307618, 0.06258652877807618, 0.06288272094726563, 0.06478438568115234, 0.06436438751220704, 0.06378307342529296, 0.06399107360839844, 0.06361072158813477, 
0.06359132766723632, 0.06384409713745118, 0.06353737640380859, 0.06320115280151367, 0.06280121612548828, 0.06375711822509765, 0.06329139328002929, 0.06425775909423828, 0.06379312133789063, 0.06327436828613281, 0.06305385589599609, 0.0638554573059082, 0.06383967971801757, 0.06440409851074219, 0.06389990234375, 0.06346867370605469, 0.06398425674438477, 0.06354102325439454, 0.0631764793395996, 0.06457929229736328, 0.06410518646240235, 0.0635945930480957, 0.06369612884521485, 0.06408448028564454, 0.06363561630249023, 0.06340563201904297, 0.06410886383056641, 0.06390182495117187, 0.06461974334716797, 0.06414415740966797, 0.06425395202636719, 0.06407373046875, 0.06354534530639648, 0.06346342468261719, 0.06413311767578125, 0.06381158447265625, 0.06408956909179687, 0.0638100814819336, 0.06348185729980468, 0.06382976150512695, 0.0641170883178711, 0.06343670272827148, 0.0639447021484375, 0.06460633850097657, 0.07571673583984374, 0.06435225677490235, 0.06355267333984375, 0.06304240036010743, 0.06290633773803711, 0.06353475189208985, 0.06309747314453125, 0.06297369766235351, 0.06274867248535156, 0.06370950317382812, 0.06312419128417969, 0.06291292953491211, 0.0626324462890625, 0.06366758346557617, 0.06419321441650391, 0.06503833770751953, 0.06456114959716797, 0.06379296112060547, 0.06356150436401367, 0.06330995178222656, 0.06420451354980469, 0.0638039665222168, 0.06333030319213867, 0.06318080139160157, 0.0638230094909668, 0.06338032150268555, 0.06447305297851562, 0.06386415863037109, 0.06330780792236328, 0.06332828903198243, 0.06437542724609376, 0.0640552978515625, 0.06458163452148437, 0.064052734375, 0.06363977432250977, 0.06350467300415039, 0.06399385452270508, 0.06348121643066407, 0.06449625396728516, 0.06398361587524413, 0.06338355255126953, 0.06317670440673828, 0.06400972747802734, 0.06347574234008789, 0.06323247909545898, 0.0639283218383789, 0.06360883331298828, 0.06386191940307617, 0.06383190536499024, 0.06400415802001953, 0.06360547256469727, 0.06441558074951172, 0.06400991821289062, 0.0636844482421875, 0.0638902702331543, 0.06449533081054687, 0.06392214584350586, 0.06343702316284179, 0.06347558212280273, 0.0640857925415039, 0.06349427032470703, 0.06471900939941407, 0.06391334533691406, 0.07597491455078124, 0.0642171859741211, 0.06341791915893555, 0.06361747360229492, 0.06329296112060546, 0.06308911895751954, 0.06257247924804688, 0.06369635009765626, 0.06357852935791015, 0.06301417541503906, 0.06384118270874023, 0.06354742431640625, 0.06318025588989258, 0.06273247909545898, 0.06388518524169921, 0.06417996978759766, 0.06533542633056641, 0.06374662399291992, 0.0634511375427246, 0.06325417709350586, 0.0636399040222168, 0.06313129425048829, 0.06416121673583984, 0.06388214492797852, 0.06344294357299805, 0.06315795135498047, 0.06286515045166016, 0.06378515243530274, 0.06464374542236329, 0.06358537673950196, 0.06363590240478516, 0.06431145477294922, 0.06379868698120117, 0.06421977233886719, 0.064036865234375, 0.06374604797363281, 0.06342361450195312, 0.06404390716552734, 0.06351052856445312, 0.06409830474853516, 0.06376652908325195, 0.06351052856445312, 0.0638416976928711, 0.06405795288085937, 0.06359827041625976, 0.06482793426513672, 0.06409756469726563, 0.06391609573364258, 0.06394265747070313, 0.06387302398681641, 0.06441004943847656, 0.0641370849609375, 0.06354940795898438, 0.06423772430419922, 0.0638914566040039, 0.06347980880737304, 0.0645406723022461, 0.0639733772277832, 0.0636129264831543, 0.06380710220336915, 0.06404061126708985, 0.06463970947265625, 0.06415360260009766, 0.07515340423583984, 
0.06424278259277344, 0.06361385726928712, 0.06319862365722656, 0.0627677116394043, 0.06386073684692382, 0.06337075042724609, 0.06318540954589844, 0.06317232131958007, 0.06365008163452149, 0.06380748748779297, 0.06350848007202148, 0.0631541748046875, 0.06285619354248047, 0.06392863845825195, 0.06398396682739257, 0.06446115112304687, 0.06371532821655274, 0.06376144027709961, 0.06311040115356445, 0.06364499282836913, 0.06317712020874024, 0.06383606338500977, 0.06353724670410156, 0.06316851043701172, 0.06351804733276367, 0.0635338897705078, 0.06326051330566407, 0.06312691116333008, 0.06395967864990235, 0.06397132873535157, 0.06395296096801757, 0.0638463363647461, 0.0638683853149414, 0.06458188629150391, 0.06406294250488281, 0.0636956787109375, 0.06351811218261719, 0.06378147125244141, 0.06360678482055664, 0.06482125091552735, 0.06421094512939453, 0.06389980697631836, 0.06352646255493163, 0.06325385665893554, 0.06406444549560547, 0.06366617584228515, 0.06444457244873047, 0.06416681671142578, 0.06398867034912109, 0.06407513427734375, 0.06455270385742187, 0.06422335815429688, 0.06396355056762695, 0.06360303878784179, 0.0640401611328125, 0.06465740966796875, 0.06404521942138672, 0.0636646728515625, 0.06338713455200196, 0.0640722885131836, 0.06435635375976563, 0.0642718734741211]",tokens/s,15.659582820633176,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,819.07712,5878.185984,0.0,5475.663872,5452.374016,s,1,16.271443359375,16.271443359375,0.0,16.271443359375,16.271443359375,16.271443359375,16.271443359375,[16.271443359375],,kWh,0.00025941024684165086,2.8607634425344527e-05,8.854312638999218e-05,0.00037656100765698754,,MB,1296.236544,6475.874304,0.0,6060.76928,5886.629888,s,10,13.201010986328125,1.3201010986328126,0.006943049104048965,1.3214691162109373,1.3266718994140625,1.3267033203125,1.32672845703125,"[1.3020126953125, 1.31772705078125, 1.3174197998046875, 1.3197159423828124, 1.318348388671875, 1.3232222900390624, 1.3236273193359376, 1.325537841796875, 1.3267347412109376, 1.3266649169921876]",tokens/s,193.92454128333898,kWh,3.846181334041602e-05,4.241852384193401e-06,2.5599687146400652e-05,6.830335287101008e-05,tokens/kWh,3747985.8490029383,MB,1339.43296,6492.65152,0.0,6077.546496,5886.632448,s,10,60.840519531249996,6.084051953125,0.01706101769265069,6.088565185546875,6.0997814453125,6.101058935546875,6.102080927734375,"[6.04843896484375, 6.0589814453125, 6.07556640625, 6.08544091796875, 6.086990234375, 6.09014013671875, 6.09451953125, 6.09949755859375, 6.09860791015625, 6.10233642578125]",tokens/s,10.354941161809247,kWh,0.00017856065867499982,1.9694814122652717e-05,0.00011851292814359927,0.0003167684009412517,tokens/kWh,198883.47389701937,,s,630,60.83672194671631,0.09656622531224811,0.0017570505206387337,0.0962927360534668,0.09756104583740234,0.09823385200500488,0.10749517120361328,"[0.10668163299560547, 0.09552252960205078, 
0.09457174682617188, 0.09429174041748047, 0.09514329528808593, 0.0944249267578125, 0.09454364776611328, 0.09475737762451172, 0.09399539184570313, 0.09720543670654297, 0.09820800018310546, 0.09651679992675781, 0.09550348663330079, 0.09462828826904297, 0.09478383636474609, 0.09519522857666016, 0.09450003051757813, 0.09530860900878907, 0.09506950378417969, 0.09608995056152343, 0.09711497497558594, 0.09720374298095703, 0.09584793853759765, 0.09516336059570313, 0.09528514862060547, 0.09576457977294922, 0.0953016357421875, 0.0955775375366211, 0.09529606628417969, 0.09608806610107422, 0.09657740783691406, 0.09744345855712891, 0.09605101013183594, 0.09528800201416016, 0.09602153778076172, 0.09529420471191406, 0.09643030548095703, 0.0955206069946289, 0.0955758056640625, 0.09593059539794922, 0.0966063003540039, 0.09664125061035156, 0.09637481689453126, 0.09605104064941407, 0.09617817687988281, 0.09598566436767578, 0.09608601379394531, 0.09620585632324219, 0.09496288299560547, 0.09598652648925782, 0.09624262237548828, 0.09679801940917969, 0.09652870178222656, 0.09602063751220703, 0.09589494323730469, 0.09631827545166016, 0.09599795532226563, 0.09607782745361328, 0.09592569732666016, 0.09629548645019531, 0.0963583984375, 0.09626844787597656, 0.096553955078125, 0.10747270202636719, 0.09568109130859374, 0.09474009704589843, 0.09492451477050781, 0.09506269073486329, 0.09474662780761718, 0.09542768096923829, 0.09475552368164063, 0.09463011169433594, 0.09797945404052734, 0.09867340850830078, 0.09663507080078125, 0.09578086090087891, 0.09530924987792969, 0.09515062713623047, 0.09476073455810546, 0.09541155242919921, 0.09465113830566406, 0.09563878631591796, 0.09734441375732422, 0.09769353485107422, 0.09647939300537109, 0.09572345733642579, 0.09568876647949219, 0.09612041473388672, 0.09481871795654297, 0.09528524780273437, 0.09514393615722656, 0.09573375701904296, 0.09663488006591797, 0.0965733413696289, 0.09703759765625, 0.09632233428955078, 0.09651929473876954, 0.09552579498291015, 0.09482204437255859, 0.09567459106445313, 0.09498365020751953, 0.09550717163085938, 0.09644422149658204, 0.09668153381347656, 0.09690374755859375, 0.096427490234375, 0.09657730865478516, 0.09590860748291016, 0.09534668731689454, 0.09595085144042968, 0.09562931060791016, 0.09596054077148437, 0.09635820770263671, 0.09659670257568359, 0.0972779541015625, 0.09641500854492188, 0.09668271636962891, 0.09645040130615234, 0.09627664184570313, 0.09588121795654297, 0.09590988922119141, 0.09605683135986329, 0.09623302459716797, 0.09639212799072265, 0.09669395446777344, 0.0964672622680664, 0.10715315246582031, 0.09544048309326172, 0.09465309143066407, 0.09459414672851563, 0.09545203399658203, 0.09547574615478516, 0.09534611511230469, 0.09474515533447266, 0.09542655944824219, 0.0965551986694336, 0.09854070281982422, 0.09662902069091797, 0.09567884826660156, 0.09635145568847656, 0.09609820556640625, 0.09567731475830078, 0.09566316986083985, 0.09519590759277344, 0.09582099151611329, 0.09637353515625, 0.09694230651855469, 0.09715251159667969, 0.09638118743896484, 0.09674163055419922, 0.09572147369384766, 0.09572342681884766, 0.09586083221435547, 0.09574352264404297, 0.09567485046386719, 0.09649945831298828, 0.09684198760986328, 0.09697280120849609, 0.09680825805664063, 0.09647789001464843, 0.09600592041015625, 0.09627446746826172, 0.09591558074951172, 0.09605564880371094, 0.09601420593261718, 0.09695887756347656, 0.09676153564453124, 0.09731641387939453, 0.09687020874023437, 0.0963675537109375, 0.0963051528930664, 0.09617817687988281, 
0.09614540863037109, 0.0962682876586914, 0.09604710388183593, 0.09662054443359375, 0.09690480041503906, 0.09758761596679688, 0.09637824249267578, 0.09641433715820312, 0.09638111877441406, 0.09640306854248047, 0.09644461059570313, 0.0965153579711914, 0.09629122924804688, 0.09669843292236328, 0.09694841766357422, 0.09751763153076172, 0.09618431854248047, 0.1096212158203125, 0.09564921569824218, 0.09486579132080078, 0.0946868133544922, 0.09488003540039062, 0.09599827575683594, 0.09501596832275391, 0.09558729553222656, 0.09477021026611328, 0.09902098846435547, 0.0989948501586914, 0.09754793548583984, 0.09589987182617188, 0.09505766296386718, 0.09471027374267578, 0.09589081573486329, 0.09579583740234375, 0.09511929321289063, 0.09585056304931641, 0.09755955505371093, 0.09776822662353515, 0.09725122833251953, 0.0965389404296875, 0.09580307006835938, 0.0956638412475586, 0.09588387298583985, 0.09525862121582031, 0.09573923492431641, 0.09612150573730469, 0.09701376342773438, 0.09827942657470704, 0.09690019226074219, 0.09663513946533203, 0.09587506866455078, 0.0959760971069336, 0.09619865417480469, 0.09580681610107422, 0.09590959930419922, 0.09598655700683593, 0.09725958251953125, 0.09848012542724609, 0.09678438568115234, 0.0967720947265625, 0.09639500427246094, 0.09604940795898438, 0.09670982360839844, 0.09591391754150391, 0.09606639862060547, 0.09605894470214844, 0.09684015655517578, 0.09755580902099609, 0.09729232025146485, 0.09676035308837891, 0.09628272247314452, 0.09627423858642578, 0.09701190185546875, 0.09621708679199219, 0.09593366241455079, 0.09623426818847657, 0.09713459014892578, 0.09766297912597656, 0.09753804779052734, 0.0966778564453125, 0.10750434875488281, 0.09550637054443359, 0.09477740478515626, 0.09476505279541016, 0.09466166687011719, 0.09623446655273438, 0.09519821166992187, 0.09481728363037109, 0.094887939453125, 0.09968563079833985, 0.0990113296508789, 0.09731423950195313, 0.09565446472167968, 0.09496982574462891, 0.09565827178955078, 0.09569664001464843, 0.09491558074951172, 0.09506835174560548, 0.09616860961914063, 0.09824476623535157, 0.09827516937255859, 0.0971060791015625, 0.09667993927001953, 0.09593212890625, 0.09621737670898438, 0.09533235168457031, 0.0956211166381836, 0.0958545913696289, 0.0962682876586914, 0.09705471801757813, 0.09729638671875, 0.09777356719970703, 0.09686534118652344, 0.09659593963623046, 0.0960317153930664, 0.09535078430175781, 0.09602047729492187, 0.09593856048583985, 0.096753662109375, 0.0967740478515625, 0.09692537689208984, 0.09743526458740234, 0.09724803161621094, 0.09729631805419922, 0.09657965087890626, 0.09596518707275391, 0.09578243255615235, 0.09608393859863282, 0.0966087646484375, 0.0966003189086914, 0.09717632293701171, 0.09733837127685546, 0.09740838623046875, 0.09742604827880859, 0.09705677032470703, 0.09608128356933594, 0.09591462707519531, 0.09600176239013672, 0.09677238464355468, 0.09701785278320313, 0.09703584289550782, 0.09715347290039063, 0.09720352172851562, 0.10818694305419922, 0.0956443862915039, 0.09466387176513671, 0.09471427154541015, 0.09570972442626953, 0.09489939117431641, 0.09469321441650391, 0.09557500457763672, 0.09502413177490235, 0.0995440673828125, 0.09896959686279297, 0.09669017791748047, 0.09601776123046875, 0.09567884826660156, 0.09570047760009766, 0.09571520233154297, 0.0949400634765625, 0.09575167846679687, 0.0956974105834961, 0.09739081573486329, 0.09790850830078125, 0.09678643035888672, 0.09667536163330079, 0.0964853744506836, 0.09607421112060546, 0.09575775909423828, 0.09561901092529297, 
0.0959494400024414, 0.0971918716430664, 0.09634822082519531, 0.09705792236328124, 0.09709990692138672, 0.09699404907226562, 0.09676953887939453, 0.09627085113525391, 0.09607782745361328, 0.0961753921508789, 0.09625059509277344, 0.096455810546875, 0.09720476531982422, 0.09685145568847656, 0.09713094329833985, 0.09700729370117188, 0.09796015930175782, 0.09587967681884765, 0.09625804901123047, 0.09601017761230468, 0.09679058837890625, 0.09682329559326172, 0.09703199768066406, 0.09713683319091797, 0.09697264099121093, 0.09704806518554687, 0.09746908569335938, 0.0962191390991211, 0.09631539154052735, 0.0961343994140625, 0.09639369964599609, 0.09725981140136719, 0.09706233978271485, 0.09700614166259766, 0.09690831756591797, 0.09773769378662109, 0.10766470336914062, 0.09549689483642577, 0.09474765014648437, 0.09572045135498047, 0.09509478759765624, 0.09552243041992188, 0.09482073974609374, 0.09502105712890625, 0.09568777465820312, 0.09902969360351563, 0.10009193420410156, 0.096538818359375, 0.09594265747070313, 0.09566819000244141, 0.09620893096923828, 0.09530982208251954, 0.09538764953613281, 0.09616928100585938, 0.09579148864746094, 0.09777184295654297, 0.09843666839599609, 0.09749657440185547, 0.09626083374023438, 0.09628441619873047, 0.09567033386230468, 0.09571164703369141, 0.0956803207397461, 0.09585081481933594, 0.09715084838867187, 0.09678848266601563, 0.09730153656005859, 0.09815753936767578, 0.09674547576904297, 0.09598473358154297, 0.09622370910644532, 0.09596153259277344, 0.09614669036865234, 0.09624447631835938, 0.0975335693359375, 0.09694451141357421, 0.09707513427734375, 0.09719097900390625, 0.09689190673828126, 0.0965038070678711, 0.09629424285888671, 0.09613712310791016, 0.09634073638916016, 0.09673318481445313, 0.09759539031982421, 0.09700899505615235, 0.09701801300048828, 0.09723280334472656, 0.09680691528320312, 0.09689254760742187, 0.09674851226806641, 0.09667990112304688, 0.09576850891113281, 0.0969617919921875, 0.09709040069580079, 0.09787152099609375, 0.09705104064941407, 0.09707462310791015, 0.0969139175415039, 0.11003695678710937, 0.09554883575439453, 0.09548041534423828, 0.0965031967163086, 0.095439453125, 0.09505897521972656, 0.09564006042480469, 0.09569532775878906, 0.09536307525634766, 0.09813811492919922, 0.09937920379638672, 0.09745203399658203, 0.09604300689697266, 0.09579519653320312, 0.09568418884277344, 0.09561305236816406, 0.09573814392089844, 0.09544499206542968, 0.09570713806152344, 0.0973733139038086, 0.09803030395507813, 0.09730400085449219, 0.09698786926269531, 0.09621683502197266, 0.09566153717041016, 0.09603971099853516, 0.09602867126464844, 0.09575526428222657, 0.09624269104003906, 0.09727180480957032, 0.0978862075805664, 0.09765478515625, 0.09716121673583984, 0.09619987487792969, 0.09589433288574219, 0.09589520263671875, 0.09626659393310547, 0.09603890991210938, 0.09689702606201171, 0.09718284606933594, 0.09730751800537109, 0.09738140869140625, 0.09793020629882812, 0.09681017303466796, 0.09591276550292968, 0.09592217254638671, 0.09602655792236328, 0.09679058837890625, 0.0965895004272461, 0.09719750213623046, 0.09740377807617187, 0.0976278076171875, 0.09733932495117187, 0.09799311828613282, 0.09599504089355469, 0.09616265869140625, 0.09633586883544921, 0.09677206420898438, 0.09751964569091796, 0.09675727844238281, 0.097275390625, 0.09700764465332032, 0.09731986999511719, 0.10972889709472657, 0.09557500457763672, 0.09535692596435547, 0.09539711761474609, 0.09550681304931641, 0.09574233245849609, 0.09542041778564453, 0.09550643157958984, 
0.09509683227539062, 0.09876070404052735, 0.09953846740722656, 0.09723887634277344, 0.09631807708740234, 0.09550822448730469, 0.0951719970703125, 0.09563632202148438, 0.09603209686279297, 0.09554601287841796, 0.0961143341064453, 0.09770175933837891, 0.09773308563232422, 0.09727180480957032, 0.09670406341552734, 0.09612438201904297, 0.0959026870727539, 0.0957763214111328, 0.09620630645751953, 0.09577776336669921, 0.09640345764160156, 0.09742745971679688, 0.09748070526123047, 0.09764249420166016, 0.0969295654296875, 0.09649545288085938, 0.09619891357421875, 0.0961371841430664, 0.09666575622558594, 0.09573894500732422, 0.09617504119873047, 0.096753662109375, 0.09748480224609375, 0.09759334564208984, 0.0975257568359375, 0.09678438568115234, 0.09588896179199219, 0.09658003234863281, 0.09640771484375, 0.09616162872314453, 0.09617948913574219, 0.09694636535644531, 0.09713871765136718, 0.09747286224365234, 0.09830006408691407, 0.0967741470336914, 0.0963463363647461, 0.09641302490234376, 0.09721286773681641, 0.09618153381347656, 0.09641648101806641, 0.09701744079589844, 0.097574462890625, 0.09750614166259766, 0.09787801361083984, 0.10787430572509765, 0.09566352081298828, 0.09529199981689453, 0.09552281951904297, 0.09556172943115235, 0.0956948471069336, 0.09552076721191406, 0.0955980453491211, 0.09539174652099609, 0.09975049591064453, 0.09962694549560547, 0.09641065979003906, 0.09599625396728516, 0.09576306915283203, 0.09619455718994141, 0.09618841552734375, 0.09561497497558594, 0.09597065734863282, 0.09589155578613281, 0.09818323516845703, 0.09882470703125, 0.09680828857421875, 0.09643280029296875, 0.0959078369140625, 0.09631948852539063, 0.09685552215576172, 0.09572573089599609, 0.0952958755493164, 0.09678623962402344, 0.09745836639404297, 0.09821103668212891, 0.09696521759033203, 0.09661663818359376, 0.09622525024414062, 0.09689295959472656, 0.0969746856689453, 0.09574364471435547, 0.09580390167236329, 0.09656114959716797, 0.09748636627197266, 0.09822051239013672, 0.09744134521484375, 0.09667424011230469, 0.09629695892333984, 0.09688473510742188, 0.09721212768554688, 0.0961149444580078, 0.0959768295288086, 0.09642050933837891, 0.09701152038574219, 0.09752185821533203, 0.09735753631591797, 0.09698332977294923, 0.09680403137207032, 0.0970289306640625, 0.09698918151855469, 0.09647113800048829, 0.09652655792236328, 0.09690070343017577, 0.09730262756347656, 0.09723664093017578, 0.09757936096191407, 0.09741059112548828]",tokens/s,10.355587543848664,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,881.729536,657.391616,0.0,262.144,258.163712,s,1,8.0430068359375,8.0430068359375,0.0,8.0430068359375,8.0430068359375,8.0430068359375,8.0430068359375,[8.0430068359375],,kWh,2.6612145262496746e-05,2.9283357066335143e-06,9.433063102012862e-06,3.897354407114312e-05,,MB,1172.221952,755.95776,0.0,348.127232,317.820928,s,10,0.20838342666625978,0.02083834266662598,0.0002567364207006522,0.020853376388549803,0.02121261806488037,0.021219717121124266,0.021225396366119385,"[0.021226816177368164, 0.020714208602905272, 0.02121104049682617, 0.020811904907226564, 0.02096156883239746, 0.02099456024169922, 0.020591007232666016, 0.020537759780883787, 0.020894847869873046, 0.020439712524414063]",tokens/s,12285.046085262884,kWh,6.035742818009889e-07,6.65637668998693e-08,4.0238668554542483e-07,1.072524734246283e-06,tokens/kWh,238689133.9899066,MB,1183.608832,781.123584,0.0,373.293056,317.823488,s,10,10.020481994628907,1.0020481994628907,0.010652903509280085,1.0036236572265627,1.015684619140625,1.0165244689941406,1.017196348876953,"[1.004661865234375, 0.9886724853515625, 1.0154979858398439, 0.9943750610351563, 1.00258544921875, 1.0109461669921875, 0.9888856201171875, 1.0173643188476562, 1.008752197265625, 0.9887408447265625]",tokens/s,62.871227186245854,kWh,2.9208369141114508e-05,3.221175133107925e-06,1.2214234518856399e-05,4.464377879307884e-05,tokens/kWh,1411170.8664269017,,s,630,10.01510766124725,0.015896996287694053,0.0004690713008814915,0.01582924795150757,0.016316352462768552,0.01651300172805786,0.017206590061187743,"[0.016197792053222658, 0.016232288360595704, 0.016127071380615234, 0.01612601661682129, 0.016044895172119142, 0.01597216033935547, 0.01581308841705322, 0.015744256019592284, 0.01567296028137207, 0.015479647636413574, 0.01549721622467041, 0.015454208374023438, 0.015538080215454102, 0.015534175872802734, 0.015527935981750488, 0.01571401596069336, 0.015755776405334473, 0.015605536460876465, 0.015478591918945312, 0.015444160461425781, 0.015376640319824218, 0.015672896385192873, 0.015435551643371582, 0.01535427188873291, 0.01536240005493164, 0.015480223655700684, 0.015542528152465821, 0.01576959991455078, 0.01568073558807373, 0.015721216201782226, 0.016295967102050782, 0.017291231155395506, 0.01617513656616211, 0.016330751419067382, 0.01627510452270508, 0.016594944000244142, 0.022169952392578126, 0.018266111373901366, 0.01623798370361328, 0.01572719955444336, 0.015545984268188477, 0.01549459171295166, 0.01565321636199951, 0.015553119659423829, 0.015564703941345214, 0.01600726318359375, 0.015694080352783205, 0.01569375991821289, 0.016228160858154296, 0.015629983901977538, 0.01576316833496094, 0.01586678409576416, 0.015830592155456544, 0.01559158420562744, 0.015557727813720702, 0.016085792541503906, 0.015579263687133789, 0.016134016036987303, 0.015759231567382813, 0.01577014446258545, 0.015695743560791016, 0.0157609920501709, 0.01595971202850342, 0.015769439697265623, 0.015710176467895506, 0.015570816040039062, 0.015528191566467285, 0.01551103973388672, 0.015547072410583496, 0.015563808441162109, 0.015512255668640137, 0.01562435245513916, 0.01576857566833496, 0.015600000381469727, 0.015484576225280761, 0.015526880264282226, 0.015510592460632324, 0.015522751808166504, 0.015433728218078613, 0.015482560157775879, 0.015483200073242187, 0.015344799995422364, 0.015393631935119629, 0.015468607902526856, 0.015439807891845703, 0.015330400466918945, 0.015469632148742675, 0.015626079559326173, 0.015638527870178224, 0.015577088356018067, 
0.01562758445739746, 0.015673855781555175, 0.015691967964172365, 0.015775103569030762, 0.01676483154296875, 0.016123647689819335, 0.01583612823486328, 0.01562764835357666, 0.01555519962310791, 0.015474399566650391, 0.015567328453063965, 0.015574848175048828, 0.01558732795715332, 0.015505056381225586, 0.015405407905578613, 0.015460224151611328, 0.015353919982910157, 0.015641920089721678, 0.015636672019958497, 0.015450688362121582, 0.015433728218078613, 0.015586943626403808, 0.015716799736022947, 0.015835071563720705, 0.015835295677185057, 0.015684639930725098, 0.015696479797363282, 0.01601353645324707, 0.016946624755859375, 0.015904576301574706, 0.016505151748657226, 0.01609769630432129, 0.016074560165405274, 0.016111839294433595, 0.015984064102172853, 0.015935744285583496, 0.015714240074157716, 0.015853311538696287, 0.015574848175048828, 0.015575488090515137, 0.0155217924118042, 0.015561727523803711, 0.015551360130310058, 0.015628416061401366, 0.0157327356338501, 0.015976384162902833, 0.01609347152709961, 0.016144256591796875, 0.016123807907104493, 0.016115007400512697, 0.016074623107910156, 0.01613417625427246, 0.016089887619018556, 0.016134143829345703, 0.016033504486083986, 0.016050464630126954, 0.016070432662963867, 0.016109216690063478, 0.01610767936706543, 0.016101919174194335, 0.01614569664001465, 0.016182912826538085, 0.01623958396911621, 0.016300031661987305, 0.016519424438476562, 0.016210720062255858, 0.016206815719604493, 0.016205087661743164, 0.016182016372680665, 0.016127840042114257, 0.01615679931640625, 0.01603296089172363, 0.016057151794433594, 0.016107072830200197, 0.01621881675720215, 0.01619049644470215, 0.01608518409729004, 0.016116031646728514, 0.016091360092163085, 0.016457183837890625, 0.01609782409667969, 0.01675811195373535, 0.016206272125244142, 0.016279199600219726, 0.01624140739440918, 0.016445247650146485, 0.01621401596069336, 0.016193248748779296, 0.016328704833984374, 0.01625753593444824, 0.01621388816833496, 0.01656150436401367, 0.016468544006347657, 0.01620377540588379, 0.016130048751831053, 0.016087039947509766, 0.016111328125, 0.016127904891967772, 0.016111583709716798, 0.01550284767150879, 0.015867775917053222, 0.0157128963470459, 0.015638527870178224, 0.015560447692871094, 0.015511103630065918, 0.015439616203308106, 0.015505951881408691, 0.015372703552246094, 0.015350943565368652, 0.0153504638671875, 0.015372447967529297, 0.01553983974456787, 0.01574934387207031, 0.016052383422851563, 0.015711551666259767, 0.015710720062255858, 0.015636063575744628, 0.01569993591308594, 0.015796863555908203, 0.015882240295410157, 0.015943391799926758, 0.015868191719055177, 0.015879199981689452, 0.015754207611083985, 0.0172126407623291, 0.015982815742492676, 0.0162412166595459, 0.015811936378479004, 0.015744895935058595, 0.01585760021209717, 0.015811327934265137, 0.015599712371826172, 0.015437824249267578, 0.015368351936340332, 0.015379584312438965, 0.015366656303405762, 0.015548480033874513, 0.015524255752563476, 0.015432671546936035, 0.015462592124938964, 0.015673664093017577, 0.015900959968566896, 0.01597856044769287, 0.015868895530700682, 0.015535231590270995, 0.015603551864624024, 0.015540224075317383, 0.01562009620666504, 0.015560704231262207, 0.015793312072753907, 0.015712672233581543, 0.015601183891296387, 0.015676320075988768, 0.015931679725646974, 0.016498176574707032, 0.016636255264282227, 0.01637593650817871, 0.01633459281921387, 0.01627462387084961, 0.016259904861450195, 0.016158559799194335, 0.016071136474609376, 0.016126399993896486, 0.016113664627075194, 
0.015970303535461427, 0.01599219226837158, 0.015743616104125977, 0.015550463676452637, 0.01551961612701416, 0.015409279823303222, 0.01545792007446289, 0.01566553592681885, 0.0163450870513916, 0.01633839988708496, 0.015810463905334474, 0.015671327590942384, 0.01563055992126465, 0.015986111640930174, 0.015471551895141602, 0.015453215599060058, 0.015413887977600097, 0.01546070384979248, 0.015495295524597169, 0.015781760215759277, 0.015892479896545412, 0.015769887924194335, 0.01573359966278076, 0.015844511985778808, 0.015875359535217286, 0.015847871780395508, 0.015773695945739748, 0.01571664047241211, 0.01557494354248047, 0.015646528244018555, 0.015523200035095214, 0.015546272277832032, 0.015528672218322754, 0.015564800262451172, 0.01557094383239746, 0.01560534381866455, 0.015481247901916503, 0.015482879638671876, 0.015648096084594727, 0.015672096252441405, 0.01572646427154541, 0.01572441577911377, 0.01576972770690918, 0.015779840469360353, 0.015834272384643553, 0.017191776275634764, 0.016029472351074218, 0.016107744216918945, 0.0165295352935791, 0.01626128005981445, 0.01657756805419922, 0.016689855575561522, 0.01661929512023926, 0.016582080841064453, 0.016177951812744142, 0.01617919921875, 0.016156095504760742, 0.017500415802001953, 0.01629420852661133, 0.016332319259643555, 0.016230016708374023, 0.015903039932250975, 0.01613884735107422, 0.016056352615356446, 0.016139392852783204, 0.0161102409362793, 0.015992863655090332, 0.016054464340209962, 0.016067840576171874, 0.016086816787719727, 0.016316640853881837, 0.0161779842376709, 0.016178495407104494, 0.01605081558227539, 0.01608687973022461, 0.01612406349182129, 0.016052223205566405, 0.016203647613525392, 0.016156543731689454, 0.016121280670166015, 0.01624982452392578, 0.016215904235839844, 0.016312320709228514, 0.016210208892822264, 0.016256736755371093, 0.016463872909545898, 0.01680335998535156, 0.016239072799682615, 0.01611782455444336, 0.016043968200683593, 0.016054271697998047, 0.016076608657836913, 0.015976832389831542, 0.016024927139282226, 0.016058847427368163, 0.016052127838134766, 0.016048288345336913, 0.01607468795776367, 0.016046079635620117, 0.01613209533691406, 0.0160501766204834, 0.01616089630126953, 0.016268960952758788, 0.016022783279418945, 0.015864864349365234, 0.015789183616638183, 0.01561849594116211, 0.015644607543945314, 0.015495488166809082, 0.015509056091308595, 0.015398528099060059, 0.015693056106567384, 0.01567695999145508, 0.015669407844543456, 0.01648134422302246, 0.015991456031799318, 0.015999263763427734, 0.016107519149780272, 0.01613209533691406, 0.015945119857788084, 0.01611612892150879, 0.015853759765625, 0.015699968338012696, 0.015751168251037596, 0.015431584358215332, 0.01575945568084717, 0.016082944869995116, 0.015696127891540526, 0.01554201602935791, 0.016002559661865236, 0.015493632316589356, 0.015361632347106934, 0.015494720458984375, 0.015571807861328125, 0.015742783546447755, 0.01564896011352539, 0.015567071914672851, 0.015460127830505372, 0.01553206443786621, 0.015456319808959961, 0.015487968444824219, 0.015547327995300293, 0.01575539207458496, 0.015883423805236815, 0.015913120269775392, 0.01594425582885742, 0.01637785530090332, 0.015923456192016603, 0.01592416000366211, 0.015911168098449707, 0.01587990379333496, 0.01594480037689209, 0.015686495780944824, 0.015915936470031738, 0.015550463676452637, 0.015480992317199706, 0.015387807846069336, 0.01547539234161377, 0.015519840240478516, 0.01570191955566406, 0.015736096382141112, 0.015788384437561035, 0.015598176002502441, 0.015525535583496093, 
0.01562764835357666, 0.015433600425720215, 0.015443039894104003, 0.015398880004882813, 0.015331135749816895, 0.015500703811645507, 0.01568230438232422, 0.0156212797164917, 0.015572863578796387, 0.015626239776611327, 0.015584223747253418, 0.01561193561553955, 0.01572364807128906, 0.016351423263549804, 0.015996895790100097, 0.01596649646759033, 0.01607526397705078, 0.0159366397857666, 0.015766143798828125, 0.01567948818206787, 0.01565286445617676, 0.015601471900939941, 0.015511167526245118, 0.015323328018188477, 0.015689855575561525, 0.01575376033782959, 0.015654080390930174, 0.015565279960632325, 0.015520064353942872, 0.015470784187316894, 0.015484416007995605, 0.01557759952545166, 0.016189023971557616, 0.01604240036010742, 0.01591267204284668, 0.015669568061828613, 0.015784000396728514, 0.01577494430541992, 0.01573344039916992, 0.01583513641357422, 0.015930463790893554, 0.016055423736572264, 0.016330528259277343, 0.01635686492919922, 0.016421375274658204, 0.016830656051635744, 0.016834367752075197, 0.01626316833496094, 0.01617286491394043, 0.016108863830566405, 0.016137088775634766, 0.01701888084411621, 0.01604732894897461, 0.016103519439697265, 0.016746816635131837, 0.016250463485717775, 0.019272480010986328, 0.016350784301757813, 0.016225791931152343, 0.01614329528808594, 0.015933600425720215, 0.015970239639282225, 0.015927167892456056, 0.015829024314880372, 0.015800448417663576, 0.015928319931030274, 0.015947839736938477, 0.01612451171875, 0.01616908836364746, 0.016269407272338866, 0.016115999221801756, 0.016153791427612304, 0.016200223922729493, 0.016216064453125, 0.016325855255126955, 0.01696175956726074, 0.016360000610351564, 0.016342975616455077, 0.016228607177734375, 0.016219968795776366, 0.016119808197021485, 0.016151552200317384, 0.016288768768310546, 0.016244607925415037, 0.01619161605834961, 0.01616092872619629, 0.015939680099487305, 0.01618307113647461, 0.01627462387084961, 0.01623628807067871, 0.016142175674438475, 0.01609129524230957, 0.016668672561645507, 0.016300031661987305, 0.016144351959228517, 0.016108608245849608, 0.016573408126831054, 0.01649260711669922, 0.017542272567749023, 0.01627622413635254, 0.016404287338256836, 0.01631158447265625, 0.016356319427490235, 0.016436672210693358, 0.016765504837036132, 0.016373151779174804, 0.016316320419311522, 0.016441152572631835, 0.016270431518554687, 0.016558015823364258, 0.016205663681030272, 0.016707584381103514, 0.016186431884765626, 0.016008224487304688, 0.015841183662414552, 0.015755295753479004, 0.01589859199523926, 0.016106943130493163, 0.015665568351745606, 0.015847519874572755, 0.015515647888183593, 0.015511615753173828, 0.015900671958923338, 0.015736831665039062, 0.015726943969726563, 0.016909984588623046, 0.015659008026123047, 0.01586355209350586, 0.015569151878356934, 0.015558688163757325, 0.015701984405517577, 0.015644864082336427, 0.015527359962463379, 0.015829471588134764, 0.01553603172302246, 0.015590847969055176, 0.01605232048034668, 0.01570864009857178, 0.01570201587677002, 0.015609439849853515, 0.01555292797088623, 0.015617216110229492, 0.015584063529968262, 0.015604928016662597, 0.015508543968200684, 0.015490400314331054, 0.015578911781311036, 0.015522144317626952, 0.015537983894348144, 0.015133983612060547, 0.015426464080810547, 0.01535587215423584, 0.015413056373596192, 0.01544211196899414, 0.015388575553894043, 0.015451583862304687, 0.015343839645385743, 0.015425984382629394, 0.015396863937377929, 0.015405055999755859, 0.015581184387207032, 0.0155316801071167, 0.015560352325439453, 0.01546275234222412, 
0.015447999954223633, 0.01552950382232666, 0.015532928466796875, 0.015828991889953615, 0.01552998447418213, 0.01603379249572754, 0.015588640213012695, 0.01561257553100586, 0.015893888473510743, 0.016078527450561524, 0.016076959609985352, 0.015809215545654298, 0.015685791969299315, 0.015544192314147949, 0.015488639831542968, 0.01561190414428711, 0.015532544136047363, 0.01570406436920166, 0.015871583938598634, 0.01566262435913086, 0.015754015922546386, 0.015904447555541993, 0.015888416290283203, 0.01576707172393799, 0.01565987205505371, 0.015638208389282225, 0.01574124813079834, 0.015830240249633788, 0.015864831924438477, 0.015916831970214845, 0.01591872024536133, 0.015804287910461425, 0.015849984169006348, 0.01592319965362549, 0.015917056083679198, 0.016377119064331053, 0.01608163261413574, 0.01599622440338135, 0.015835807800292968, 0.015830495834350585, 0.015768128395080565, 0.015977984428405763, 0.01577830410003662, 0.015624192237854004, 0.015602687835693359, 0.015530176162719726, 0.015534976005554198, 0.015529919624328613]",tokens/s,62.90496530933364,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1049.137152,5046.730752,0.0,4651.483136,4638.22848,s,1,14.198349609375,14.198349609375,0.0,14.198349609375,14.198349609375,14.198349609375,14.198349609375,[14.198349609375],,kWh,0.0002059586062625006,2.271175767481626e-05,7.954506363599956e-05,0.0003082154275733164,,MB,1222.680576,6172.901376,0.0,5765.070848,5418.530816,s,10,10.067480834960937,1.0067480834960938,0.0060373690484956935,1.0091661987304688,1.0126028259277344,1.0127268890380858,1.0128261395263671,"[0.9936867065429688, 0.9992445068359375, 1.0019698486328126, 1.007849609375, 1.0080123901367188, 1.0125752563476562, 1.0103200073242187, 1.0128509521484375, 1.0103255615234374, 1.01064599609375]",tokens/s,254.2840698648256,kWh,2.9188706434471595e-05,3.2190273342346356e-06,1.9385773084364752e-05,5.179350685307098e-05,tokens/kWh,4942704.511710836,MB,1240.367104,6172.901376,0.0,5765.070848,5418.533376,s,10,49.00916796875,4.900916796875,0.01320260211013012,4.905751953125,4.9146486328125,4.9152515625,4.91573390625,"[4.87516796875, 4.88421875, 4.890076171875, 4.89661669921875, 4.9038095703125, 4.9076943359375, 4.907861328125, 4.91335400390625, 4.9145146484375, 4.9158544921875]",tokens/s,12.854737717679894,kWh,0.00014398664825469116,1.5882754806762217e-05,9.576901348383259e-05,0.000255638416545286,tokens/kWh,246441.83316179962,,s,630,49.004861740112325,0.07778549482557509,0.0019485939166861061,0.07745126342773437,0.07909130020141601,0.07961224327087402,0.0891949423980713,"[0.08933990478515624, 0.07592518615722656, 0.07579583740234375, 0.07649581146240235, 0.07545414733886718, 0.07582675170898437, 0.07566413116455079, 0.075683837890625, 0.07550975799560547, 0.07577983856201172, 0.07732249450683594, 0.07804927825927735, 0.08050396728515626, 0.07935266876220703, 0.07704370880126953, 0.07665782165527343, 
0.07588540649414062, 0.07588998413085937, 0.07575212860107422, 0.07571865844726562, 0.07590707397460937, 0.07600947570800781, 0.07672752380371094, 0.07752349090576172, 0.07847929382324219, 0.08017878723144531, 0.07869718170166015, 0.07727500915527344, 0.07706253051757812, 0.07684889221191406, 0.07630233764648438, 0.07626751708984375, 0.07732185363769531, 0.07647456359863282, 0.07617145538330078, 0.07733452606201172, 0.07787459564208984, 0.07901654052734375, 0.0784993896484375, 0.07908988952636718, 0.07763929748535156, 0.07766409301757812, 0.07689087677001953, 0.07647232055664062, 0.07668531036376953, 0.0768586883544922, 0.07644195556640625, 0.07701673889160156, 0.07734854125976562, 0.07863772583007812, 0.07846537780761718, 0.07910399627685546, 0.07791001892089844, 0.07895021057128906, 0.07781132507324219, 0.07657324981689453, 0.07714790344238281, 0.07704755401611328, 0.07659529876708984, 0.077295166015625, 0.07726780700683594, 0.0777359390258789, 0.0784783706665039, 0.0919291229248047, 0.07576172637939453, 0.0755568618774414, 0.07590707397460937, 0.07573661041259766, 0.07641340637207031, 0.0758104019165039, 0.07576812744140625, 0.07689132690429687, 0.07628278350830078, 0.07592960357666016, 0.07761920166015625, 0.08085708618164063, 0.07805731201171875, 0.0778733139038086, 0.07724237060546875, 0.07604774475097656, 0.0764975357055664, 0.07604383850097657, 0.07632940673828124, 0.0774771499633789, 0.07690108489990234, 0.07623052978515625, 0.07779097747802734, 0.07969558715820313, 0.07903456115722657, 0.07781977844238282, 0.07758448028564453, 0.07880089569091797, 0.07630694580078125, 0.07666687774658203, 0.07605833435058594, 0.0763661117553711, 0.07667711639404297, 0.07663398742675781, 0.07721087646484374, 0.0781443862915039, 0.07885004425048828, 0.078671875, 0.0779422378540039, 0.07854134368896484, 0.07750656127929688, 0.0770142059326172, 0.07685142517089844, 0.076404541015625, 0.07686428833007812, 0.0770355224609375, 0.07708057403564453, 0.07759852600097657, 0.07851641845703125, 0.07817327880859375, 0.07840860748291016, 0.0778629150390625, 0.07767040252685548, 0.07786495971679687, 0.07760076904296875, 0.0772894744873047, 0.07688396453857421, 0.07732204437255859, 0.07719475555419922, 0.07785337829589843, 0.07802880096435547, 0.07879065704345703, 0.08826876831054688, 0.07669334411621094, 0.07579094696044922, 0.07627366638183594, 0.07631667327880859, 0.07575961303710937, 0.07579647827148438, 0.07631053161621094, 0.07642726135253906, 0.07683891296386719, 0.07633920288085938, 0.07827561950683594, 0.08079049682617187, 0.07991705322265626, 0.07755356597900391, 0.07686112213134766, 0.07660995483398438, 0.07662156677246093, 0.07638861083984375, 0.07607046508789063, 0.076271484375, 0.07684767913818359, 0.07670524597167969, 0.07757794952392578, 0.07920102691650391, 0.07901315307617188, 0.07767734527587891, 0.0770703353881836, 0.07801139068603516, 0.07681126403808594, 0.07685270690917968, 0.07693571472167969, 0.07647232055664062, 0.07682240295410156, 0.07676322937011719, 0.0774512939453125, 0.0785080337524414, 0.07871231842041015, 0.07946617889404296, 0.07816684722900391, 0.07755980682373047, 0.07802780914306641, 0.07727788543701172, 0.07711177825927734, 0.07703327941894532, 0.07709081268310547, 0.07758233642578125, 0.07713996887207031, 0.0777871322631836, 0.0786513900756836, 0.07839449310302735, 0.07836966705322265, 0.0781475830078125, 0.07869235229492187, 0.0779606704711914, 0.07787773132324219, 0.07692908477783203, 0.07748607635498046, 0.07730707550048828, 0.07751254272460938, 0.07814630126953125, 
0.07785692596435546, 0.07851628875732422, 0.08884003448486329, 0.07638502502441406, 0.07640064239501954, 0.0759582748413086, 0.07597440338134766, 0.07646028900146484, 0.07625727844238281, 0.07593126678466797, 0.0761200942993164, 0.07692323303222656, 0.07742873382568359, 0.0781946563720703, 0.08213302612304688, 0.07952588653564453, 0.077412353515625, 0.07644322967529296, 0.07687564849853516, 0.07666044616699219, 0.07652025604248047, 0.07665254211425782, 0.07653689575195312, 0.07612662506103515, 0.07665865325927734, 0.07773446655273437, 0.07901593780517578, 0.07959347534179688, 0.07904825592041016, 0.07747360229492188, 0.07706483459472656, 0.07739295959472656, 0.07693574523925781, 0.07688575744628906, 0.07686822509765626, 0.07686758422851563, 0.07674674987792969, 0.07716156768798828, 0.07800717163085938, 0.07906716918945313, 0.07922057342529297, 0.07867203521728515, 0.07757004547119141, 0.07769197082519531, 0.0779676513671875, 0.07718160247802734, 0.07712973022460938, 0.07709677124023437, 0.07709308624267579, 0.07721366119384766, 0.07903587341308593, 0.07867766571044922, 0.07856114959716796, 0.0788298568725586, 0.07819123077392579, 0.07798588562011718, 0.07786473846435547, 0.07791433715820313, 0.07738982391357421, 0.07722998046875, 0.07718675231933594, 0.07734636688232421, 0.0779559326171875, 0.07870054626464844, 0.07818240356445312, 0.0911994857788086, 0.0762798080444336, 0.07584358215332031, 0.07649603271484375, 0.0764582748413086, 0.07651779174804688, 0.07614479827880859, 0.07630643463134766, 0.076653564453125, 0.07694233703613282, 0.07694643402099609, 0.07857855987548829, 0.08163890838623047, 0.07821987152099609, 0.07759398651123046, 0.07686825561523437, 0.0766929931640625, 0.0770136947631836, 0.07673420715332031, 0.07662528228759766, 0.07662179565429687, 0.07683052825927734, 0.07660623931884766, 0.07949324798583984, 0.07966915130615235, 0.07928227233886719, 0.07841177368164062, 0.07741645050048829, 0.07729357147216796, 0.07710310363769532, 0.07671148681640624, 0.07656492614746094, 0.07674674987792969, 0.07674674987792969, 0.07674265289306641, 0.07807907104492187, 0.07892470550537109, 0.07920783996582031, 0.07908121490478516, 0.07890211486816406, 0.07831552124023437, 0.07692243194580078, 0.07714387512207031, 0.07698291015625, 0.07729718780517578, 0.07728790283203125, 0.07745126342773437, 0.07790169525146484, 0.07801436614990234, 0.0787042236328125, 0.07878031921386719, 0.07864329528808593, 0.0792828140258789, 0.07832575988769531, 0.07770435333251953, 0.07753814697265625, 0.07774553680419923, 0.07699887847900391, 0.07718701171875, 0.07758022308349609, 0.07819318389892578, 0.07874121856689453, 0.07854723358154297, 0.09250406646728515, 0.07675084686279297, 0.07657881927490234, 0.07647846221923828, 0.0760335693359375, 0.07605705261230469, 0.0766051483154297, 0.07653545379638672, 0.07656716918945312, 0.07651942443847656, 0.0773017578125, 0.07809843444824219, 0.08111007690429688, 0.08007350158691406, 0.0781416015625, 0.07743599700927735, 0.07678044891357422, 0.07651737976074219, 0.07716044616699219, 0.07665586853027344, 0.07667945861816407, 0.07683865356445313, 0.0768416976928711, 0.07736649322509766, 0.07966390228271485, 0.07912652587890626, 0.07844425964355468, 0.07831785583496094, 0.07772160339355469, 0.07743833923339843, 0.0773105926513672, 0.07698738861083984, 0.07756476593017578, 0.07682182312011719, 0.07649571228027344, 0.07733452606201172, 0.07904048156738282, 0.07885545349121094, 0.07853679656982422, 0.07855481719970703, 0.07960675048828125, 0.07807180786132813, 0.07713587188720702, 
0.07713938903808594, 0.07747805023193359, 0.07735273742675781, 0.07709951782226562, 0.07797567749023437, 0.07788953399658204, 0.07851417541503906, 0.07857151794433594, 0.07862844848632812, 0.07839376068115235, 0.07795097351074219, 0.07811891174316406, 0.07767449951171874, 0.07791600036621094, 0.07697779083251953, 0.07707292938232421, 0.07745126342773437, 0.07808204650878907, 0.07814262390136718, 0.07821517181396484, 0.0922833251953125, 0.07605657958984376, 0.0760274887084961, 0.07657923126220703, 0.07725465393066407, 0.07603807830810547, 0.07654611206054687, 0.07631439971923829, 0.07631484985351562, 0.07741645050048829, 0.07675846099853516, 0.07832736206054687, 0.08169369506835937, 0.07808921813964843, 0.07723110198974609, 0.0768202896118164, 0.077050048828125, 0.07691651153564454, 0.07653807830810547, 0.07619174194335937, 0.07660749053955078, 0.07664230346679687, 0.07795420837402343, 0.07770403289794922, 0.07947408294677734, 0.0792080307006836, 0.07818956756591797, 0.07752448272705079, 0.07701478576660156, 0.07753190612792969, 0.07766015625, 0.07626956939697266, 0.0767262725830078, 0.07723417663574218, 0.07736524963378906, 0.07787725067138672, 0.07937840270996094, 0.0800123519897461, 0.07899190521240235, 0.078283203125, 0.07784243011474609, 0.07792230224609376, 0.07785049438476563, 0.07748531341552735, 0.07709123229980469, 0.07682198333740234, 0.07735807800292968, 0.07750656127929688, 0.07796736145019531, 0.07891763305664062, 0.07925350189208985, 0.07851827239990235, 0.078487548828125, 0.07799398040771484, 0.07774169921875, 0.07901427459716796, 0.07771340942382812, 0.07677529907226563, 0.07728131103515624, 0.07822908782958984, 0.07852671813964844, 0.07851606750488281, 0.07855760192871093, 0.09115846252441406, 0.07670630645751954, 0.07646975708007812, 0.07633283233642578, 0.0765337905883789, 0.0765283203125, 0.07622246551513671, 0.07657881927490234, 0.07638835144042969, 0.0765132827758789, 0.07781539154052734, 0.07809065246582031, 0.08151561737060548, 0.07953705596923828, 0.07803449249267579, 0.07733865356445313, 0.076499267578125, 0.07646627044677734, 0.07694950103759765, 0.07665849304199218, 0.07661126708984375, 0.07655174255371094, 0.07656707000732423, 0.07822582244873047, 0.07952515411376954, 0.08030281829833984, 0.07929631805419922, 0.07803718566894531, 0.07706419372558594, 0.07729737854003907, 0.077340576171875, 0.07714649963378906, 0.07733417510986328, 0.07831078338623047, 0.07705084991455079, 0.07731404876708985, 0.07851583862304687, 0.0790429458618164, 0.07931001281738281, 0.07871161651611328, 0.07838713836669922, 0.0775823974609375, 0.07878656005859375, 0.07711872100830078, 0.0767925796508789, 0.07751398468017578, 0.07763136291503907, 0.07734934234619141, 0.07833411407470703, 0.08007023620605469, 0.07871965026855468, 0.07824806213378906, 0.07788735961914063, 0.07836233520507813, 0.07840796661376953, 0.07781581115722656, 0.07737718200683594, 0.0773359375, 0.07852130889892578, 0.077264892578125, 0.07808348846435546, 0.07863970947265625, 0.07872512054443359, 0.0879636459350586, 0.07623270416259766, 0.07625113677978515, 0.07621568298339844, 0.07642556762695313, 0.0770041275024414, 0.07656543731689452, 0.07660749053955078, 0.07653135681152344, 0.077053955078125, 0.07745161437988281, 0.07872306823730468, 0.0814202880859375, 0.07960105895996093, 0.07732412719726563, 0.07663488006591797, 0.07662537384033204, 0.07727494049072266, 0.07751497650146484, 0.07678956604003906, 0.0767857894897461, 0.07705977630615235, 0.07730470275878906, 0.07856947326660156, 0.07934361267089844, 
0.07948271942138672, 0.07875727844238281, 0.07802751922607422, 0.07794687652587891, 0.07712345886230469, 0.07699468994140625, 0.07711949157714844, 0.0773017578125, 0.07774534606933593, 0.07737782287597657, 0.07929459381103515, 0.07879647827148438, 0.07851900482177734, 0.0791592025756836, 0.07901936340332032, 0.07828966522216797, 0.0777359390258789, 0.07760486602783204, 0.07720550537109375, 0.07771955108642578, 0.07756755065917968, 0.07756832122802734, 0.07838527679443359, 0.07839315032958985, 0.07843583679199219, 0.07845958709716797, 0.07860991668701171, 0.07894057464599609, 0.07810467529296874, 0.07853670501708984, 0.07722803497314454, 0.07732192230224609, 0.0779717788696289, 0.07756185913085938, 0.07801446533203125, 0.07843424224853515, 0.0784814682006836, 0.079476318359375, 0.0905379867553711, 0.07649423980712891, 0.07646883392333985, 0.07641088104248046, 0.07612416076660156, 0.0765191650390625, 0.0762671356201172, 0.07626521301269532, 0.07647526550292968, 0.0767011489868164, 0.07777894592285156, 0.0785311050415039, 0.08133379364013672, 0.07809276580810547, 0.07787513732910156, 0.07721155548095703, 0.07694761657714844, 0.0767610855102539, 0.0765251235961914, 0.07650342559814453, 0.07674665832519531, 0.07671209716796876, 0.07822713470458985, 0.07817862701416016, 0.07986697387695313, 0.07917984008789063, 0.07864201354980468, 0.0779386215209961, 0.0781968002319336, 0.07752499389648437, 0.07719439697265625, 0.07749718475341796, 0.0767262725830078, 0.07666687774658203, 0.07760195159912109, 0.07822831726074218, 0.07923638153076172, 0.07961673736572265, 0.07945625305175781, 0.07831756591796875, 0.07862271881103515, 0.0783790054321289, 0.07693721771240235, 0.07699411010742188, 0.07758073425292969, 0.07721766662597657, 0.07713190460205079, 0.07794175720214844, 0.08017404937744141, 0.07924457550048829, 0.07884671783447265, 0.07851827239990235, 0.07830323028564454, 0.07865753936767578, 0.07808975982666015, 0.07773590087890625, 0.07788800048828125, 0.07826150512695312, 0.07806438446044922, 0.07742371368408203, 0.07833414459228516, 0.07877203369140626, 0.0786707534790039]",tokens/s,12.855867308453632,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,817.467392,1266.548736,0.0,864.026624,816.853504,s,1,8.7405419921875,8.7405419921875,0.0,8.7405419921875,8.7405419921875,8.7405419921875,8.7405419921875,[8.7405419921875],,kWh,4.5865116820861357e-05,5.051977071333417e-06,1.4692233975988067e-05,6.560932786818284e-05,,MB,1321.41056,1398.669312,0.0,983.564288,914.186752,s,10,1.46425048828125,0.146425048828125,0.000657422964131433,0.14645521545410156,0.14724449615478516,0.147261270904541,0.14727469070434568,"[0.14575270080566408, 0.1453380126953125, 0.1472407684326172, 0.14636697387695313, 0.14696176147460938, 0.14564198303222656, 0.14697474670410157, 0.14615203857421874, 0.14727804565429686, 0.14654345703125]",tokens/s,1748.3347422372728,kWh,4.44261515612349e-06,4.899381118249521e-07,2.9605158364237436e-06,7.893069104372185e-06,tokens/kWh,32433518.142922968,MB,1346.019328,1398.669312,0.0,983.564288,914.189312,s,10,23.89740209960938,2.3897402099609373,0.025950918649399912,2.3846202392578126,2.42797587890625,2.436194970703125,2.442770244140625,"[2.3848779296875, 2.37496875, 2.385448974609375, 2.384362548828125, 2.358567626953125, 2.364825439453125, 2.370741455078125, 2.4030458984375, 2.4261494140625, 2.4444140625]",tokens/s,26.36269822862034,kWh,7.014443908429331e-05,7.736782071514859e-06,3.0148538428577694e-05,0.00010802975958438587,tokens/kWh,583172.6391169876,,s,630,23.893006591796873,0.03792540728856647,0.0006279791419104671,0.037709503173828124,0.03865291137695313,0.0389565019607544,0.0401226244354248,"[0.037625247955322266, 0.037739105224609375, 0.0377977294921875, 0.03770588684082031, 0.03930931091308594, 0.03797769546508789, 0.037677471160888674, 0.037695297241210936, 0.03772169494628906, 0.037644607543945316, 0.037628353118896486, 0.03817792129516601, 0.0377740478515625, 0.03774259185791016, 0.037961376190185546, 0.038158687591552734, 0.03797401428222656, 0.038191009521484375, 0.03780207824707031, 0.037771041870117185, 0.037978336334228514, 0.037940799713134764, 0.03789049530029297, 0.037744129180908206, 0.0376734733581543, 0.03830681610107422, 0.03810406494140625, 0.03784451293945312, 0.03758124923706055, 0.03753337478637695, 0.03765663909912109, 0.03765071868896484, 0.037727710723876956, 0.03787571334838867, 0.037679168701171876, 0.037555774688720706, 0.037684127807617186, 0.03780198287963867, 0.03802521514892578, 0.03759452819824219, 0.03767561721801758, 0.03771311950683594, 0.037916702270507814, 0.038193855285644535, 0.03809308624267578, 0.03792281723022461, 0.03792086410522461, 0.03809247970581055, 0.03801244735717774, 0.03785161590576172, 0.037742561340332034, 0.0378675537109375, 0.037797889709472655, 0.03784908676147461, 0.037695392608642575, 
0.0376668815612793, 0.03774617767333984, 0.03799504089355469, 0.0377262077331543, 0.037741600036621095, 0.03778211212158203, 0.037728641510009764, 0.03777891159057617, 0.0376627197265625, 0.037607425689697264, 0.037910526275634765, 0.038035457611083984, 0.03759718322753906, 0.03753545761108398, 0.03752169418334961, 0.03759513473510742, 0.03755212783813477, 0.03772246551513672, 0.04200566482543945, 0.038050048828125, 0.03784483337402344, 0.03761395263671875, 0.03839798355102539, 0.038206592559814456, 0.03788275146484375, 0.03749785614013672, 0.03811219024658203, 0.03764870452880859, 0.03752729415893555, 0.03753968048095703, 0.037564159393310544, 0.03764432144165039, 0.037628158569335934, 0.03754572677612305, 0.03753104019165039, 0.037538784027099606, 0.03746406555175781, 0.03760665512084961, 0.03776179122924805, 0.03770073699951172, 0.03750543975830078, 0.03761923217773438, 0.03763692855834961, 0.03796345520019531, 0.03761811065673828, 0.03740262222290039, 0.03741900634765625, 0.037392383575439454, 0.037703678131103514, 0.037435134887695315, 0.037370079040527346, 0.03763955307006836, 0.03752822494506836, 0.03728287887573242, 0.03741996765136719, 0.03800191879272461, 0.037446399688720704, 0.03740467071533203, 0.0378603515625, 0.03759820938110352, 0.03743334579467773, 0.03743743896484375, 0.037466110229492186, 0.037343231201171875, 0.037537727355957035, 0.037578113555908205, 0.037731006622314454, 0.03745792007446289, 0.03742310333251953, 0.03734022521972656, 0.03750598526000976, 0.03767977523803711, 0.03769548797607422, 0.037598880767822265, 0.03765190505981445, 0.03758089447021484, 0.040768478393554686, 0.0391262092590332, 0.037750625610351564, 0.03751385498046875, 0.037652671813964846, 0.03772415924072266, 0.03763363265991211, 0.0375463981628418, 0.03763814544677734, 0.03766009521484375, 0.03779612731933594, 0.037835041046142576, 0.03755567932128906, 0.037782047271728514, 0.037765121459960936, 0.037703678131103514, 0.037703678131103514, 0.037682945251464844, 0.03757696151733399, 0.03751289749145508, 0.03760774230957031, 0.0375863037109375, 0.03761593627929687, 0.03773881530761719, 0.03935363388061523, 0.03781398391723633, 0.037604225158691405, 0.03777916717529297, 0.037738143920898436, 0.03784576034545899, 0.037634048461914066, 0.03806412887573242, 0.037994495391845705, 0.03769343948364258, 0.03769680023193359, 0.03764912033081055, 0.038157470703125, 0.037895008087158205, 0.03766681671142578, 0.03770345687866211, 0.03777983856201172, 0.037762367248535156, 0.03800675201416016, 0.03765919876098633, 0.03772825622558594, 0.037607425689697264, 0.037619518280029296, 0.03786975860595703, 0.03774390411376953, 0.03767798233032227, 0.03854726409912109, 0.03775593566894531, 0.03771900939941406, 0.038559070587158205, 0.03769107055664062, 0.03783369445800781, 0.038330368041992184, 0.03817475128173828, 0.03768735885620117, 0.037993183135986326, 0.0378004150390625, 0.037733280181884765, 0.037765663146972654, 0.03762374496459961, 0.03865420913696289, 0.0378441276550293, 0.03748076629638672, 0.03760915374755859, 0.037394752502441404, 0.038633502960205075, 0.040075584411621096, 0.03788006210327149, 0.038982719421386716, 0.0376431999206543, 0.03749824142456055, 0.03749913787841797, 0.03743958282470703, 0.037480159759521486, 0.03743519973754883, 0.03738057708740234, 0.03818115234375, 0.0376627197265625, 0.037443584442138675, 0.037410945892333985, 0.037435264587402345, 0.03755788803100586, 0.03757433700561524, 0.03749708938598633, 0.03769929504394531, 0.03856044769287109, 0.037950817108154296, 
0.03958854293823242, 0.03857113647460937, 0.03796259307861328, 0.03751484680175781, 0.03746963119506836, 0.03754684829711914, 0.037595294952392576, 0.03738623809814453, 0.037443584442138675, 0.03740028762817383, 0.03751760101318359, 0.037350849151611326, 0.03746652984619141, 0.037442848205566405, 0.0373072624206543, 0.03746371078491211, 0.038663646697998044, 0.037480960845947264, 0.03749875259399414, 0.03795199966430664, 0.03761500930786133, 0.03786608123779297, 0.038004737854003906, 0.037641761779785156, 0.037781982421875, 0.0377217903137207, 0.04009606552124023, 0.03795558547973633, 0.03801628875732422, 0.03807241439819336, 0.037975967407226564, 0.03835084915161133, 0.03794300842285156, 0.03761100769042969, 0.037552928924560545, 0.03732204818725586, 0.037363998413085936, 0.037380512237548826, 0.03750092697143555, 0.037377918243408206, 0.03732012939453125, 0.03742166519165039, 0.037599327087402344, 0.037467552185058595, 0.03799100875854492, 0.03740262222290039, 0.03743270492553711, 0.0373438720703125, 0.03731660842895508, 0.037319744110107425, 0.037438369750976565, 0.03775696182250977, 0.03824025726318359, 0.03792281723022461, 0.03748659133911133, 0.037425022125244144, 0.03731846237182617, 0.037345600128173825, 0.03750873565673828, 0.037224830627441405, 0.03725241470336914, 0.03722515106201172, 0.03734732818603516, 0.0373922233581543, 0.037322910308837894, 0.03732233428955078, 0.03742879867553711, 0.037275936126708986, 0.03725279998779297, 0.037149471282958986, 0.037306304931640624, 0.037622238159179686, 0.0373449592590332, 0.037381248474121095, 0.0372108154296875, 0.03728377532958985, 0.037238494873046875, 0.03718812942504883, 0.03741718292236328, 0.03754172897338867, 0.037263294219970704, 0.0372097282409668, 0.037171585083007816, 0.037197441101074216, 0.03737843322753906, 0.037318561553955076, 0.03745801544189453, 0.03721388626098633, 0.0372083854675293, 0.0372305908203125, 0.03721625518798828, 0.03769139099121094, 0.03744153594970703, 0.0376196174621582, 0.037502910614013674, 0.03760131072998047, 0.03742937469482422, 0.03728758239746094, 0.03748694229125977, 0.0372943344116211, 0.03791027069091797, 0.0388454704284668, 0.037677505493164065, 0.03755267333984375, 0.03758489608764649, 0.038258689880371094, 0.03795142364501953, 0.03794063949584961, 0.037991073608398436, 0.037849246978759764, 0.037551006317138674, 0.037329856872558596, 0.03735980987548828, 0.03745920181274414, 0.03728543853759766, 0.03765555191040039, 0.037308414459228514, 0.03719500732421875, 0.037141246795654295, 0.0372305908203125, 0.03772415924072266, 0.03721206283569336, 0.03729827117919922, 0.03736371231079102, 0.03748041534423828, 0.037275550842285156, 0.037504478454589846, 0.03740313720703125, 0.037574817657470706, 0.037303329467773434, 0.03752239990234375, 0.037897598266601565, 0.03755481719970703, 0.037488449096679685, 0.03769977569580078, 0.037377311706542966, 0.03740950393676758, 0.03734476852416992, 0.037298206329345704, 0.03744563293457031, 0.03730044937133789, 0.037548065185546875, 0.03844441604614258, 0.03740553665161133, 0.037410526275634765, 0.03737120056152344, 0.03759817504882813, 0.037746238708496097, 0.037365345001220705, 0.037504959106445315, 0.03745065689086914, 0.037343231201171875, 0.03726927947998047, 0.03730771255493164, 0.03734780883789063, 0.03749692916870117, 0.03821283340454101, 0.03802694320678711, 0.03790652847290039, 0.037512767791748045, 0.04107913589477539, 0.03790639877319336, 0.03778972625732422, 0.03767507171630859, 0.037824256896972656, 0.037634048461914066, 0.03739446258544922, 
0.03747375869750977, 0.03751625442504883, 0.03799625778198242, 0.03759881591796875, 0.03744623947143555, 0.03754121780395508, 0.037622238159179686, 0.03747568130493164, 0.03738281631469727, 0.03736358261108398, 0.03758927917480469, 0.03760713577270508, 0.03736406326293945, 0.03749868774414063, 0.037586143493652344, 0.037609790802001955, 0.03780448150634766, 0.03744054412841797, 0.037391326904296876, 0.03751327896118164, 0.03734316635131836, 0.0377446403503418, 0.03753113555908203, 0.037862110137939456, 0.03774838256835938, 0.03747459030151367, 0.03744137573242187, 0.03742425537109375, 0.03739123153686524, 0.03762540817260742, 0.037456321716308597, 0.03751887893676758, 0.03750259017944336, 0.03737686538696289, 0.03742863845825195, 0.03742355346679688, 0.03737807846069336, 0.03747635269165039, 0.03735145568847656, 0.037533790588378906, 0.03743948745727539, 0.03758892822265625, 0.037848705291748046, 0.037454273223876955, 0.03747020721435547, 0.03751436614990234, 0.037407615661621096, 0.037591041564941405, 0.037452960968017576, 0.03744160079956055, 0.03758979034423828, 0.03772598266601562, 0.03758694458007812, 0.037771263122558595, 0.03791209411621094, 0.03766934585571289, 0.03768524932861328, 0.03756201553344726, 0.037534046173095706, 0.037530849456787106, 0.03753180694580078, 0.03795990371704101, 0.040377025604248044, 0.037903648376464844, 0.037671329498291016, 0.03797209548950195, 0.038004478454589846, 0.037644447326660155, 0.03750707244873047, 0.03751142501831055, 0.03762675094604492, 0.03752179336547851, 0.03766883087158203, 0.037632480621337894, 0.038004798889160155, 0.03757881546020508, 0.03748847961425781, 0.03741686248779297, 0.03760550308227539, 0.03734348678588867, 0.0376297607421875, 0.03745372772216797, 0.03755632019042969, 0.039769344329833985, 0.03876435089111328, 0.03779836654663086, 0.03780028915405274, 0.03839516830444336, 0.038652767181396486, 0.03853715133666992, 0.03831727981567383, 0.0396602897644043, 0.0384205436706543, 0.03864748764038086, 0.03835120010375977, 0.03826073455810547, 0.0384197769165039, 0.03829420852661133, 0.038250495910644534, 0.038504447937011715, 0.03874745559692383, 0.03837817764282227, 0.038529022216796875, 0.038456478118896485, 0.03886710357666016, 0.03846828842163086, 0.038494144439697266, 0.03840387344360351, 0.0384532470703125, 0.03849004745483398, 0.038521183013916015, 0.038346752166748044, 0.038365184783935545, 0.03876454544067383, 0.038553600311279294, 0.03871945571899414, 0.03896982574462891, 0.03841049575805664, 0.03839091110229492, 0.03844879913330078, 0.03871228790283203, 0.038819839477539066, 0.03897958374023437, 0.03858227157592774, 0.03911884689331055, 0.03856806564331055, 0.038809471130371094, 0.03846963119506836, 0.03848191833496094, 0.03848191833496094, 0.03844710540771484, 0.03830489730834961, 0.03833443069458008, 0.038271263122558595, 0.03886963272094727, 0.039608097076416014, 0.03896137619018555, 0.038494014739990236, 0.03836947250366211, 0.038299648284912106, 0.03836076736450195, 0.03828342437744141, 0.03848812866210938, 0.03867043304443359, 0.038594558715820314, 0.038362464904785155, 0.03829135894775391, 0.0385338249206543, 0.038486080169677736, 0.038450912475585936, 0.03820326232910156, 0.038089118957519534, 0.03823334503173828, 0.038238014221191406, 0.0383474235534668, 0.03820982360839844, 0.03844300842285156, 0.0384552001953125, 0.03832432174682617, 0.03816243362426758, 0.038346656799316405, 0.03879708862304688, 0.03925433731079102, 0.03831193542480469, 0.038335872650146485, 0.03835744094848633, 0.038277313232421874, 
0.03837728118896484, 0.03819916915893555, 0.03839577484130859, 0.038633537292480466, 0.038510974884033204, 0.03867388916015625, 0.03856233596801758, 0.03861708831787109, 0.0383139533996582, 0.03831932830810547, 0.038262718200683596, 0.03830374526977539, 0.03867238235473633, 0.03855062484741211, 0.04013347244262695, 0.039385120391845704, 0.03851472091674805, 0.03901030349731445, 0.038903297424316405, 0.03979683303833008, 0.03888579177856445, 0.038375263214111326, 0.03873603057861328, 0.03837721633911133, 0.03832857513427734, 0.03927791976928711, 0.03913180923461914, 0.03890380859375, 0.03895913696289063, 0.038821857452392576, 0.038596607208251955, 0.03844300842285156, 0.038288608551025394, 0.03860950469970703, 0.0383287353515625, 0.03841024017333984, 0.03842176055908203, 0.03845788955688476, 0.03865580749511719, 0.038647998809814454, 0.03859616088867188, 0.038478015899658206, 0.03823641586303711, 0.0391536636352539, 0.03931955337524414, 0.03841769790649414, 0.0386814079284668, 0.03844476699829102, 0.038745281219482425, 0.03869545745849609, 0.03844940948486328, 0.03834697723388672, 0.04265795135498047, 0.03918451309204102, 0.038657760620117186, 0.03868262481689453, 0.038606815338134766, 0.038510623931884765, 0.03849964904785156, 0.03889632034301758, 0.03860275268554687, 0.03854876708984375, 0.03851964950561523, 0.0384510726928711, 0.03870883178710938, 0.03895328140258789, 0.04051491165161133, 0.03870585632324219, 0.03842873764038086, 0.03930438232421875, 0.03859743881225586, 0.03846963119506836, 0.038338558197021484, 0.03860070419311523]",tokens/s,26.367548076444102,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1127.5264,2578.382848,0.0,2183.135232,2081.564672,s,1,10.6620048828125,10.6620048828125,0.0,10.6620048828125,10.6620048828125,10.6620048828125,10.6620048828125,[10.6620048828125],,kWh,0.00010513518717917426,1.158997807986383e-05,3.851614192398878e-05,0.00015524130718302687,,MB,1236.185088,3134.128128,0.0,2726.2976,2478.86848,s,10,3.9971143798828126,0.39971143798828124,0.0057334785106462356,0.40094010925292967,0.40350568847656254,0.4045810302734375,0.4054413037109375,"[0.38347836303710936, 0.400876953125, 0.40225457763671874, 0.4010032653808594, 0.3996824951171875, 0.4032667236328125, 0.3997377624511719, 0.40234405517578126, 0.4056563720703125, 0.3988138122558594]",tokens/s,640.4620325313418,kWh,1.1468964845513397e-05,1.2640497670574177e-06,7.601715483076874e-06,2.0334730095647688e-05,tokens/kWh,12589299.134823164,MB,1247.301632,3134.128128,0.0,2726.2976,2478.87104,s,10,26.040555419921873,2.604055541992188,0.015545223263453626,2.5988059082031247,2.623723461914062,2.632519445800781,2.6395562329101563,"[2.59868408203125, 2.598927734375, 2.596089111328125, 2.59702587890625, 2.60553076171875, 2.604890625, 2.621768798828125, 2.6413154296875, 2.592539794921875, 
2.583783203125]",tokens/s,24.193032362052822,kWh,7.592829943865324e-05,8.375808276304601e-06,5.030880306752271e-05,0.00013461291078248057,tokens/kWh,468008.60061484715,,s,630,26.037380851745596,0.04132917595515176,0.0008461596932565667,0.041192014694213866,0.04200077819824219,0.04252378139495849,0.045277171020507816,"[0.044763137817382816, 0.041047870635986326, 0.041023166656494144, 0.040728927612304684, 0.040747360229492186, 0.04168889617919922, 0.04103903961181641, 0.040573535919189455, 0.04069807815551758, 0.04118732833862305, 0.040892414093017575, 0.0408985595703125, 0.040853504180908204, 0.04127334213256836, 0.040826335906982425, 0.041048385620117187, 0.04071139144897461, 0.04057190322875977, 0.04066847991943359, 0.040559295654296876, 0.04146745681762695, 0.04147654342651367, 0.04064972686767578, 0.04166342544555664, 0.04202707290649414, 0.04086374282836914, 0.0404664306640625, 0.04043161773681641, 0.040202239990234374, 0.040540287017822266, 0.040777313232421876, 0.04138217544555664, 0.040681472778320314, 0.04096819305419922, 0.041111553192138675, 0.04122828674316406, 0.041000415802001956, 0.04101583862304688, 0.04129990386962891, 0.04170963287353516, 0.04153500747680664, 0.04399151992797851, 0.041183231353759765, 0.04132175827026367, 0.042524833679199216, 0.042342334747314456, 0.04133523178100586, 0.04148575973510742, 0.041079231262207035, 0.04115635299682617, 0.041147968292236325, 0.04180889511108398, 0.04119254302978516, 0.04109932708740234, 0.04126192092895508, 0.04137766265869141, 0.04234867095947266, 0.04219289779663086, 0.04113612747192383, 0.040944801330566404, 0.041040287017822266, 0.041120193481445313, 0.040986591339111325, 0.044784225463867185, 0.04140188980102539, 0.04124105453491211, 0.040755199432373046, 0.04090044784545899, 0.040943519592285156, 0.04144323348999023, 0.04152355194091797, 0.04174233627319336, 0.04128710556030273, 0.04142924880981445, 0.04116857528686523, 0.041583393096923826, 0.04097359848022461, 0.04071996688842774, 0.04094972610473633, 0.04075939178466797, 0.040979232788085934, 0.040521854400634764, 0.040220672607421876, 0.040271873474121096, 0.04195849609375, 0.04086262512207031, 0.04093644714355469, 0.04089548873901367, 0.04081782531738281, 0.04082912063598633, 0.040951454162597656, 0.040874622344970704, 0.040870273590087894, 0.04142489624023438, 0.04123344039916992, 0.04104496002197266, 0.040769535064697264, 0.04111743927001953, 0.04137599945068359, 0.04119551849365234, 0.04122009658813477, 0.04111088180541992, 0.041196193695068356, 0.04117036819458008, 0.041289726257324216, 0.041197822570800784, 0.041310527801513675, 0.04105535888671875, 0.04255347061157227, 0.04120665740966797, 0.04252249526977539, 0.04139740753173828, 0.04156707382202148, 0.04121395111083984, 0.04208156967163086, 0.04119420623779297, 0.04152524948120117, 0.041364990234375, 0.041506431579589845, 0.04109811019897461, 0.0411495361328125, 0.04088105773925781, 0.04161648178100586, 0.041030559539794925, 0.04111548614501953, 0.04124256134033203, 0.04452854537963867, 0.04068876647949219, 0.040667232513427735, 0.04021353530883789, 0.040632064819335935, 0.040755199432373046, 0.04074496078491211, 0.04058489608764648, 0.04011964797973633, 0.04036297607421875, 0.04072166442871094, 0.040981246948242185, 0.04095795059204101, 0.04057088088989258, 0.04050070571899414, 0.04009625625610352, 0.04013577651977539, 0.03994844818115234, 0.04063107299804687, 0.04115660858154297, 0.04459027099609375, 0.04207494354248047, 0.04103561782836914, 0.04145971298217774, 0.040837249755859374, 0.04118431854248047, 
0.04042031860351562, 0.041207809448242184, 0.04044595336914063, 0.04326316833496094, 0.04101763153076172, 0.04178585433959961, 0.04125289535522461, 0.04134502410888672, 0.04105744171142578, 0.04143395233154297, 0.04131020736694336, 0.04122009658813477, 0.041322624206542966, 0.04121724700927734, 0.0412064323425293, 0.04119756698608398, 0.04110243225097656, 0.04194380950927734, 0.04122195053100586, 0.04227705764770508, 0.04148806381225586, 0.04163631820678711, 0.041501953125, 0.041392894744873045, 0.04125696182250976, 0.04169244766235351, 0.04097052764892578, 0.041070079803466795, 0.040979393005371095, 0.04155388641357422, 0.04186115264892578, 0.04115359878540039, 0.04105516815185547, 0.041193473815917966, 0.04125686264038086, 0.041239776611328126, 0.04103372955322265, 0.04576678466796875, 0.04119532775878906, 0.04119830322265625, 0.040774879455566404, 0.040921886444091796, 0.04084348678588867, 0.040804351806640625, 0.04132863998413086, 0.04118844985961914, 0.04085625457763672, 0.0407405776977539, 0.040811008453369144, 0.0409535026550293, 0.04080060958862305, 0.04073206329345703, 0.040615936279296876, 0.04064521789550781, 0.04066025543212891, 0.04076780700683594, 0.0410792007446289, 0.04101939010620117, 0.04194617462158203, 0.04144838333129883, 0.04125286483764649, 0.04123830413818359, 0.04087580871582031, 0.04091948699951172, 0.04096758270263672, 0.04200048065185547, 0.041140735626220705, 0.041653759002685545, 0.0412042236328125, 0.041662464141845705, 0.041495742797851565, 0.041588737487792966, 0.04119846343994141, 0.041260990142822265, 0.04106444931030274, 0.04130815887451172, 0.04108697509765625, 0.041099262237548825, 0.04130815887451172, 0.04158464050292969, 0.04135116958618164, 0.040959552764892576, 0.04206636810302734, 0.04170751953125, 0.041325599670410156, 0.042580032348632814, 0.04110019302368164, 0.04080025482177734, 0.04090060806274414, 0.040683521270751956, 0.04064051055908203, 0.04077315139770508, 0.0404989128112793, 0.04214575958251953, 0.040965953826904294, 0.04107158279418945, 0.04123344039916992, 0.04090521621704102, 0.04092934417724609, 0.041049793243408204, 0.04488191986083984, 0.04125696182250976, 0.04110540771484375, 0.04115456008911133, 0.040791393280029294, 0.042414752960205075, 0.04103577423095703, 0.04126297760009766, 0.04353036880493164, 0.04214169692993164, 0.04179123306274414, 0.04125516891479492, 0.04143654251098633, 0.04115027236938477, 0.041124671936035154, 0.04097743988037109, 0.040868831634521485, 0.04125491333007812, 0.04110335922241211, 0.04114432144165039, 0.04104579162597656, 0.04257404708862305, 0.041446590423583986, 0.04189676666259766, 0.041470977783203126, 0.042562335968017576, 0.04154735946655273, 0.041457664489746096, 0.04120435333251953, 0.04122195053100586, 0.04116854476928711, 0.041390113830566407, 0.04121036911010742, 0.04142630386352539, 0.04150281524658203, 0.04129216003417969, 0.0415458869934082, 0.041446464538574215, 0.041331649780273434, 0.04222771072387695, 0.0411541748046875, 0.041221824645996094, 0.04113459014892578, 0.04132883071899414, 0.04064460754394531, 0.041924606323242186, 0.04140188980102539, 0.041304542541503904, 0.04119331359863281, 0.040892574310302736, 0.04079740905761719, 0.04084611129760742, 0.040245086669921874, 0.04098806381225586, 0.04056550216674805, 0.04060160064697266, 0.041062049865722657, 0.04070844650268555, 0.040959999084472655, 0.04099299240112304, 0.04088387298583984, 0.04106988906860352, 0.04066966247558594, 0.044959712982177734, 0.04101315307617188, 0.04144543838500977, 0.04116534423828125, 
0.040804351806640625, 0.04089014434814453, 0.04068188858032227, 0.04099420928955078, 0.0408845443725586, 0.04067712020874024, 0.0409683837890625, 0.0408454704284668, 0.041037406921386715, 0.040689697265625, 0.04070419311523438, 0.04104390335083008, 0.040900863647460935, 0.041293407440185545, 0.04089014434814453, 0.04122390365600586, 0.04698720169067383, 0.042458080291748045, 0.04164019012451172, 0.04112928009033203, 0.042092864990234374, 0.04128166580200195, 0.04094976043701172, 0.045335617065429684, 0.04111990356445312, 0.041145118713378906, 0.04102963256835938, 0.04116889572143555, 0.04154163360595703, 0.04110131072998047, 0.041197471618652344, 0.041166751861572266, 0.04110291290283203, 0.041040512084960935, 0.040941150665283206, 0.04127350234985352, 0.041183456420898434, 0.040635871887207034, 0.04066156768798828, 0.040854721069335936, 0.043794559478759765, 0.04197411346435547, 0.040828353881835935, 0.04093225479125977, 0.040986366271972656, 0.04062006378173828, 0.040470752716064456, 0.04079206466674805, 0.04079123306274414, 0.04128851318359375, 0.041199615478515625, 0.041060352325439455, 0.040996223449707034, 0.04073910522460938, 0.04121379089355469, 0.04126976013183594, 0.04102143859863281, 0.04127107238769531, 0.041150718688964846, 0.04559097671508789, 0.0413226547241211, 0.04149071884155273, 0.04134902572631836, 0.041162593841552735, 0.04112566375732422, 0.04093999862670898, 0.04088195037841797, 0.041410110473632813, 0.04148700714111328, 0.04166403198242188, 0.041603553771972654, 0.04175820922851563, 0.04150527954101563, 0.041611263275146484, 0.041527198791503905, 0.04149875259399414, 0.041403839111328125, 0.04171625518798828, 0.04163974380493164, 0.04165241622924805, 0.04285440063476562, 0.04209187316894531, 0.04178963088989258, 0.04193532943725586, 0.04201267242431641, 0.04181196975708008, 0.04157782363891602, 0.04179830551147461, 0.04165222549438476, 0.04164812850952149, 0.04176863861083984, 0.041722175598144534, 0.041809921264648435, 0.04177920150756836, 0.041836544036865236, 0.04185289764404297, 0.041643070220947265, 0.04156063842773437, 0.04138979339599609, 0.041517566680908204, 0.04136675262451172, 0.041169921875, 0.04116783905029297, 0.041055233001708984, 0.04246275329589844, 0.04219952011108399, 0.04155990219116211, 0.04150697708129883, 0.04168864059448242, 0.04123078536987305, 0.042527904510498045, 0.04094572830200195, 0.04099356842041016, 0.04085887908935547, 0.04125110244750976, 0.041150943756103515, 0.04119670486450195, 0.04117308807373047, 0.04125676727294922, 0.04135145568847656, 0.04146448135375977, 0.04148348617553711, 0.04682940673828125, 0.04185513687133789, 0.04192256164550781, 0.04149353790283203, 0.04155081558227539, 0.0413460807800293, 0.04144841766357422, 0.04144268798828125, 0.04176345443725586, 0.041964958190917966, 0.04177747344970703, 0.041896224975585934, 0.04186316680908203, 0.04182944107055664, 0.04185184097290039, 0.04285187149047852, 0.045337055206298826, 0.04284415817260742, 0.04231987380981445, 0.04195072174072265, 0.04205388641357422, 0.043597984313964847, 0.04223177719116211, 0.041836673736572266, 0.042003456115722655, 0.0424376335144043, 0.042147838592529296, 0.0437841911315918, 0.04239052963256836, 0.04177407836914063, 0.04243865585327149, 0.04365926361083984, 0.04160655975341797, 0.04173030471801758, 0.04159932708740234, 0.041540897369384766, 0.04207843017578125, 0.041323009490966796, 0.04136959838867187, 0.041481246948242186, 0.04135830307006836, 0.04121916961669922, 0.04105033493041992, 0.04128201675415039, 0.0412913589477539, 
0.04227459335327149, 0.04140911865234375, 0.0415992317199707, 0.041492767333984375, 0.041574111938476564, 0.04156371307373047, 0.04109356689453125, 0.041082878112792966, 0.04102137756347656, 0.041282623291015626, 0.04105292892456055, 0.04109952163696289, 0.04140031814575195, 0.04153488159179688, 0.0417163200378418, 0.04160412979125976, 0.04148329544067383, 0.04131423950195313, 0.04513407897949219, 0.04290310287475586, 0.041640575408935544, 0.041611263275146484, 0.0416409912109375, 0.04182470321655273, 0.0417410888671875, 0.04193254470825195, 0.0415536003112793, 0.041987903594970705, 0.04180809783935547, 0.041823646545410154, 0.04147612762451172, 0.0415814094543457, 0.041283584594726565, 0.041240062713623044, 0.04110121536254883, 0.04116131210327149, 0.04087551879882813, 0.04151491165161133, 0.042178848266601565, 0.041838912963867186, 0.04112953567504883, 0.041226688385009765, 0.04150886535644531, 0.04098867034912109, 0.04064614486694336, 0.040952320098876956, 0.040478687286376956, 0.04064668655395508, 0.04041932678222656, 0.040543712615966794, 0.04030928039550781, 0.04117900848388672, 0.041013343811035156, 0.04106038284301758, 0.04072447967529297, 0.040416671752929685, 0.03992022323608398, 0.03999065780639648, 0.040495681762695315, 0.0402977294921875, 0.043488033294677736, 0.04067331314086914, 0.04032291030883789, 0.04149379348754883, 0.040718238830566404, 0.04069055938720703, 0.04065903854370117, 0.04081459045410156, 0.04060160064697266, 0.04062617492675781, 0.04063958358764649, 0.04047065734863281, 0.0404136962890625, 0.04046454238891602, 0.04030831909179688, 0.0408741455078125, 0.04121004867553711, 0.04113174438476563, 0.04093999862670898, 0.04113129425048828, 0.04075798416137695, 0.04675382232666016, 0.041271808624267575, 0.041180606842041015, 0.041211967468261716, 0.041281440734863284, 0.04113049697875976, 0.04126428985595703, 0.04173920059204102, 0.04119551849365234, 0.04106854248046875, 0.04125894546508789, 0.04119148635864258, 0.04124467086791992, 0.041183231353759765, 0.041270496368408204, 0.04106454467773438, 0.041134750366210934, 0.041166881561279296, 0.041161983489990235, 0.04128025436401367, 0.041164478302001956, 0.04219254302978516, 0.04108355331420899, 0.04098457717895508, 0.04093337631225586, 0.040822784423828126, 0.04094771194458008, 0.040594688415527345, 0.04051635360717774, 0.040359935760498046, 0.040365280151367186, 0.0404918098449707, 0.040495105743408207, 0.041194847106933594, 0.040807071685791015, 0.04088995361328125, 0.04070851135253906, 0.04050124740600586, 0.04039420700073242, 0.04034409713745117, 0.040235008239746094, 0.04073056030273438, 0.04043487930297852, 0.04030758285522461, 0.04070604705810547, 0.041852447509765626, 0.04158464050292969, 0.04119599914550781, 0.04101696014404297, 0.04101772689819336, 0.04112112045288086, 0.040643230438232425, 0.04131161499023438, 0.040497791290283205, 0.040359935760498046, 0.040179454803466796, 0.040151294708251954, 0.040169471740722655, 0.04044784164428711, 0.04083523178100586, 0.04108863830566406, 0.04087152099609375, 0.04084611129760742]",tokens/s,24.195982060836325,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 104960 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1178.492928,1093.599232,0.0,698.351616,690.178048,s,1,9.307734375,9.307734375,0.0,9.307734375,9.307734375,9.307734375,9.307734375,[9.307734375],,kWh,4.813778019170816e-05,5.30268701156643e-06,1.66522355439791e-05,7.009270274725368e-05,,MB,1372.995584,1408.172032,0.0,1000.341504,957.775872,s,10,0.5886727676391602,0.058867276763916024,0.0008932273377649839,0.05856430435180664,0.05927543525695801,0.06033389415740967,0.061180661277771,"[0.05887075042724609, 0.058735103607177735, 0.05904022216796875, 0.05820880126953125, 0.058393505096435545, 0.06139235305786133, 0.05903468704223633, 0.05824940872192383, 0.05835657501220703, 0.058391361236572265]",tokens/s,4348.76580118822,kWh,1.8262286517710889e-06,2.013398846739338e-07,1.2138065265999281e-06,3.241375063044951e-06,tokens/kWh,78978826.89314988,MB,1387.06944,1416.56064,0.0,1008.730112,957.778432,s,10,25.123927734375,2.5123927734375004,0.007594485465332524,2.5096402587890623,2.52302841796875,2.52503056640625,2.52663228515625,"[2.5029521484375, 2.51033251953125, 2.505419677734375, 2.52703271484375, 2.508947998046875, 2.506647216796875, 2.50775732421875, 2.51946875, 2.52258349609375, 2.512785888671875]",tokens/s,25.075697027181892,kWh,7.273305940906234e-05,8.022381448986752e-06,3.13892751113997e-05,0.00011214471596944882,tokens/kWh,561774.12779005,,s,630,25.121339572906486,0.03987514217921666,0.0006522795463124235,0.03971884727478027,0.04030095443725586,0.040638852691650386,0.04335835014343263,"[0.039836639404296874, 0.040035518646240234, 0.040170177459716794, 0.039548095703125, 0.03970502471923828, 0.03939376068115234, 0.039524383544921875, 0.03946905517578125, 0.03934003067016602, 0.039572704315185545, 0.0394596176147461, 0.039615840911865235, 0.039416481018066406, 0.03941580963134766, 0.0394958381652832, 0.03980271911621094, 0.039354366302490236, 0.039370750427246096, 0.03942604827880859, 0.039667713165283204, 0.0397127685546875, 0.03951411056518555, 0.03952230453491211, 0.03984384155273438, 0.039569408416748046, 0.03940761566162109, 0.039896385192871094, 0.03985580825805664, 0.03975270462036133, 0.039616512298583983, 0.03965542221069336, 0.039772159576416014, 0.039712257385253906, 0.03989350509643555, 0.03991676712036133, 0.03953276824951172, 0.03972758483886719, 0.04024095916748047, 0.03947734451293945, 0.03963324737548828, 0.03955023956298828, 0.03963910293579102, 0.03971468734741211, 0.03976169586181641, 0.040372383117675784, 0.04014969635009766, 0.03961974334716797, 0.0395497932434082, 0.0397946891784668, 0.039736446380615235, 
0.03977849578857422, 0.03951996612548828, 0.039363296508789065, 0.03947043228149414, 0.03971535873413086, 0.03972531127929688, 0.03955862426757813, 0.039553470611572265, 0.03953276824951172, 0.0396492805480957, 0.04370761489868164, 0.03965945434570312, 0.039697025299072264, 0.040117599487304687, 0.03997119903564453, 0.039870750427246096, 0.039686111450195315, 0.03997596740722656, 0.04007219314575195, 0.04203724670410156, 0.040457759857177734, 0.0402006721496582, 0.04028416061401367, 0.040091232299804686, 0.0399257926940918, 0.03949606323242188, 0.03956531143188476, 0.040037696838378906, 0.03982815933227539, 0.039873790740966794, 0.03938777542114258, 0.03946099090576172, 0.03996192169189453, 0.03932844924926758, 0.03942399978637695, 0.039354366302490236, 0.039454113006591796, 0.04000009536743164, 0.03957964706420898, 0.03951411056518555, 0.03977328109741211, 0.03963772964477539, 0.0396822395324707, 0.039556961059570316, 0.039622112274169924, 0.0396743049621582, 0.03964339065551758, 0.040551551818847655, 0.03957235336303711, 0.039519649505615234, 0.03975753784179688, 0.03965983963012695, 0.03967238235473633, 0.039666847229003904, 0.03996096038818359, 0.03991376113891602, 0.04015068817138672, 0.040065567016601564, 0.039760929107666015, 0.03962140655517578, 0.03955318450927734, 0.0401324462890625, 0.040065216064453124, 0.039763904571533205, 0.039848064422607424, 0.03967158508300781, 0.04012403106689453, 0.039811614990234376, 0.03960992050170899, 0.03953907012939453, 0.03991263961791992, 0.039580543518066405, 0.040938655853271486, 0.03960636901855469, 0.03985055923461914, 0.039642433166503906, 0.03991961669921875, 0.03984143829345703, 0.03998134231567383, 0.03970054244995117, 0.04029958343505859, 0.04053087997436523, 0.039739391326904294, 0.039733375549316406, 0.0395665283203125, 0.039766719818115234, 0.039593982696533206, 0.039707969665527344, 0.03966207885742187, 0.03940371322631836, 0.03962575912475586, 0.03967689514160156, 0.03980806350708008, 0.03967808151245117, 0.039791423797607424, 0.039626750946044925, 0.04024515151977539, 0.039772449493408205, 0.040640033721923825, 0.03969257736206055, 0.039935359954833986, 0.04027865600585938, 0.03996031951904297, 0.03981654357910156, 0.03975609588623047, 0.03976992034912109, 0.03989993667602539, 0.03959807968139648, 0.039600128173828124, 0.03952025604248047, 0.03954278564453125, 0.03958927917480469, 0.0397973747253418, 0.03982755279541016, 0.03944214248657227, 0.03952195358276367, 0.039517982482910156, 0.03969302368164063, 0.03957360076904297, 0.039800704956054686, 0.03949961471557617, 0.03981536102294922, 0.03966566467285156, 0.03950947189331055, 0.03954332733154297, 0.039858177185058595, 0.04044095993041992, 0.0397628173828125, 0.03968928146362305, 0.03945568084716797, 0.03986636734008789, 0.04024662399291992, 0.039481502532958984, 0.03952076721191406, 0.0396759033203125, 0.03980886459350586, 0.039704734802246094, 0.0395830078125, 0.03959667205810547, 0.04001308822631836, 0.03993439865112305, 0.039808734893798825, 0.03948303985595703, 0.04003049468994141, 0.039513694763183595, 0.04011212921142578, 0.03960108947753906, 0.039741439819335936, 0.03963286590576172, 0.03958560180664063, 0.039516223907470706, 0.03956137466430664, 0.039470111846923825, 0.039699424743652345, 0.041523200988769535, 0.04022902297973633, 0.03948118209838867, 0.03942604827880859, 0.0396409912109375, 0.039659614562988284, 0.039451744079589846, 0.03967001724243164, 0.03971958541870117, 0.04053606414794922, 0.04031206512451172, 0.03970943832397461, 0.04045401763916016, 
0.03977836990356445, 0.04011148834228516, 0.03994489669799805, 0.040151039123535154, 0.04017561721801758, 0.04027996826171875, 0.039927391052246096, 0.03974588775634766, 0.03979411315917969, 0.0434752311706543, 0.03988320159912109, 0.03976806259155274, 0.04534272003173828, 0.04501913452148437, 0.0409169921875, 0.04099225616455078, 0.04020803070068359, 0.039664478302001954, 0.039876609802246096, 0.03980284881591797, 0.03959632110595703, 0.04005043029785156, 0.03949977493286133, 0.03985539245605469, 0.0399832649230957, 0.03973791885375977, 0.03958169555664062, 0.04023875045776367, 0.039448287963867186, 0.03946793746948242, 0.03937251281738281, 0.039593982696533206, 0.03940332794189453, 0.04034515380859375, 0.0402213134765625, 0.040048641204833986, 0.04024863815307617, 0.039995166778564455, 0.03971510314941406, 0.0396596794128418, 0.03959142303466797, 0.0393691520690918, 0.03968582534790039, 0.03981923294067383, 0.039525249481201175, 0.039653377532958986, 0.03949772644042969, 0.0397209587097168, 0.039626750946044925, 0.03943587112426758, 0.03954524612426758, 0.03996057510375976, 0.03943987274169922, 0.03936307144165039, 0.039395328521728515, 0.04182160186767578, 0.039530303955078124, 0.039651424407958984, 0.03940652847290039, 0.03930495834350586, 0.039591934204101564, 0.039712158203125, 0.04006358337402344, 0.03972710418701172, 0.03937279891967774, 0.03968991851806641, 0.03944681549072265, 0.03948880004882813, 0.040428287506103514, 0.03995859146118164, 0.039712703704833985, 0.039624702453613284, 0.03979673767089844, 0.03970851135253906, 0.0396412467956543, 0.03982950210571289, 0.04016128158569336, 0.03989718246459961, 0.04000678253173828, 0.03962502288818359, 0.03987299346923828, 0.039446529388427735, 0.039708671569824217, 0.040054561614990235, 0.03965359878540039, 0.03996057510375976, 0.039593982696533206, 0.03953811264038086, 0.040581214904785154, 0.04224252700805664, 0.040243457794189454, 0.039487232208251954, 0.03961427307128906, 0.03993164825439453, 0.03973318481445313, 0.040753662109375, 0.04004044723510742, 0.039739391326904294, 0.04007516860961914, 0.04176454544067383, 0.040866111755371096, 0.04020038223266602, 0.03973625564575195, 0.0397628173828125, 0.039599777221679684, 0.03957180786132813, 0.039726207733154294, 0.03948556900024414, 0.03958217620849609, 0.03963523101806641, 0.0396308479309082, 0.039610366821289066, 0.03965542221069336, 0.03960128021240234, 0.03957372665405273, 0.039487583160400394, 0.039653663635253904, 0.03942745590209961, 0.03953961563110352, 0.03953216171264649, 0.039586177825927736, 0.039561344146728517, 0.04035747146606445, 0.0398111686706543, 0.04006057739257812, 0.039924030303955076, 0.039908641815185546, 0.03963385772705078, 0.04037628936767578, 0.039771617889404295, 0.039559520721435544, 0.04011644744873047, 0.03982745742797852, 0.03987804794311523, 0.04004627227783203, 0.03995536041259766, 0.039897087097167966, 0.03968384170532226, 0.03954473495483399, 0.039817569732666015, 0.03954687881469727, 0.039798080444335936, 0.03958428955078125, 0.03944195175170898, 0.03962944030761719, 0.03948044967651367, 0.03977510452270508, 0.04005260848999023, 0.03967942428588867, 0.03978720092773438, 0.03971891021728516, 0.039857471466064456, 0.03982124710083008, 0.039653182983398434, 0.03982227325439453, 0.039608001708984375, 0.03962809753417969, 0.039657726287841796, 0.039504638671875, 0.039607872009277345, 0.03970502471923828, 0.03976508712768555, 0.039790847778320315, 0.03988742446899414, 0.03944655990600586, 0.039849151611328126, 0.03964150238037109, 
0.039992927551269535, 0.0396352653503418, 0.03942867279052734, 0.03975372695922851, 0.039976287841796875, 0.03974969482421875, 0.04045475387573242, 0.04043161773681641, 0.039810081481933594, 0.040637409210205075, 0.03978358459472656, 0.0399901123046875, 0.039339134216308594, 0.039506111145019535, 0.03944723129272461, 0.03944243240356445, 0.039458049774169925, 0.03936332702636719, 0.03952844619750977, 0.03935846328735351, 0.03957555389404297, 0.04008345413208008, 0.041207809448242184, 0.03980287933349609, 0.03967718505859375, 0.03950892639160156, 0.039352127075195316, 0.03958147048950195, 0.03967612838745117, 0.03975987243652344, 0.04035174560546875, 0.04057088088989258, 0.04015043258666992, 0.039696414947509764, 0.039929759979248046, 0.03959584045410156, 0.03978857421875, 0.039521087646484376, 0.03952844619750977, 0.039686145782470705, 0.03963068771362305, 0.040145057678222656, 0.04067327880859375, 0.0397918701171875, 0.039846656799316406, 0.03967712020874024, 0.03968083190917969, 0.03967974472045899, 0.03942819213867187, 0.03981327819824219, 0.04011539077758789, 0.03983443069458008, 0.03957084655761719, 0.03970547103881836, 0.03964694213867188, 0.040017280578613285, 0.03972774505615234, 0.04260752105712891, 0.03974467086791992, 0.03977481460571289, 0.03944473648071289, 0.039577598571777346, 0.039798782348632815, 0.03952435302734375, 0.04036403274536133, 0.041090847015380856, 0.03960444641113281, 0.039478687286376955, 0.03925872039794922, 0.03926425552368164, 0.039321952819824216, 0.039394977569580075, 0.0392540168762207, 0.03916595077514649, 0.04287692642211914, 0.03957964706420898, 0.03945792007446289, 0.03946912002563477, 0.039340480804443356, 0.04030054473876953, 0.03996710586547852, 0.03952777481079101, 0.03923011016845703, 0.03941580963134766, 0.03957555389404297, 0.039540542602539065, 0.03956252670288086, 0.03962358474731445, 0.03922467041015625, 0.039400096893310546, 0.045762016296386716, 0.04368809509277344, 0.03998348617553711, 0.03955686569213867, 0.03947135925292969, 0.039411006927490236, 0.03943078231811523, 0.039374912261962894, 0.039617599487304686, 0.039848896026611326, 0.039723007202148435, 0.03969785690307617, 0.03988127899169922, 0.039615840911865235, 0.04044252777099609, 0.04009983825683594, 0.04025139236450195, 0.040627552032470704, 0.039765758514404295, 0.039711647033691407, 0.03970240020751953, 0.03984944152832031, 0.04105599975585938, 0.041044097900390625, 0.040304641723632816, 0.039768863677978515, 0.03964675140380859, 0.03972073745727539, 0.0395880012512207, 0.039809215545654295, 0.04021868896484375, 0.04033331298828125, 0.04029417419433594, 0.039870014190673826, 0.040018592834472656, 0.03981926345825195, 0.04001567840576172, 0.039724224090576174, 0.040497920989990235, 0.04020780944824219, 0.040178497314453124, 0.03992473602294922, 0.03987148666381836, 0.039839134216308594, 0.03970134353637695, 0.039833343505859375, 0.03966566467285156, 0.03973241424560547, 0.03977913665771484, 0.03977974319458008, 0.0398526382446289, 0.03965542221069336, 0.03956067276000977, 0.039491710662841795, 0.039575809478759764, 0.039657505035400394, 0.039510143280029296, 0.040476673126220705, 0.03981721496582031, 0.03979788970947266, 0.03983798217773438, 0.03999599838256836, 0.03971891021728516, 0.04027088165283203, 0.04043670272827148, 0.04048896026611328, 0.040025215148925784, 0.040180606842041014, 0.03982131195068359, 0.03987446212768555, 0.03975084686279297, 0.039965599060058594, 0.03977830505371094, 0.03998720169067383, 0.04108451080322266, 0.03994460678100586, 0.039699775695800785, 
0.043072193145751954, 0.039739391326904294, 0.03959574508666992, 0.039532833099365235, 0.03966323089599609, 0.03993452835083008, 0.04029420852661133, 0.03966880035400391, 0.04073507308959961, 0.041477825164794924, 0.03991024017333984, 0.04049862289428711, 0.041063201904296874, 0.03992876815795898, 0.03993632125854492, 0.0397031364440918, 0.040215679168701175, 0.0401396484375, 0.04008665466308594, 0.039680896759033205, 0.03965488052368164, 0.039980670928955075, 0.03959081649780274, 0.03964131164550781, 0.03989481735229492, 0.040046592712402344, 0.04051459121704101, 0.039771102905273435, 0.04009983825683594, 0.03953868865966797, 0.03971878433227539, 0.03982144165039062, 0.03963852691650391, 0.03949820709228516, 0.039554080963134765, 0.040068096160888675, 0.039884159088134766, 0.04013324737548828, 0.03996623992919922, 0.04012393569946289, 0.0407336311340332, 0.04003430557250977, 0.04365107345581055, 0.040551551818847655, 0.039787296295166016, 0.04003440093994141, 0.040210430145263674, 0.03989049530029297, 0.03965087890625, 0.039655872344970707, 0.03954937744140625, 0.03959590530395508, 0.03952243041992187, 0.039587329864501954, 0.039580158233642575, 0.03950796890258789, 0.039567359924316405, 0.03953049468994141, 0.039462913513183595, 0.03943833541870117, 0.03982460784912109, 0.04038291168212891, 0.03996899032592773, 0.03971289443969726, 0.039644542694091796, 0.03960201644897461, 0.03953129577636719, 0.039812576293945315, 0.039615009307861326, 0.03956121444702149, 0.040887935638427735, 0.039696800231933595, 0.03989910507202148, 0.03963904190063477, 0.03965705490112305, 0.03959584045410156, 0.039524513244628905, 0.039537086486816406, 0.039618560791015625]",tokens/s,25.078280486262706,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1832.112128,2899.247104,0.0,2503.999488,2349.010944,s,1,10.43385546875,10.43385546875,0.0,10.43385546875,10.43385546875,10.43385546875,10.43385546875,[10.43385546875],,kWh,8.925259466669407e-05,9.8379611483825e-06,3.31683598679966e-05,0.00013225891568307316,,MB,1861.627904,3310.288896,0.0,2902.458368,2642.29888,s,10,2.0485160675048824,0.20485160675048827,0.0014639113538269938,0.2049086685180664,0.20690233612060546,0.2069673957824707,0.2070194435119629,"[0.20393798828125, 0.20473356628417969, 0.20267205810546876, 0.20703245544433593, 0.20374549865722658, 0.20508377075195314, 0.20688787841796874, 0.20291651916503906, 0.205378173828125, 0.20612815856933595]",tokens/s,1249.6850967432786,kWh,6.244455116046322e-06,6.886439501866311e-07,4.1591522634890845e-06,1.1092251329722038e-05,tokens/kWh,23079174.13609624,MB,1879.646208,3310.288896,0.0,2902.458368,2642.30144,s,10,26.158725097656248,2.615872509765625,0.02669456699365264,2.600468994140625,2.6611926513671875,2.6641700561523436,2.666551979980469,"[2.59824267578125, 2.632306640625, 2.6671474609375, 2.660531005859375, 2.597810546875, 2.6013447265625, 2.598972412109375, 2.59959326171875, 2.61577734375, 
2.5869990234375]",tokens/s,24.08374252369227,kWh,7.543729570770369e-05,8.320425516245945e-06,4.1119217292509205e-05,0.00012487693851645883,tokens/kWh,504496.67287204176,,s,630,26.156115360260003,0.04151764342898414,0.0009361234186335529,0.04123801612854004,0.04229381065368652,0.04348384552001952,0.045189528236389165,"[0.04186492919921875, 0.04122652816772461, 0.04103760147094727, 0.04131657409667969, 0.04110784149169922, 0.041498241424560545, 0.041033313751220706, 0.04120412826538086, 0.04109910583496094, 0.0409623031616211, 0.040944671630859374, 0.041351966857910157, 0.04093552017211914, 0.04110927963256836, 0.04083919906616211, 0.04199852752685547, 0.04134265518188476, 0.04110982513427734, 0.04149571228027344, 0.04102844619750977, 0.04103372955322265, 0.04091865539550781, 0.0406798095703125, 0.041180286407470706, 0.041122688293457034, 0.041474048614501956, 0.04111705780029297, 0.04172876739501953, 0.04131110382080078, 0.04123955154418945, 0.041213024139404295, 0.041243553161621094, 0.040982112884521485, 0.04104438400268555, 0.04104502487182617, 0.04149327850341797, 0.04101987075805664, 0.04109897613525391, 0.04203424072265625, 0.04169814300537109, 0.041987552642822265, 0.041237342834472654, 0.04123583984375, 0.0411426887512207, 0.041148414611816404, 0.04139606475830078, 0.04099430465698242, 0.04137027359008789, 0.04108697509765625, 0.04107030487060547, 0.04103952026367187, 0.04097846221923828, 0.04143929672241211, 0.04146435165405273, 0.04114022445678711, 0.04130985641479492, 0.041175071716308596, 0.04144678497314453, 0.04125913619995117, 0.041834976196289064, 0.041140575408935544, 0.04089984130859375, 0.041014015197753904, 0.041705406188964844, 0.04106006240844726, 0.04095475387573242, 0.04138940811157227, 0.04096387100219727, 0.04108969497680664, 0.04085318374633789, 0.04096441650390625, 0.04099440002441406, 0.04099507141113281, 0.04089023971557617, 0.041302303314208984, 0.041866912841796874, 0.043305023193359375, 0.04122652816772461, 0.041867263793945314, 0.04118492889404297, 0.04124502563476563, 0.04098867034912109, 0.04105420684814453, 0.04170751953125, 0.041266590118408206, 0.04099046325683594, 0.0410939826965332, 0.040855552673339846, 0.04120371246337891, 0.04120489501953125, 0.04112665557861328, 0.041168991088867186, 0.04114009475708008, 0.041119232177734374, 0.04122841644287109, 0.04109363174438477, 0.04149248123168945, 0.0411357421875, 0.04152336120605469, 0.041414878845214845, 0.04168646240234375, 0.04149919891357422, 0.04133679962158203, 0.04169116973876953, 0.04150076675415039, 0.041253887176513675, 0.04128969573974609, 0.04130297470092773, 0.0421580810546875, 0.04152012634277344, 0.04154995346069336, 0.04115446472167969, 0.04155491256713867, 0.04135251235961914, 0.04124697494506836, 0.04136131286621094, 0.043536895751953124, 0.049504287719726564, 0.04478524780273437, 0.04494742584228516, 0.044902847290039065, 0.044802047729492187, 0.044951553344726565, 0.041488254547119144, 0.041600704193115234, 0.041403839111328125, 0.046214622497558595, 0.04475344085693359, 0.044951553344726565, 0.044686622619628906, 0.044634624481201174, 0.04434524917602539, 0.044681537628173826, 0.04374844741821289, 0.04186531066894531, 0.042082656860351564, 0.04227324676513672, 0.04208025741577148, 0.042394687652587894, 0.04214470291137695, 0.04253900909423828, 0.042420223236083986, 0.042298881530761716, 0.042402305603027345, 0.04173209762573242, 0.04200243377685547, 0.04137321472167969, 0.041523681640625, 0.04141056060791016, 0.0413322868347168, 0.041912769317626955, 0.04142691040039063, 
0.041406112670898436, 0.04125734329223633, 0.041430561065673825, 0.04178992080688477, 0.041506046295166014, 0.0418056640625, 0.04169206237792969, 0.04170751953125, 0.041430526733398435, 0.04147251129150391, 0.041656158447265626, 0.04128908920288086, 0.041396350860595704, 0.041212158203125, 0.04144579315185547, 0.04229324722290039, 0.04127849578857422, 0.04174947357177734, 0.041875457763671874, 0.041885696411132815, 0.0420711669921875, 0.04162035369873047, 0.04215193557739258, 0.04154150390625, 0.040986942291259765, 0.04100076675415039, 0.04088422393798828, 0.040978431701660156, 0.04090838241577149, 0.04109148788452149, 0.04213862228393555, 0.04388764953613281, 0.044763103485107425, 0.04426137542724609, 0.04429385757446289, 0.04473680114746094, 0.044730369567871096, 0.04576448059082031, 0.04506547164916992, 0.04521567916870117, 0.04585744094848633, 0.04274310302734375, 0.04158355331420899, 0.04161663818359375, 0.04143795013427734, 0.04160921478271484, 0.04300614547729492, 0.04137532806396484, 0.0413199348449707, 0.041759456634521484, 0.04210483169555664, 0.04229939270019531, 0.04254719924926758, 0.044943359375, 0.04259775924682617, 0.04247615814208985, 0.04211711883544922, 0.04208380889892578, 0.04246716690063477, 0.042393566131591794, 0.04161814498901367, 0.0420882568359375, 0.04157846450805664, 0.04251670455932617, 0.042229759216308595, 0.042646656036376955, 0.046682430267333985, 0.04197548675537109, 0.04311872100830078, 0.04288486480712891, 0.04238643264770508, 0.04196966552734375, 0.04213145446777344, 0.04168716812133789, 0.041770881652832034, 0.04177695846557617, 0.04187919998168945, 0.04182479858398438, 0.04192873764038086, 0.04154774475097656, 0.041041439056396484, 0.04102396774291992, 0.041344863891601566, 0.041524383544921876, 0.04122281646728516, 0.041535518646240235, 0.04156447982788086, 0.0413260498046875, 0.04147776031494141, 0.042119937896728514, 0.04152131271362305, 0.04164217758178711, 0.04187936019897461, 0.041177055358886716, 0.04161539077758789, 0.041110591888427736, 0.041374622344970705, 0.04145475387573242, 0.04177091217041016, 0.04190307235717773, 0.04189091110229492, 0.04141120147705078, 0.041224193572998044, 0.041369888305664064, 0.0413034553527832, 0.041129600524902346, 0.041226688385009765, 0.0410588493347168, 0.04148857498168945, 0.04206572723388672, 0.04151910400390625, 0.04134905624389648, 0.041433151245117185, 0.0415579833984375, 0.04109286499023437, 0.04107497787475586, 0.04089606475830078, 0.041142719268798825, 0.0409804801940918, 0.04101020812988281, 0.04106444931030274, 0.04096713638305664, 0.040860992431640625, 0.04112044906616211, 0.041127296447753904, 0.041172897338867184, 0.04146623992919922, 0.04071664047241211, 0.04089641571044922, 0.04114761734008789, 0.04152204895019531, 0.04115990447998047, 0.04103247833251953, 0.041568321228027345, 0.041067550659179684, 0.04097468948364258, 0.04106092834472656, 0.04106444931030274, 0.04094976043701172, 0.04064230346679688, 0.040869152069091794, 0.04110230255126953, 0.040767105102539065, 0.04117747116088867, 0.04099686431884766, 0.04095296096801758, 0.040834049224853515, 0.04341900634765625, 0.041256832122802733, 0.041175647735595705, 0.04122000122070312, 0.04095139312744141, 0.04106089782714844, 0.04118732833862305, 0.04148223876953125, 0.04193689727783203, 0.041484001159667966, 0.041236766815185545, 0.04165836715698242, 0.04125395202636719, 0.0411385612487793, 0.041273952484130856, 0.041310081481933596, 0.041942337036132815, 0.04124947357177734, 0.041279167175292966, 0.04127097702026367, 0.041291553497314455, 
0.0424332160949707, 0.0410805778503418, 0.04165603256225586, 0.04096684646606445, 0.041250656127929684, 0.04115785598754883, 0.04114527893066406, 0.0411822395324707, 0.041333919525146486, 0.04161075210571289, 0.04142931365966797, 0.04101939010620117, 0.041283550262451174, 0.04089859390258789, 0.041134078979492186, 0.04091222381591797, 0.04095657730102539, 0.04533606338500976, 0.041195102691650394, 0.04091791915893555, 0.041176353454589844, 0.04120383834838867, 0.04122889709472656, 0.04127878570556641, 0.041210784912109374, 0.04192233657836914, 0.04129792022705078, 0.04095795059204101, 0.04109113693237305, 0.04099020767211914, 0.04117513656616211, 0.041369632720947264, 0.041174335479736326, 0.04108803176879883, 0.041861087799072265, 0.04140380859375, 0.041087776184082034, 0.04155152130126953, 0.040951969146728516, 0.04074812698364258, 0.040979007720947265, 0.0408436164855957, 0.04127961730957031, 0.040777599334716794, 0.04091904067993164, 0.0412421760559082, 0.04166086578369141, 0.04082601547241211, 0.04098748779296875, 0.04112319946289063, 0.04143942260742187, 0.04106428909301758, 0.04129622268676758, 0.04102783966064453, 0.041095169067382815, 0.041491840362548826, 0.0409865608215332, 0.04133958435058594, 0.04172851181030274, 0.04114659118652344, 0.04092006301879883, 0.04126512145996094, 0.04126188659667969, 0.04111715316772461, 0.0429901123046875, 0.041339969635009764, 0.041102272033691406, 0.041339935302734374, 0.04111663818359375, 0.041651294708251956, 0.04107561492919922, 0.04118732833862305, 0.041289726257324216, 0.04111273574829102, 0.041632606506347654, 0.04166598510742187, 0.04194566345214844, 0.04102143859863281, 0.04175468826293945, 0.041840576171875, 0.041618751525878905, 0.04106515121459961, 0.040940799713134766, 0.041055103302001954, 0.04094553756713867, 0.04111769485473633, 0.04095180892944336, 0.04101849746704102, 0.04078067016601562, 0.04105833435058594, 0.04107465744018555, 0.0409989128112793, 0.0409700813293457, 0.04135692977905273, 0.04106908798217773, 0.040938785552978516, 0.040723041534423826, 0.04084339141845703, 0.04093952178955078, 0.04125289535522461, 0.04106972885131836, 0.04096195220947266, 0.04256182479858398, 0.04120844650268555, 0.04115769577026367, 0.04203615951538086, 0.04082483291625977, 0.04097228622436523, 0.040861217498779294, 0.04109142303466797, 0.0410830078125, 0.04129587173461914, 0.042958782196044924, 0.04114633560180664, 0.0409150390625, 0.04096409606933594, 0.04097571182250977, 0.041263393402099606, 0.04096243286132813, 0.04108697509765625, 0.0410928955078125, 0.04179148864746094, 0.0441712646484375, 0.04512550354003906, 0.041660320281982424, 0.0407977294921875, 0.04086608123779297, 0.04079782485961914, 0.041075328826904296, 0.04106406402587891, 0.04103631973266601, 0.04088934326171875, 0.04115865707397461, 0.040847423553466794, 0.041011199951171876, 0.04094454574584961, 0.04112380981445313, 0.04128947067260742, 0.041301502227783206, 0.04093948745727539, 0.04091747283935547, 0.04090288162231445, 0.041369758605957034, 0.04127884674072266, 0.04242086410522461, 0.041104896545410156, 0.04233587265014648, 0.041232769012451174, 0.041320255279541016, 0.04106515121459961, 0.04089158248901367, 0.04105503845214844, 0.041199520111083986, 0.04141884613037109, 0.041017345428466793, 0.040844833374023434, 0.04116105651855469, 0.0409288330078125, 0.040872512817382814, 0.04083302307128906, 0.04102950286865235, 0.04126019287109375, 0.040970558166503905, 0.04075945663452148, 0.04126464080810547, 0.04089548873901367, 0.04111328125, 0.04134051132202148, 
0.04119625473022461, 0.0409169921875, 0.040879390716552735, 0.0416426887512207, 0.04129385757446289, 0.04140236663818359, 0.0410398063659668, 0.04091910552978516, 0.04109827041625977, 0.04098553466796875, 0.041084190368652344, 0.04104793548583984, 0.04139302444458008, 0.041215999603271485, 0.04132352066040039, 0.04120383834838867, 0.04252467346191406, 0.04122943878173828, 0.04180217742919922, 0.041443264007568356, 0.04148688125610352, 0.04136460876464844, 0.04185993576049805, 0.04140031814575195, 0.04169676971435547, 0.04127596664428711, 0.04132447814941406, 0.041191425323486325, 0.041398273468017575, 0.041280960083007814, 0.04153401565551758, 0.041455520629882815, 0.04149033737182617, 0.042753982543945315, 0.04289152145385742, 0.041240577697753904, 0.04202905654907227, 0.041442913055419923, 0.04129219055175781, 0.04107001495361328, 0.0415032958984375, 0.04118732833862305, 0.04215155029296875, 0.04116313552856445, 0.041635841369628904, 0.04145142364501953, 0.04132812881469727, 0.04152105712890625, 0.041366207122802735, 0.04138393783569336, 0.04106230545043945, 0.04131375885009766, 0.04118387222290039, 0.04123648071289063, 0.04260790252685547, 0.041158878326416015, 0.04130031967163086, 0.04123868942260742, 0.041192958831787106, 0.041361919403076174, 0.041137535095214846, 0.041378337860107424, 0.04118947219848633, 0.04129974365234375, 0.04113225555419922, 0.04157196807861328, 0.041650558471679686, 0.04118707275390625, 0.04461081695556641, 0.04124160003662109, 0.04099814224243164, 0.04104268646240234, 0.04108515167236328, 0.04110313415527344, 0.040936511993408205, 0.04110784149169922, 0.04118316650390625, 0.043580127716064454, 0.04125788879394531, 0.04158035278320313, 0.04111529541015625, 0.0409106559753418, 0.041021984100341795, 0.04106428909301758, 0.041070751190185543, 0.041051200866699215, 0.04112879943847656, 0.04107049560546875, 0.04124179077148438, 0.04163481521606445, 0.04106444931030274, 0.04117913436889648, 0.04093244934082031, 0.04105104064941406, 0.04113564682006836, 0.04088265609741211, 0.04082198333740234, 0.04106729507446289, 0.041139518737792966, 0.04099862289428711, 0.041231327056884766, 0.04072447967529297, 0.040974273681640624, 0.041054271697998045, 0.041510814666748046, 0.04095347213745117, 0.041030303955078125, 0.040918464660644534, 0.04081919860839844, 0.04092502212524414, 0.04121807861328125, 0.04074905776977539, 0.04116889572143555, 0.04118022537231445, 0.04102169418334961, 0.04095657730102539, 0.040796192169189456, 0.040869056701660154, 0.04114924621582031, 0.0414453125, 0.04088966369628906, 0.04101599884033203, 0.04098668670654297, 0.040804351806640625, 0.04108489608764648, 0.04080028915405273, 0.04081459045410156, 0.04117647933959961, 0.04230335998535156, 0.0407861442565918, 0.04096803283691406, 0.04073257446289062, 0.04086819076538086, 0.04083248138427734, 0.04114118576049805, 0.041371646881103515, 0.040827999114990236, 0.04099728012084961, 0.04092979049682617, 0.04165740966796875, 0.04099760055541992, 0.04089263916015625]",tokens/s,24.086145489218296,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,824.107008,554.631168,0.0,159.383552,142.313472,s,1,8.16002978515625,8.16002978515625,0.0,8.16002978515625,8.16002978515625,8.16002978515625,8.16002978515625,[8.16002978515625],,kWh,2.1358915637529205e-05,2.3453210161104453e-06,8.320006655987422e-06,3.2024243309627075e-05,,MB,1142.02624,628.031488,0.0,220.20096,185.324544,s,18,0.20789641571044926,0.011549800872802735,0.00010180969120244798,0.011525103569030762,0.011686764812469483,0.01177346272468567,0.011777770700454711,"[0.011513312339782715, 0.011650015830993652, 0.011535327911376953, 0.011778847694396972, 0.01148038387298584, 0.011480416297912598, 0.011419615745544433, 0.011519935607910156, 0.01162441635131836, 0.011513759613037109, 0.011450624465942383, 0.011772512435913086, 0.011460927963256836, 0.011565407752990723, 0.011571999549865722, 0.011421536445617676, 0.011530271530151367, 0.011607104301452637]",tokens/s,22164.884297081193,kWh,3.491230954794669e-07,3.850182628413345e-08,1.9269818711959725e-07,5.803231088831976e-07,tokens/kWh,441133561.7715776,MB,1153.683456,632.225792,0.0,224.395264,185.327104,s,18,10.093456604003906,0.5607475891113282,0.0033757969675585003,0.5597496948242187,0.5635010986328125,0.5675152709960938,0.5706674096679688,"[0.5616351318359375, 0.5596442260742187, 0.5714554443359375, 0.5620497436523437, 0.5597603759765625, 0.559658935546875, 0.5604142456054687, 0.5586361083984375, 0.558799560546875, 0.55605615234375, 0.5620787353515625, 0.5601438598632813, 0.5668199462890625, 0.5591299438476562, 0.5607298583984375, 0.5581077880859375, 0.5585975341796875, 0.559739013671875]",tokens/s,112.35001491462904,kWh,1.649760427512293e-05,1.8194083952481014e-06,6.559899807548376e-06,2.4876912477919406e-05,tokens/kWh,2532468.6114451867,,s,1134,10.083655643463134,0.008892112560373136,0.00027634942801051267,0.00885923194885254,0.009003254222869874,0.009087467432022095,0.009776603565216065,"[0.008827327728271484, 0.00877177619934082, 0.008989760398864746, 0.009033984184265136, 0.008794655799865722, 0.008816479682922363, 0.008876352310180665, 0.008869888305664063, 0.008881343841552734, 0.008792767524719238, 0.008884160041809083, 0.008835264205932617, 0.008729920387268067, 0.008841119766235351, 0.00900380802154541, 0.008854592323303222, 0.008901568412780761, 0.008850751876831054, 0.008829376220703126, 0.009147647857666016, 0.00880742359161377, 0.00877126407623291, 0.008735039710998534, 0.008696000099182128, 0.008687423706054688, 0.00863385581970215, 0.008667648315429688, 0.008566783905029298, 0.008638463973999023, 0.009177087783813476, 0.008740927696228027, 0.008839103698730469, 0.009045951843261719, 0.008806464195251464, 0.008869888305664063, 0.009034815788269044, 0.008768447875976563, 0.00900607967376709, 0.00897107219696045, 0.008800224304199218, 0.00881715202331543, 0.008860992431640626, 0.008790528297424317, 0.009363360404968261, 0.009117695808410644, 0.009712639808654786, 0.009779359817504883, 0.009066656112670898, 0.008972991943359375, 0.00894927978515625, 0.008993375778198242, 0.008892288208007812, 0.008949312210083007, 0.00884342384338379, 0.008926912307739258, 0.008872511863708497, 0.008917247772216797, 0.008847135543823242, 0.00900432014465332, 0.008907711982727051, 0.00882256031036377, 0.008921088218688965, 0.008963232040405273, 0.008923359870910644, 0.00890345573425293, 0.00894156837463379, 0.00889241600036621, 0.008781824111938476, 
0.00889241600036621, 0.009076607704162598, 0.008878208160400391, 0.008916576385498047, 0.00904643154144287, 0.00897433567047119, 0.008921088218688965, 0.010473471641540527, 0.008947711944580078, 0.008829119682312012, 0.008828512191772461, 0.00885372829437256, 0.008855551719665527, 0.008859647750854491, 0.00886905574798584, 0.008876864433288573, 0.008878016471862793, 0.008851327896118165, 0.008784064292907715, 0.008843551635742188, 0.00886684799194336, 0.00877184009552002, 0.008739263534545898, 0.00877184009552002, 0.008840000152587891, 0.008836031913757324, 0.008847135543823242, 0.008880640029907226, 0.008998623847961426, 0.008953599929809571, 0.008887968063354491, 0.008864224433898926, 0.00876966381072998, 0.00881868839263916, 0.008720383644104004, 0.008717823982238769, 0.008655136108398437, 0.008669343948364258, 0.008658335685729981, 0.008698080062866211, 0.009064895629882812, 0.008689663887023925, 0.008834303855895996, 0.008755968093872071, 0.008759008407592774, 0.008804191589355468, 0.008732928276062011, 0.00879190444946289, 0.008806719779968261, 0.008810527801513672, 0.00890067195892334, 0.008879648208618165, 0.008845408439636231, 0.008947903633117676, 0.008852864265441894, 0.008858048439025878, 0.008844736099243165, 0.00905673599243164, 0.008944607734680176, 0.008925439834594727, 0.009542880058288574, 0.008918784141540527, 0.008882687568664551, 0.00898691177368164, 0.008865792274475098, 0.008889535903930663, 0.008882975578308106, 0.008945440292358399, 0.008851712226867675, 0.00890060806274414, 0.008814592361450196, 0.008795455932617187, 0.00886240005493164, 0.008929023742675781, 0.008859135627746583, 0.008981247901916505, 0.01564243221282959, 0.008941760063171387, 0.00898252773284912, 0.009140255928039551, 0.0088406400680542, 0.00885750389099121, 0.008892959594726562, 0.008814080238342285, 0.008845824241638184, 0.008867936134338379, 0.008824576377868652, 0.008841183662414551, 0.008964384078979492, 0.00883407974243164, 0.008981472015380859, 0.008882176399230958, 0.008962047576904298, 0.008861696243286133, 0.008931679725646973, 0.008810144424438476, 0.00881884765625, 0.008891712188720704, 0.008826848030090331, 0.009091551780700684, 0.009545696258544923, 0.008978303909301757, 0.008984895706176757, 0.009049663543701172, 0.009135583877563476, 0.008991488456726074, 0.009088319778442383, 0.009071136474609375, 0.009021311759948731, 0.008978879928588868, 0.00930406379699707, 0.009080991744995118, 0.009072480201721191, 0.008873984336853028, 0.008962112426757813, 0.008892352104187012, 0.008900128364562988, 0.0088536958694458, 0.008841024398803712, 0.009269887924194337, 0.008907903671264648, 0.008943615913391113, 0.008919039726257324, 0.008972352027893066, 0.008912832260131835, 0.008937024116516113, 0.008884736061096191, 0.008996064186096192, 0.008946399688720703, 0.00901529598236084, 0.009771007537841797, 0.011583456039428711, 0.009238559722900391, 0.008893664360046388, 0.008876607894897462, 0.008851200103759766, 0.008853983879089356, 0.008824831962585449, 0.009244928359985352, 0.00882256031036377, 0.008754783630371094, 0.00873305606842041, 0.009052160263061524, 0.008803999900817872, 0.008765791893005371, 0.008787039756774903, 0.008747296333312988, 0.008759103775024413, 0.008888863563537597, 0.008784128189086913, 0.008765119552612305, 0.008917344093322755, 0.008839167594909669, 0.00910758399963379, 0.009039104461669922, 0.00901091194152832, 0.008965056419372558, 0.008880096435546875, 0.008847359657287598, 0.009020832061767578, 0.008855263710021972, 0.008776576042175292, 0.008904704093933105, 
0.008880000114440918, 0.009015423774719238, 0.008990240097045899, 0.009058783531188965, 0.008992192268371581, 0.008770112037658692, 0.008802304267883301, 0.008654623985290528, 0.008637727737426758, 0.008632384300231933, 0.008718815803527832, 0.008596192359924317, 0.008613632202148438, 0.00864230442047119, 0.008566975593566895, 0.008601632118225098, 0.008706015586853027, 0.00903987216949463, 0.008644607543945313, 0.008755040168762208, 0.00872704029083252, 0.008990431785583496, 0.008894463539123536, 0.008890368461608887, 0.008816639900207519, 0.008843296051025391, 0.00880793571472168, 0.008843744277954101, 0.00882688045501709, 0.00881868839263916, 0.008865792274475098, 0.008790016174316406, 0.008843296051025391, 0.008754976272583008, 0.008857791900634765, 0.008730624198913574, 0.008810720443725585, 0.008734496116638183, 0.008731679916381836, 0.008725472450256348, 0.008817888259887696, 0.008794912338256836, 0.008800607681274414, 0.008943264007568359, 0.00895792007446289, 0.009414688110351562, 0.008965888023376466, 0.009216256141662597, 0.00927945613861084, 0.009127967834472657, 0.009128191947937011, 0.009047200202941894, 0.008927840232849121, 0.008790016174316406, 0.009003007888793945, 0.008970239639282226, 0.008945664405822755, 0.009082079887390137, 0.00897103977203369, 0.008902303695678711, 0.008871711730957032, 0.009026111602783203, 0.008838879585266112, 0.008675104141235351, 0.008798815727233887, 0.008785344123840333, 0.008810303688049316, 0.008805055618286133, 0.008773599624633789, 0.008855551719665527, 0.008751104354858399, 0.008803808212280273, 0.008859423637390137, 0.008760064125061036, 0.008898783683776855, 0.008922431945800781, 0.008839008331298828, 0.00889731216430664, 0.008777567863464356, 0.008803359985351563, 0.00882540798187256, 0.008745375633239747, 0.00871628761291504, 0.008738816261291504, 0.008753439903259278, 0.009354751586914062, 0.008805791854858398, 0.009441375732421875, 0.0089303035736084, 0.009033439636230468, 0.008884063720703125, 0.008892576217651367, 0.00885366439819336, 0.008826623916625977, 0.008822431564331054, 0.008878527641296386, 0.008859904289245605, 0.008840991973876952, 0.00884937572479248, 0.008822848320007324, 0.008826175689697265, 0.008810336112976075, 0.008857824325561524, 0.00889913558959961, 0.00886963176727295, 0.008868127822875977, 0.008960127830505371, 0.008856896400451661, 0.008800992012023926, 0.008933247566223145, 0.00894108772277832, 0.008915295600891113, 0.009019647598266602, 0.008843071937561035, 0.009033727645874023, 0.009057696342468263, 0.008942399978637695, 0.008988063812255859, 0.008950143814086915, 0.008949695587158203, 0.0089682559967041, 0.008946751594543457, 0.008973504066467285, 0.008947296142578125, 0.008927392005920411, 0.008841216087341308, 0.00875449562072754, 0.008785759925842285, 0.008739839553833008, 0.00876527976989746, 0.00869164752960205, 0.00879417610168457, 0.008816096305847168, 0.008792767524719238, 0.008768511772155761, 0.008774208068847657, 0.00879967975616455, 0.008784735679626465, 0.008750111579895019, 0.008845888137817382, 0.008726335525512696, 0.00882534408569336, 0.00875539207458496, 0.008730560302734376, 0.008805631637573242, 0.008881152153015137, 0.008762944221496581, 0.008781824111938476, 0.008816736221313477, 0.008816543579101563, 0.008964096069335938, 0.008833024024963379, 0.008870207786560058, 0.00882470417022705, 0.00885331153869629, 0.008822784423828126, 0.009063776016235352, 0.00885212802886963, 0.008962047576904298, 0.008943072319030762, 0.008886048316955567, 0.008929471969604492, 0.008898880004882813, 
0.00895740795135498, 0.008868767738342285, 0.008843135833740235, 0.008888480186462402, 0.009006943702697754, 0.008996864318847657, 0.008945247650146485, 0.008948479652404785, 0.009033087730407716, 0.008877759933471679, 0.008829536437988282, 0.00882688045501709, 0.008742143630981445, 0.00882259178161621, 0.008837663650512695, 0.008807007789611816, 0.008851231575012207, 0.008806431770324707, 0.00889241600036621, 0.0088056640625, 0.008794783592224121, 0.008788064002990722, 0.008822751998901366, 0.008876031875610351, 0.008882176399230958, 0.009013248443603515, 0.00885756778717041, 0.008829312324523926, 0.0087807035446167, 0.008901375770568848, 0.00882688045501709, 0.008787967681884766, 0.008830656051635742, 0.009017663955688477, 0.009046015739440917, 0.008798272132873534, 0.009377440452575684, 0.009037983894348145, 0.008846847534179688, 0.008891008377075195, 0.008884223937988281, 0.008809696197509766, 0.0088307523727417, 0.008782848358154297, 0.0090316801071167, 0.008933664321899414, 0.008892127990722656, 0.008880127906799316, 0.008865023612976074, 0.008858367919921875, 0.008848992347717285, 0.008860063552856446, 0.008855487823486328, 0.008915007591247559, 0.008814208030700684, 0.008803008079528808, 0.00877667236328125, 0.00886678409576416, 0.008859392166137695, 0.008979999542236328, 0.008874272346496582, 0.0088536958694458, 0.008996864318847657, 0.008930591583251954, 0.008913311958312988, 0.008812959671020509, 0.008824735641479493, 0.008816639900207519, 0.008822400093078614, 0.008834815979003907, 0.008777471542358398, 0.008803135871887207, 0.008816703796386719, 0.008827103614807128, 0.00881436824798584, 0.008751456260681151, 0.008793760299682617, 0.008722432136535644, 0.008806400299072266, 0.008835071563720704, 0.008831328392028808, 0.008854559898376465, 0.008728799819946289, 0.008886688232421875, 0.008867136001586914, 0.008954400062561036, 0.008923295974731446, 0.008923135757446288, 0.008882176399230958, 0.008880127906799316, 0.008884287834167481, 0.008819840431213378, 0.008980319976806641, 0.00883187198638916, 0.008836576461791992, 0.008835712432861327, 0.00880844783782959, 0.00892518424987793, 0.008855263710021972, 0.008912384033203125, 0.008897215843200683, 0.008822879791259765, 0.00883523178100586, 0.008930879592895508, 0.008825119972229003, 0.008970239639282226, 0.008828927993774414, 0.008879903793334962, 0.008900832176208497, 0.00891808032989502, 0.00899350357055664, 0.008906304359436035, 0.008827648162841796, 0.008885951995849609, 0.008921088218688965, 0.008865311622619628, 0.008815072059631347, 0.008840736389160156, 0.008843232154846191, 0.008816991806030273, 0.008838399887084961, 0.00878275203704834, 0.008769536018371582, 0.008765439987182617, 0.008830207824707031, 0.008827327728271484, 0.008775296211242676, 0.008790719985961913, 0.008804512023925782, 0.008855392456054687, 0.008845312118530273, 0.008820735931396484, 0.008801695823669433, 0.008860095977783203, 0.00881065559387207, 0.008733856201171874, 0.008751520156860352, 0.008747776031494141, 0.008746432304382325, 0.00874726390838623, 0.008765439987182617, 0.008765439987182617, 0.008773632049560547, 0.008869888305664063, 0.009381600379943848, 0.008839232444763184, 0.008732895851135253, 0.009042176246643066, 0.008697600364685058, 0.008697855949401855, 0.008732864379882813, 0.008706944465637207, 0.008804384231567383, 0.008892576217651367, 0.009902848243713378, 0.00893727970123291, 0.008773695945739747, 0.00888435173034668, 0.008863391876220704, 0.008838720321655273, 0.008897024154663086, 0.00892460823059082, 0.00887440013885498, 
0.008853119850158691, 0.00886070442199707, 0.008898336410522461, 0.008964384078979492, 0.008924896240234376, 0.008889439582824708, 0.008935808181762695, 0.008935711860656738, 0.00889030361175537, 0.008878399848937988, 0.008893952369689942, 0.008876383781433106, 0.008824480056762696, 0.008828927993774414, 0.00879372787475586, 0.008812640190124512, 0.00882697582244873, 0.009654175758361817, 0.008836000442504884, 0.008877311706542969, 0.008890848159790039, 0.008837120056152344, 0.008845024108886719, 0.008800224304199218, 0.008824895858764649, 0.008923392295837402, 0.008871935844421386, 0.008851455688476563, 0.00880844783782959, 0.008849056243896484, 0.008798144340515136, 0.00889510440826416, 0.008784735679626465, 0.00875820827484131, 0.008759296417236329, 0.008685248374938965, 0.008675904273986816, 0.008654591560363769, 0.008642239570617676, 0.008563008308410645, 0.008574591636657716, 0.008611264228820801, 0.008561599731445312, 0.008588543891906738, 0.00865187168121338, 0.008712127685546875, 0.008658368110656739, 0.008655136108398437, 0.0086364164352417, 0.008722496032714844, 0.008745247840881348, 0.00877952003479004, 0.009174943923950196, 0.008886272430419923, 0.008904704093933105, 0.008889535903930663, 0.00885638427734375, 0.008840928077697754, 0.008956255912780762, 0.008970175743103027, 0.008830975532531739, 0.00883407974243164, 0.008775775909423827, 0.008915840148925781, 0.008893728256225586, 0.008901023864746093, 0.008967488288879395, 0.008889344215393067, 0.008828031539916991, 0.008864319801330567, 0.00886195182800293, 0.008783935546875, 0.008846336364746094, 0.008839167594909669, 0.008792063713073731, 0.008781375885009766, 0.008868288040161133, 0.008781824111938476, 0.008811903953552245, 0.008880191802978516, 0.010013504028320313, 0.0090764799118042, 0.008869791984558105, 0.00885155200958252, 0.00896985626220703, 0.0088307523727417, 0.008839776039123535, 0.008865792274475098, 0.008890368461608887, 0.008853631973266602, 0.008830016136169433, 0.009368288040161132, 0.00883516788482666, 0.008863743782043456, 0.008859647750854491, 0.008914943695068359, 0.008871935844421386, 0.008955007553100585, 0.00888044834136963, 0.00879379177093506, 0.008827775955200194, 0.008833024024963379, 0.00937382411956787, 0.008836159706115723, 0.008903103828430176, 0.008845696449279785, 0.00885331153869629, 0.00876972770690918, 0.008732768058776855, 0.008755104064941406, 0.008815839767456054, 0.008774335861206056, 0.008748127937316894, 0.008746047973632813, 0.008850879669189453, 0.0089169282913208, 0.009007679939270019, 0.008895551681518555, 0.008997983932495117, 0.008939359664916992, 0.008804351806640624, 0.008865280151367188, 0.008886783599853516, 0.008782943725585938, 0.008885472297668458, 0.00883619213104248, 0.008804960250854492, 0.008890368461608887, 0.008799712181091308, 0.008917247772216797, 0.008876128196716309, 0.008791359901428222, 0.008860671997070312, 0.009241855621337891, 0.008879872322082519, 0.009955424308776856, 0.008900927543640138, 0.008900287628173829, 0.008841407775878907, 0.00884921646118164, 0.00890595245361328, 0.008810591697692872, 0.00888697624206543, 0.008863743782043456, 0.008838239669799805, 0.008931391716003418, 0.00882534408569336, 0.008855392456054687, 0.008829216003417968, 0.00878764820098877, 0.008999456405639648, 0.00881049633026123, 0.008799615859985352, 0.008773887634277344, 0.008810879707336426, 0.008748064041137696, 0.008784319877624512, 0.008806015968322754, 0.008786687850952149, 0.008855936050415038, 0.008893631935119628, 0.00881276798248291, 0.008849791526794434, 
0.008828096389770508, 0.008818976402282715, 0.00881436824798584, 0.008860416412353515, 0.008894368171691895, 0.008775936126708985, 0.009041664123535156, 0.008962143898010254, 0.008859647750854491, 0.009093119621276855, 0.008970335960388183, 0.008937376022338867, 0.008902655601501466, 0.008928863525390626, 0.008958111763000489, 0.00895030403137207, 0.008988384246826173, 0.010232000350952149, 0.008892576217651367, 0.008858976364135742, 0.008874303817749023, 0.008875424385070801, 0.008856160163879395, 0.008924511909484864, 0.008839839935302735, 0.008869888305664063, 0.008817791938781738, 0.008727423667907715, 0.008751104354858399, 0.008724160194396972, 0.008778335571289063, 0.008918399810791016, 0.008838656425476075, 0.008837984085083008, 0.008832703590393067, 0.008816767692565917, 0.00887168025970459, 0.008837727546691895, 0.009317631721496583, 0.008844160079956054, 0.009297792434692382, 0.009973759651184083, 0.008919039726257324, 0.009072768211364745, 0.01081926441192627, 0.008917183876037597, 0.008890463829040527, 0.008869791984558105, 0.009002304077148438, 0.009071295738220214, 0.008887840270996093, 0.008884703636169434, 0.008898752212524414, 0.00892848014831543, 0.008890975952148437, 0.008955295562744141, 0.008892576217651367, 0.00892899227142334, 0.008872672080993653, 0.008902655601501466, 0.008980480194091797, 0.008855551719665527, 0.008841055870056153, 0.008861215591430664, 0.008888352394104003, 0.008923744201660156, 0.008828927993774414, 0.008857407569885255, 0.00878611183166504, 0.008794143676757812, 0.00881065559387207, 0.008836000442504884, 0.008846591949462891, 0.008909791946411132, 0.008900544166564942, 0.008886303901672363, 0.008938207626342773, 0.008948927879333496, 0.008903488159179688, 0.008890399932861328, 0.00885961627960205, 0.008855551719665527, 0.008884032249450684, 0.009906368255615234, 0.009743647575378418, 0.00913379192352295, 0.008881152153015137, 0.008935423851013183, 0.00899891185760498, 0.008962207794189454, 0.008900447845458985, 0.008872096061706542, 0.008898207664489746, 0.00886188793182373, 0.008828960418701171, 0.008913920402526856, 0.008891039848327637, 0.008870207786560058, 0.008839167594909669, 0.008871647834777832, 0.008903648376464844, 0.008906399726867676, 0.008870112419128417, 0.008886240005493163, 0.008818016052246094, 0.008794528007507324, 0.008869824409484863, 0.00886832046508789, 0.00924687957763672, 0.008782784461975097, 0.008842144012451172, 0.008867679595947265, 0.008872096061706542, 0.008788127899169922, 0.008833120346069336, 0.01009228801727295, 0.009058303833007812, 0.009010687828063964, 0.008984959602355957, 0.008883551597595215, 0.008890527725219727, 0.008862336158752442, 0.008931584358215332, 0.008836864471435547, 0.008878080368041993, 0.008847359657287598, 0.008769536018371582, 0.008867072105407716, 0.008775487899780273, 0.008724639892578125, 0.008780575752258301, 0.00879798412322998, 0.00892848014831543, 0.008897472381591796, 0.008821855545043946, 0.008842368125915528, 0.008841055870056153, 0.008812543869018554, 0.008824992179870605, 0.00873356819152832, 0.008737248420715332, 0.008867584228515624, 0.008801152229309082, 0.008851327896118165, 0.00886070442199707, 0.008749631881713868, 0.008811008453369141, 0.008753055572509766, 0.008822400093078614, 0.009271424293518066, 0.008763296127319336, 0.008755552291870116, 0.008720576286315918, 0.008716095924377441, 0.008766752243041992, 0.008706720352172851, 0.008763456344604492, 0.008740415573120117, 0.008831551551818848, 0.008763263702392578, 0.0089268798828125, 0.00890294361114502, 
0.008881504058837891, 0.008790271759033203, 0.008802623748779296, 0.008810175895690917, 0.008839391708374023, 0.008922495841979981, 0.008874496459960938, 0.008927359580993652, 0.009014880180358887, 0.008992768287658692, 0.00888259220123291, 0.00889241600036621, 0.008988415718078614, 0.008900863647460937, 0.008935711860656738, 0.008903679847717285, 0.008944095611572266, 0.008917247772216797, 0.00887564754486084, 0.00885366439819336, 0.0089169921875, 0.009298144340515136, 0.008929408073425293, 0.00894105625152588, 0.009101056098937988, 0.008953472137451172, 0.008964991569519043, 0.00886188793182373, 0.008853440284729003, 0.008873215675354004, 0.008929056167602539, 0.008932543754577636, 0.008818079948425293, 0.008779935836791992, 0.008828831672668456, 0.00876576042175293, 0.00881161594390869, 0.008962207794189454, 0.008836864471435547, 0.008806528091430664, 0.008835647583007812, 0.008814911842346191, 0.00887388801574707, 0.009119423866271972, 0.008935968399047851, 0.008994976043701171, 0.008853280067443847, 0.008887807846069335, 0.008871711730957032, 0.008861536026000977, 0.008941887855529785, 0.00882265567779541, 0.00888092803955078, 0.008855392456054687, 0.008881376266479493, 0.009087008476257324, 0.008851936340332031, 0.008677663803100586, 0.008845312118530273, 0.00882688045501709, 0.008765631675720215, 0.008779583930969238, 0.008820735931396484, 0.008701888084411621, 0.008805600166320801, 0.0086429443359375, 0.009085247993469239, 0.008942912101745606, 0.008839008331298828, 0.008887295722961425, 0.008878080368041993, 0.00897433567047119, 0.008927231788635253, 0.008976672172546387, 0.008922271728515625, 0.00903657627105713, 0.00888748836517334, 0.008917695999145507, 0.008869471549987793, 0.00883456039428711, 0.008868672370910645, 0.008853504180908203, 0.008814240455627441, 0.008808799743652344, 0.008803584098815919, 0.008796319961547852, 0.009092960357666015, 0.008826687812805175, 0.00879097557067871, 0.008845120429992677, 0.008851167678833007, 0.008796640396118164, 0.008857631683349609, 0.008797504425048829, 0.00885750389099121, 0.008862719535827637, 0.00884006404876709, 0.008847264289855957, 0.008904800415039063, 0.008769696235656738, 0.00872316837310791, 0.00874112033843994, 0.008740799903869629, 0.008746815681457519, 0.00880844783782959, 0.008773823738098145, 0.008798015594482423, 0.0088307523727417, 0.009076959609985351, 0.008936448097229004, 0.008911104202270507, 0.008775648117065429, 0.008778047561645509, 0.008884256362915038, 0.008748671531677247, 0.008784704208374024, 0.008749055862426757, 0.008783424377441406, 0.00888649559020996, 0.008798720359802246, 0.008785408020019531, 0.00899289608001709, 0.008872032165527344, 0.008894559860229492, 0.00884931182861328, 0.008880288124084472, 0.008885215759277344, 0.008857407569885255, 0.008843263626098634, 0.008853407859802246, 0.008953951835632324, 0.008878080368041993, 0.008951680183410644, 0.008919136047363281, 0.008888256072998047, 0.008890560150146484, 0.008905632019042969, 0.008897791862487793, 0.008887519836425781, 0.008897055625915527, 0.008881888389587402, 0.008922783851623536, 0.008944255828857422, 0.008841440200805665, 0.008829024314880371, 0.008830656051635742, 0.008818752288818359, 0.008861632347106933, 0.008840767860412598, 0.008804800033569336, 0.008832320213317871, 0.008784576416015625, 0.008891488075256348, 0.008954784393310548, 0.008812543869018554, 0.008897919654846192, 0.008856191635131835, 0.008812543869018554, 0.00871769618988037, 0.008664735794067383, 0.008758560180664062, 0.008734399795532227, 0.008798272132873534, 
0.008742848396301269, 0.00884761619567871, 0.008866720199584961, 0.008807264328002929, 0.009191424369812011, 0.008908127784729004, 0.008839839935302735, 0.00867302417755127, 0.0087740478515625, 0.008781503677368165, 0.008816800117492675, 0.008916576385498047, 0.00878223991394043, 0.008978272438049316, 0.008872096061706542, 0.008835391998291016, 0.008863391876220704, 0.008885919570922851, 0.008889920234680175, 0.008854016304016114, 0.008890399932861328, 0.008928576469421386, 0.00887497615814209, 0.008853504180908203, 0.008912896156311035, 0.008874239921569825, 0.00887388801574707, 0.008922207832336425, 0.008913984298706054, 0.008959839820861817, 0.0089552001953125, 0.008922016143798828, 0.008912896156311035, 0.008883999824523927, 0.009147680282592774, 0.008904671669006348, 0.008911520004272462, 0.008894559860229492, 0.008937472343444825, 0.008937536239624023, 0.008902848243713379, 0.008881279945373535, 0.008815232276916505, 0.008761343955993652, 0.008816639900207519, 0.008951199531555177, 0.008784159660339356, 0.00882310390472412, 0.008828927993774414, 0.008763392448425293, 0.008720383644104004, 0.008760736465454102, 0.008804832458496094, 0.00876147174835205, 0.008851263999938965, 0.008941791534423827, 0.008755167961120606, 0.008859328269958496, 0.008759615898132324, 0.008779264450073243, 0.008715904235839844, 0.008708703994750976, 0.008765343666076661, 0.008648736000061035, 0.008740511894226075, 0.008795999526977539, 0.008815168380737304, 0.008880288124084472, 0.008869664192199707, 0.00879036808013916, 0.008775263786315919, 0.008773632049560547, 0.00882703971862793, 0.008923392295837402, 0.008920639991760253, 0.00894438362121582, 0.009024864196777343, 0.008912351608276367, 0.008941632270812988, 0.00913923168182373, 0.009004128456115723, 0.0090033597946167, 0.009023776054382324, 0.008986687660217286, 0.008923135757446288, 0.008966143608093263, 0.00893238353729248, 0.00897532844543457, 0.009011103630065917]",tokens/s,112.45921519891753,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,906.87488,2648.571904,0.0,2246.049792,2230.657024,s,1,9.648263671875,9.648263671875,0.0,9.648263671875,9.648263671875,9.648263671875,9.648263671875,[9.648263671875],,kWh,7.785379629998109e-05,8.580637081006032e-06,2.613474313001296e-05,0.00011256917651100008,,MB,1400.459264,3239.968768,0.0,2824.863744,2571.087872,s,10,2.911514526367188,0.29115145263671877,0.0014520740468044848,0.2914717559814453,0.29225838623046874,0.2923404815673828,0.29240615783691404,"[0.29121353149414064, 0.290850830078125, 0.29172998046875, 0.2922401428222656, 0.2906698913574219, 0.29107958984375, 0.2922064819335938, 0.29193618774414065, 0.28716531372070314, 
0.29242257690429685]",tokens/s,879.267466061457,kWh,8.738994583701e-06,9.6344189300864e-07,5.802896799175783e-06,1.5505333275885423e-05,tokens/kWh,16510448.07905822,MB,1443.81952,3244.163072,0.0,2829.058048,2571.090432,s,10,22.257391845703125,2.2257391845703127,0.006524247827557654,2.223558349609375,2.233086279296875,2.23418095703125,2.23505669921875,"[2.218794189453125, 2.22403662109375, 2.23271923828125, 2.222191162109375, 2.220304443359375, 2.216022216796875, 2.232843017578125, 2.232125244140625, 2.235275634765625, 2.223080078125]",tokens/s,28.30520324966215,kWh,6.484979941296245e-05,7.153251024990165e-06,4.001827874662578e-05,0.0001120213291845784,tokens/kWh,562392.8983755801,,s,630,22.255409976959253,0.03532604758247496,0.00043190094728072026,0.03521238327026367,0.03567826805114746,0.036030264091491696,0.037117982521057136,"[0.036390625, 0.03567804718017578, 0.03534438323974609, 0.0353625602722168, 0.035340065002441405, 0.03525724792480469, 0.03522355270385742, 0.03505350494384766, 0.03514524841308594, 0.03625164794921875, 0.03513193511962891, 0.035200321197509765, 0.03542700958251953, 0.03500646209716797, 0.03500032043457031, 0.03512639999389648, 0.03488211059570313, 0.035049793243408206, 0.03493628692626953, 0.03513193511962891, 0.034942657470703124, 0.03499590301513672, 0.03495932769775391, 0.034973537445068356, 0.03503769683837891, 0.03504159927368164, 0.03510086441040039, 0.035085376739501954, 0.03505023956298828, 0.035266559600830076, 0.035165985107421874, 0.03522377777099609, 0.03512319946289062, 0.035135486602783206, 0.03509657669067383, 0.03507107162475586, 0.03526089477539063, 0.03510726547241211, 0.03501465606689453, 0.035151870727539065, 0.035028961181640624, 0.035477535247802734, 0.03515814590454101, 0.035555198669433595, 0.03516211318969727, 0.03522355270385742, 0.035151870727539065, 0.03512319946289062, 0.03521331024169922, 0.035141632080078124, 0.03510681533813476, 0.0357147216796875, 0.03523209762573242, 0.035336383819580076, 0.03509433746337891, 0.03512313461303711, 0.035334014892578126, 0.035211456298828124, 0.03512934494018555, 0.03511097717285156, 0.03526377487182617, 0.03524646377563476, 0.03571878433227539, 0.03775155258178711, 0.03561676788330078, 0.03536243057250977, 0.03523417663574219, 0.035178497314453126, 0.035229248046875, 0.03512060928344726, 0.03508732986450196, 0.03526230239868164, 0.03502851104736328, 0.03516675186157227, 0.03513900756835937, 0.03512377548217773, 0.03503318405151367, 0.03498297500610351, 0.034986942291259766, 0.034977790832519534, 0.035055553436279294, 0.03500038528442383, 0.03508153533935547, 0.035154014587402346, 0.034938751220703126, 0.035125984191894534, 0.03513753509521484, 0.03527884674072266, 0.03495257568359375, 0.034996448516845705, 0.03503081512451172, 0.03503164672851562, 0.03521539306640625, 0.03503116989135742, 0.035012481689453125, 0.03503327941894531, 0.0351130256652832, 0.035045120239257814, 0.03507747268676758, 0.0350931510925293, 0.035727359771728515, 0.03512684631347656, 0.03518918228149414, 0.035026943206787106, 0.03508838272094727, 0.035091999053955075, 0.035060192108154295, 0.03922739028930664, 0.03595052719116211, 0.03530131149291992, 0.035258495330810546, 0.035066112518310544, 0.03502592086791992, 0.035334911346435544, 0.036378623962402344, 0.03522140884399414, 0.03512329483032227, 0.0356921615600586, 0.03539187240600586, 0.03511414337158203, 0.03574012756347656, 0.03537958526611328, 0.03517366409301758, 0.03513603210449219, 0.035129535675048826, 0.035198143005371094, 0.036378974914550784, 0.035546878814697265, 
0.03547609710693359, 0.035450817108154294, 0.03535673522949219, 0.03543996810913086, 0.03541404724121094, 0.035604095458984374, 0.03634175872802734, 0.03727462387084961, 0.035294273376464846, 0.035348831176757814, 0.035985889434814455, 0.03678771209716797, 0.03613753509521484, 0.03568032073974609, 0.03540591812133789, 0.035221214294433596, 0.03518633651733399, 0.03519084930419922, 0.0350233268737793, 0.03502284622192383, 0.03671654510498047, 0.03518668746948242, 0.03516950225830078, 0.03517436981201172, 0.03497452926635742, 0.035119102478027346, 0.03513958358764648, 0.03509862518310547, 0.035025089263916016, 0.03513727951049805, 0.0350346565246582, 0.035039775848388674, 0.035073760986328126, 0.03507228851318359, 0.035133438110351564, 0.0351190071105957, 0.03555289459228515, 0.03528752136230469, 0.03540758514404297, 0.03542179107666016, 0.03569049453735352, 0.03513209533691406, 0.0353355827331543, 0.03532396697998047, 0.035229503631591795, 0.035256446838378905, 0.03530607986450195, 0.035135486602783206, 0.03568025588989258, 0.0353361930847168, 0.035327617645263674, 0.03525590515136719, 0.03531241607666016, 0.03513324737548828, 0.0351286735534668, 0.035695457458496095, 0.03540505599975586, 0.03508323287963867, 0.035411201477050784, 0.03536659240722656, 0.03653104019165039, 0.036192001342773436, 0.03565990447998047, 0.03553910446166992, 0.035659774780273434, 0.0353600959777832, 0.035481918334960935, 0.035184993743896484, 0.03566732788085938, 0.03530815887451172, 0.0352911376953125, 0.035243614196777344, 0.0351929931640625, 0.03529344177246094, 0.03509411239624023, 0.035066272735595705, 0.03508633422851563, 0.035348480224609374, 0.03508019256591797, 0.035282943725585936, 0.03522355270385742, 0.03504947280883789, 0.035181697845458985, 0.035052417755126956, 0.03514739227294922, 0.03500070571899414, 0.03527657699584961, 0.03528262329101563, 0.034990623474121095, 0.03516377639770508, 0.035025279998779295, 0.035286529541015625, 0.03509622573852539, 0.03539580917358399, 0.03554719924926758, 0.03527942276000977, 0.03521923065185547, 0.035242206573486326, 0.035269664764404296, 0.035263103485107423, 0.03538972854614258, 0.035360832214355466, 0.035229057312011716, 0.03510745620727539, 0.03508153533935547, 0.03535647964477539, 0.03531865692138672, 0.03522665786743164, 0.035254688262939454, 0.034988609313964844, 0.03512911987304688, 0.035176513671875, 0.03530752182006836, 0.03516227340698242, 0.035209217071533204, 0.03521535873413086, 0.035114593505859375, 0.03519939041137695, 0.035143295288085935, 0.035048927307128906, 0.035037281036376954, 0.0350560302734375, 0.036325790405273437, 0.035526241302490234, 0.0364031982421875, 0.03555644989013672, 0.0354447021484375, 0.03535558319091797, 0.03538473510742188, 0.035312030792236326, 0.03535647964477539, 0.03530745697021485, 0.035350975036621095, 0.03530099105834961, 0.03530534362792969, 0.0352465934753418, 0.03518409729003906, 0.03515584182739258, 0.035154590606689455, 0.035282943725585936, 0.03518054580688477, 0.03524105453491211, 0.035163040161132815, 0.03520892715454101, 0.035248416900634766, 0.03521945571899414, 0.03520307159423828, 0.035141632080078124, 0.035208255767822265, 0.035168289184570316, 0.03698291015625, 0.035531551361083984, 0.0352437744140625, 0.035168384552001955, 0.03511865615844727, 0.03513407897949219, 0.03518598556518555, 0.035123008728027344, 0.035055679321289064, 0.035334911346435544, 0.03514089584350586, 0.03518137741088867, 0.03509577560424805, 0.035187393188476565, 0.035110912322998046, 0.035142814636230466, 0.03517251205444336, 
0.03495596694946289, 0.03502284622192383, 0.03500217437744141, 0.03503734588623047, 0.03497900772094727, 0.034968414306640626, 0.03560220718383789, 0.035301601409912106, 0.035237377166748046, 0.03519683074951172, 0.03518838500976563, 0.03521017456054688, 0.0351739501953125, 0.035129791259765626, 0.03511852645874024, 0.035091072082519534, 0.03495663833618164, 0.03493958282470703, 0.03500636672973633, 0.03500236892700195, 0.036542110443115235, 0.035367263793945315, 0.035241409301757814, 0.03523574447631836, 0.035170974731445315, 0.035068031311035155, 0.03506982421875, 0.03554886245727539, 0.03522297668457031, 0.035257217407226565, 0.03512115097045899, 0.03520716857910156, 0.03515001678466797, 0.03510774230957031, 0.035079071044921875, 0.035167423248291016, 0.035050464630126954, 0.035001312255859375, 0.03506003189086914, 0.034957889556884766, 0.034895870208740236, 0.03496278381347656, 0.03502288055419922, 0.035058303833007814, 0.035186176300048826, 0.03533670425415039, 0.0349409294128418, 0.03490111923217774, 0.034890625, 0.03494911956787109, 0.03495222473144531, 0.034998497009277346, 0.03501113510131836, 0.035016384124755856, 0.03507046508789063, 0.03539913558959961, 0.035318302154541015, 0.03534806442260742, 0.03516457748413086, 0.03510095977783203, 0.03512662506103516, 0.03495711898803711, 0.03606790542602539, 0.03602579116821289, 0.035435073852539065, 0.03528499221801758, 0.03527065658569336, 0.03516416168212891, 0.0350904312133789, 0.03515776062011719, 0.03517055892944336, 0.03513068771362305, 0.03510713577270508, 0.035248512268066405, 0.03518873596191406, 0.03502284622192383, 0.03506995010375977, 0.035178497314453126, 0.035074047088623043, 0.034993247985839845, 0.035044254302978514, 0.03493040084838867, 0.034951072692871094, 0.036256095886230466, 0.03653424072265625, 0.03534441757202148, 0.03520857620239258, 0.035181182861328125, 0.03518790435791016, 0.035076702117919925, 0.03498403167724609, 0.03808063888549805, 0.035116737365722656, 0.0352770881652832, 0.03928387069702149, 0.03553164672851562, 0.03529840087890625, 0.03528796768188477, 0.03530956649780274, 0.0351242561340332, 0.03511167907714844, 0.03504969787597656, 0.03527612686157226, 0.03622540664672851, 0.03517468643188477, 0.035143680572509765, 0.03529081726074219, 0.03544672012329102, 0.03536729431152344, 0.03525241470336914, 0.03523465728759766, 0.03515884780883789, 0.03531996917724609, 0.03520102310180664, 0.035141632080078124, 0.03516960144042969, 0.03516188812255859, 0.03529203033447265, 0.03520537567138672, 0.035968734741210935, 0.035310718536376955, 0.03521142578125, 0.03517504119873047, 0.035190910339355466, 0.03520719909667969, 0.03520716857910156, 0.0351940803527832, 0.03515676879882813, 0.03517142486572265, 0.03519315338134766, 0.03507875061035156, 0.03510265731811523, 0.035417247772216796, 0.03510265731811523, 0.035119102478027346, 0.03506227111816406, 0.035275230407714846, 0.03592393493652344, 0.0357806396484375, 0.035298366546630856, 0.03537184143066406, 0.03517248153686523, 0.03527305603027344, 0.03677459335327148, 0.03525526428222656, 0.035320926666259765, 0.03641689682006836, 0.035566238403320315, 0.03542425537109375, 0.035449886322021486, 0.03539452743530273, 0.035681697845458986, 0.035547393798828125, 0.03599097442626953, 0.03553782272338867, 0.0354090576171875, 0.035406688690185546, 0.0355676155090332, 0.03548921585083008, 0.03537363052368164, 0.03534643173217773, 0.03517440032958984, 0.03537875366210937, 0.03512364959716797, 0.03521331024169922, 0.03527420806884766, 0.035238433837890625, 0.03509657669067383, 
0.035166206359863283, 0.03555123138427734, 0.03647190475463867, 0.03542419052124023, 0.03524319839477539, 0.035108543395996096, 0.03526627349853516, 0.03519062423706055, 0.03514012908935547, 0.03531161499023437, 0.03506124877929687, 0.03503769683837891, 0.035173824310302734, 0.035316001892089846, 0.03571331024169922, 0.03511296081542969, 0.03524518585205078, 0.03549683380126953, 0.03524179077148438, 0.03521926498413086, 0.03532207870483398, 0.03515526580810547, 0.03521007919311524, 0.035198047637939454, 0.03516918563842773, 0.035272705078125, 0.03564511871337891, 0.035856704711914066, 0.035335201263427735, 0.0354453125, 0.03550249481201172, 0.0353177604675293, 0.03527884674072266, 0.035292320251464844, 0.035404640197753905, 0.03527475357055664, 0.03520512008666992, 0.03737190246582031, 0.03555123138427734, 0.035471359252929685, 0.03603177642822265, 0.03669001770019531, 0.03575801467895508, 0.035729408264160156, 0.03553283309936523, 0.035579872131347653, 0.035817470550537106, 0.03549539184570313, 0.03546297454833985, 0.03551919937133789, 0.03592134475708008, 0.03555324935913086, 0.035528606414794925, 0.03539014434814453, 0.035558879852294924, 0.035417919158935544, 0.035727359771728515, 0.03560726547241211, 0.03536454391479492, 0.035427806854248046, 0.035582015991210934, 0.035402175903320315, 0.035359073638916015, 0.03548169708251953, 0.035390785217285156, 0.03532559967041016, 0.03535763168334961, 0.035366912841796876, 0.03561651229858399, 0.03553289413452149, 0.03535273742675781, 0.03567193603515625, 0.03521564865112305, 0.03531350326538086, 0.035768318176269534, 0.03576947021484375, 0.03546406555175781, 0.035835262298583984, 0.0352856330871582, 0.03549327850341797, 0.03538800048828125, 0.035194591522216795, 0.035148063659667966, 0.03537696075439453, 0.03526847839355469, 0.035203392028808594, 0.034988033294677735, 0.035112064361572264, 0.03651456069946289, 0.035173599243164065, 0.03523676681518555, 0.03519692611694336, 0.035020065307617185, 0.035278656005859374, 0.03516713714599609, 0.03521331024169922, 0.03511203384399414, 0.03518147277832031, 0.03528499221801758, 0.035211105346679684, 0.0351192626953125, 0.03510476684570313, 0.03717315292358398, 0.03573769760131836, 0.036012832641601565, 0.035588096618652344, 0.035423423767089846, 0.035194976806640625, 0.03526233673095703, 0.03517257690429688, 0.035402366638183594, 0.03506995010375977, 0.034971649169921876, 0.035074047088623043, 0.03502928161621094, 0.034971359252929685, 0.03492681503295898, 0.03493840026855469, 0.03499792098999024, 0.03526639938354492, 0.035127487182617184, 0.035020641326904293, 0.03518742370605469, 0.03542620849609375, 0.03530956649780274, 0.035236160278320314, 0.035561248779296874, 0.035106014251708985, 0.035502273559570315, 0.03532624053955078, 0.03512294387817383, 0.03536051177978516, 0.03517449569702148, 0.035125984191894534, 0.03530899047851563, 0.035195457458496095, 0.03602841567993164, 0.035714561462402344, 0.035450977325439455, 0.03533660888671875, 0.03527503967285156, 0.03518412780761719, 0.03522195053100586, 0.03546665573120117, 0.03660393524169922, 0.03554777526855469, 0.03530464172363281, 0.03519132614135742, 0.035588096618652344, 0.03527411270141602, 0.03538188934326172, 0.03554227066040039, 0.03540044784545898, 0.03520716857910156, 0.03538534545898438, 0.03523174285888672, 0.0351965446472168, 0.03520966339111328, 0.035194145202636716, 0.03515254211425781, 0.035168254852294925, 0.03509862518310547, 0.03504057693481445, 0.03504115295410156, 0.035031585693359374, 0.03506377410888672, 
0.03495967864990234]",tokens/s,28.30772385915298,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,True,MB,899.907584,6589.120512,0.0,6186.5984,6098.951168,s,1,14.886970703125,14.886970703125,0.0,14.886970703125,14.886970703125,14.886970703125,14.886970703125,[14.886970703125],,kWh,0.00023214709927502403,2.5600169838048334e-05,7.848700723397939e-05,0.0003362342763470517,,MB,1369.739264,7306.346496,0.0,6891.241472,6575.0528,s,10,11.454756103515624,1.1454756103515626,0.005645295210832161,1.1456320190429687,1.1512665527343748,1.1532828735351561,1.154895930175781,"[1.132076416015625, 1.1458841552734376, 1.1433182373046875, 1.1459764404296875, 1.1477723388671874, 1.1453798828125, 1.143957275390625, 1.144273681640625, 1.1508184814453124, 1.1552991943359374]",tokens/s,223.48795355095336,kWh,3.326286412166307e-05,3.668413089920806e-06,2.2119489917798996e-05,5.905076712938287e-05,tokens/kWh,4335252.740054885,MB,1413.042176,7308.443648,0.0,6893.338624,6575.05536,s,10,57.6337158203125,5.76337158203125,0.020991582030241374,5.761346679687501,5.790430224609374,5.793684594726562,5.796288090820312,"[5.73089013671875, 5.7457548828125, 5.7443955078125, 5.74514013671875, 5.76132373046875, 5.76136962890625, 5.77553173828125, 5.78970703125, 5.7826640625, 5.79693896484375]",tokens/s,10.931101544175675,kWh,0.00016923175169250196,1.866754150400067e-05,0.00011216045083940197,0.0003000597440359046,tokens/kWh,209958.18750169146,,s,630,57.630962600707996,0.09147771841382223,0.001650278483680085,0.09122829055786133,0.09262305297851563,0.09337039947509765,0.10180197959899902,"[0.10092902374267577, 0.08995161437988282, 0.09037836456298828, 0.08993247985839843, 0.09043154907226562, 0.0899471664428711, 0.09144828796386718, 0.09005260467529297, 0.0912828140258789, 0.08939718627929688, 0.09177977752685547, 0.09131116485595703, 0.09245791625976563, 0.09013353729248047, 0.09110832214355469, 0.08990265655517578, 0.09156988525390625, 0.09000214385986328, 0.09141232299804687, 0.09048681640625, 0.09148633575439453, 0.09003411102294921, 0.09244429016113281, 0.09059161376953125, 0.09171564483642577, 0.09000678253173829, 0.09211119842529297, 0.0898338851928711, 0.0918097915649414, 0.0901337890625, 0.09067206573486328, 0.09005852508544922, 0.09167871856689454, 0.09022402954101562, 0.092002685546875, 0.09022624206542969, 0.09130255889892579, 0.09066086578369141, 0.09079740905761718, 0.09060009765625, 0.09014067077636718, 0.09063219451904297, 0.09140633392333984, 0.09065615844726563, 0.09113660430908203, 0.09066655731201172, 0.09022659301757813, 0.09102799987792969, 0.09029017639160156, 0.09122303771972656, 0.09034239959716797, 0.09116582489013672, 0.09022962951660156, 0.09123200225830078, 0.09040662384033203, 0.09147856140136719, 0.09042739105224609, 0.09120358276367188, 
0.09093529510498047, 0.09095986938476562, 0.09048172760009765, 0.09154025268554687, 0.09053139495849609, 0.102170654296875, 0.09018572998046875, 0.0903741455078125, 0.09011097717285156, 0.08994009399414063, 0.08991219329833984, 0.09088121795654297, 0.09040089416503906, 0.09087977600097656, 0.09022163391113282, 0.09354844665527344, 0.09175363159179688, 0.09085984039306641, 0.0910709457397461, 0.0904908447265625, 0.09017266845703124, 0.09035855865478516, 0.09027986907958985, 0.09115449523925781, 0.09058303833007812, 0.09082991790771484, 0.09227561950683594, 0.09098028564453126, 0.09339500427246093, 0.09032918548583985, 0.09143695831298829, 0.09057389068603515, 0.09114246368408203, 0.09023296356201171, 0.09126143646240234, 0.09020416259765625, 0.09208153533935547, 0.09113782501220703, 0.09171603393554688, 0.09153107452392578, 0.09066761779785157, 0.09073571014404297, 0.09023580932617188, 0.09063014221191407, 0.09023680114746094, 0.09085955047607422, 0.09057084655761719, 0.09162540435791015, 0.0913337631225586, 0.09320076751708985, 0.09072006225585938, 0.09280178833007813, 0.09052159881591797, 0.09149644470214843, 0.09055824279785156, 0.0908220443725586, 0.09030534362792969, 0.09160704040527344, 0.09039004516601562, 0.0918443832397461, 0.0911461410522461, 0.0914659194946289, 0.09163017272949218, 0.09090457916259766, 0.09194844818115235, 0.09040700531005859, 0.09176525115966797, 0.09058060455322266, 0.10223868560791016, 0.09033663940429687, 0.09018582153320312, 0.09011869049072266, 0.08999321746826172, 0.0900008316040039, 0.09095225524902344, 0.09003330993652343, 0.09074508666992187, 0.09016381072998046, 0.09356835174560547, 0.09162406158447266, 0.09071209716796876, 0.09121078491210938, 0.09014371490478515, 0.09057484436035156, 0.09021644592285157, 0.09064447784423828, 0.09020620727539062, 0.09051955413818359, 0.09027519989013671, 0.09189440155029296, 0.09104930877685546, 0.0923431396484375, 0.09021622467041016, 0.09207977294921875, 0.09018611145019531, 0.09278620910644532, 0.09034799957275391, 0.09134630584716796, 0.09022447967529297, 0.0916488037109375, 0.090714111328125, 0.09239756774902344, 0.09093939208984375, 0.09117820739746094, 0.09073506927490234, 0.09032121276855469, 0.09152067565917969, 0.09085167694091797, 0.09064857482910156, 0.09096367645263671, 0.09043507385253906, 0.09103791809082032, 0.09156460571289063, 0.0909202880859375, 0.09229923248291015, 0.09037382507324218, 0.09268121337890625, 0.09046835327148438, 0.09198937225341797, 0.09054796600341797, 0.09159168243408203, 0.09048390197753907, 0.0918082275390625, 0.09093341064453125, 0.0912702407836914, 0.09106121826171874, 0.09043968200683594, 0.09180931091308593, 0.09126959991455078, 0.09173977661132812, 0.0904769287109375, 0.10175692749023438, 0.09020134735107421, 0.08994073486328125, 0.09001513671875, 0.08975421142578124, 0.09057484436035156, 0.09016473388671875, 0.09044429016113281, 0.09065676879882813, 0.09015500640869141, 0.09415277099609375, 0.09168275451660156, 0.09083699035644531, 0.09131407928466796, 0.0900854721069336, 0.09057209777832032, 0.0900860824584961, 0.0906649627685547, 0.09001369476318359, 0.09067724609375, 0.0908226547241211, 0.09205350494384766, 0.09101107025146485, 0.09239756774902344, 0.09014012908935547, 0.09210675048828125, 0.09034806060791016, 0.09188066864013672, 0.09010044860839844, 0.0917852783203125, 0.09039667510986328, 0.09252799987792969, 0.09020240020751953, 0.09232624053955078, 0.09088227081298828, 0.09142626953125, 0.09100281524658203, 0.09021440124511719, 0.09132656097412109, 
0.09025990295410156, 0.09120547485351563, 0.09074832153320313, 0.09104412841796874, 0.09105849456787109, 0.09158656311035156, 0.09089807891845703, 0.09166678619384766, 0.09086287689208984, 0.09199388885498047, 0.0903720932006836, 0.09205264282226562, 0.09051113891601563, 0.092653564453125, 0.09025676727294922, 0.0921579818725586, 0.09103177642822266, 0.0913095703125, 0.09129869079589843, 0.09064217376708984, 0.09153324890136719, 0.09048300933837891, 0.09152102661132812, 0.0910192642211914, 0.10214064025878906, 0.09020368194580078, 0.08979084777832032, 0.09015856170654298, 0.08997542572021484, 0.08994611358642578, 0.09067314910888671, 0.08999257659912109, 0.09058163452148438, 0.09038438415527343, 0.095032958984375, 0.09219840240478516, 0.09087474822998047, 0.09149161529541015, 0.09007791900634765, 0.09124205017089844, 0.09012220764160156, 0.09174678039550781, 0.09024502563476562, 0.0919710693359375, 0.09033168029785156, 0.09389193725585937, 0.09100764465332031, 0.091766845703125, 0.0908958740234375, 0.09018624114990234, 0.0908226547241211, 0.09045820617675782, 0.09110108947753906, 0.09024214172363282, 0.09128028869628907, 0.09081037139892578, 0.09209446716308593, 0.09125888061523438, 0.09207997131347656, 0.09134915161132813, 0.09244198608398438, 0.09071475219726563, 0.09185075378417969, 0.09080012512207031, 0.09147586822509765, 0.09074809265136718, 0.09133148956298828, 0.09196307373046875, 0.09133293151855469, 0.09313689422607421, 0.09091072082519532, 0.09333331298828125, 0.09086134338378907, 0.09171600341796875, 0.09140019226074218, 0.09141558074951171, 0.09065094757080078, 0.09141725158691406, 0.09203302764892578, 0.09169446563720703, 0.09201113891601563, 0.0914901123046875, 0.09296233367919922, 0.09060009765625, 0.09254911804199219, 0.09049088287353516, 0.09130294036865234, 0.10374553680419922, 0.09014067077636718, 0.09063196563720703, 0.09012860870361328, 0.09094374084472656, 0.09017842864990235, 0.09083529663085937, 0.09008796691894531, 0.09077059173583985, 0.09048719787597656, 0.09338694763183594, 0.0915719985961914, 0.09246553802490234, 0.09026569366455078, 0.09215590667724609, 0.0902635498046875, 0.09156774139404297, 0.09016079711914063, 0.0921994857788086, 0.09024649810791016, 0.09152185821533203, 0.09142243194580078, 0.0915167007446289, 0.09132697296142578, 0.09134899139404297, 0.09089456176757812, 0.090200927734375, 0.09139830780029297, 0.09064937591552734, 0.09080636596679688, 0.0908389434814453, 0.09154354858398438, 0.09091891479492188, 0.09216745758056641, 0.09093350219726562, 0.09276464080810547, 0.09041289520263672, 0.09231743621826172, 0.09055043029785156, 0.09154528045654296, 0.09092153930664063, 0.09156182098388672, 0.09103119659423828, 0.091525634765625, 0.09151897430419922, 0.09090866851806641, 0.09168077087402343, 0.09095372772216796, 0.09213337707519531, 0.0909271011352539, 0.09288492584228515, 0.09042060852050782, 0.09237356567382812, 0.09108185577392579, 0.09176569366455078, 0.09086573028564453, 0.09059286499023438, 0.09232220458984375, 0.09125071716308594, 0.09251744079589844, 0.0905871353149414, 0.09341542053222657, 0.09056671905517578, 0.10166284942626953, 0.09013475036621094, 0.08981343841552734, 0.09047650909423828, 0.09005078125, 0.0910315170288086, 0.0901734390258789, 0.09085919952392578, 0.09044818878173828, 0.09060966491699218, 0.09427702331542968, 0.09224642944335938, 0.09113414764404297, 0.09115145874023438, 0.09072322845458984, 0.09151078033447266, 0.09075917053222657, 0.09256960296630859, 0.09055232238769531, 0.09262489318847657, 0.09157830047607422, 
0.09291494750976563, 0.09151366424560547, 0.09073868560791015, 0.09192352294921875, 0.09051846313476562, 0.09112371063232422, 0.09086771392822265, 0.09158860778808593, 0.09137757110595703, 0.09280316925048829, 0.0917913589477539, 0.09278463745117188, 0.091219970703125, 0.09165618896484375, 0.09131827545166016, 0.09135718536376954, 0.0913733139038086, 0.09094290924072265, 0.09145174407958985, 0.09163619232177735, 0.09242623901367188, 0.09188556671142578, 0.09300745391845704, 0.09144361877441406, 0.09129776000976562, 0.09132444763183593, 0.09104966735839844, 0.09138550567626953, 0.09101337432861328, 0.09182249450683594, 0.09186713409423829, 0.09275702667236328, 0.09162207794189453, 0.0929808349609375, 0.09116681671142578, 0.09175920104980469, 0.09114419555664062, 0.09151897430419922, 0.09211084747314453, 0.091115234375, 0.0933501739501953, 0.09193679809570313, 0.10279727935791015, 0.09147743988037109, 0.09008390045166016, 0.09249993896484375, 0.09020829010009766, 0.09170470428466797, 0.09007987213134766, 0.09183026885986328, 0.09111961364746093, 0.0914677734375, 0.09463629150390625, 0.0924485092163086, 0.09117865753173827, 0.0913835220336914, 0.09258771514892578, 0.09062655639648437, 0.09245945739746093, 0.090785888671875, 0.0921039047241211, 0.0905214385986328, 0.09278256225585937, 0.09202777862548828, 0.09198912048339844, 0.09236978912353516, 0.09169884490966797, 0.09312700653076172, 0.09099878692626953, 0.09253590393066406, 0.09048150634765625, 0.09130540466308594, 0.09154585266113281, 0.09188390350341796, 0.09148825836181641, 0.09169305419921875, 0.09212659454345704, 0.09175513458251953, 0.09341747283935548, 0.0906910400390625, 0.09257424163818359, 0.09068707275390625, 0.09173990631103515, 0.09150316619873047, 0.09169315338134766, 0.09187532806396484, 0.09168470764160157, 0.0932435531616211, 0.09179750061035157, 0.09206169891357421, 0.09102745819091797, 0.0912384033203125, 0.09128316497802734, 0.09142422485351563, 0.09174620819091797, 0.0916817626953125, 0.09151891326904296, 0.09182208251953125, 0.09320211029052734, 0.091740478515625, 0.09223372650146484, 0.09100028991699219, 0.09207862091064453, 0.09125679779052734, 0.09141766357421875, 0.10182038116455078, 0.09088492584228515, 0.09064857482910156, 0.09059497833251953, 0.09068784332275391, 0.09103155517578125, 0.09077875518798828, 0.09070272064208984, 0.09052310180664062, 0.09134687805175781, 0.09472086334228516, 0.09400418853759765, 0.09120646667480468, 0.09105718231201172, 0.09026041412353515, 0.09176191711425781, 0.09112857818603516, 0.0904353256225586, 0.09109104156494141, 0.09057437133789062, 0.09296057891845703, 0.09251513671875, 0.09404825592041016, 0.09124636840820312, 0.0918562240600586, 0.0908662109375, 0.0904195556640625, 0.0911114273071289, 0.09081037139892578, 0.09091276550292969, 0.09141452789306641, 0.09243878173828125, 0.09246438598632813, 0.09411225891113281, 0.09143721771240235, 0.09118857574462891, 0.09122457885742187, 0.0908062744140625, 0.09129574584960938, 0.09076262664794922, 0.0913345947265625, 0.09171952056884766, 0.09230217742919922, 0.09243414306640625, 0.09360002899169922, 0.09117286682128906, 0.09221939086914062, 0.09092610931396485, 0.09141728210449218, 0.09093558502197266, 0.09098230743408203, 0.09177932739257813, 0.09177887725830078, 0.09302191925048828, 0.09201065826416016, 0.09317391967773438, 0.09145343780517579, 0.091700927734375, 0.09114633941650391, 0.09067132568359375, 0.09211289978027344, 0.09115033721923828, 0.09416499328613281, 0.10324582672119141, 0.09204326629638672, 0.09022019195556641, 
0.0925412826538086, 0.09067036437988281, 0.09143369293212891, 0.09091072082519532, 0.09133379364013672, 0.09101558685302734, 0.09123270416259766, 0.09410284423828125, 0.09263394927978516, 0.09257558441162109, 0.09086972808837891, 0.09272665405273438, 0.09071065521240235, 0.0916910400390625, 0.09094876861572265, 0.09115939331054687, 0.09088614654541016, 0.09262284851074219, 0.09262489318847657, 0.09173209381103516, 0.09318115234375, 0.09129027557373047, 0.09126297760009766, 0.09122425842285156, 0.09142047882080079, 0.09108480072021484, 0.09137356567382812, 0.0916923828125, 0.09168943786621093, 0.0926805419921875, 0.0921119384765625, 0.09380249786376953, 0.09124237060546875, 0.09140672302246093, 0.09135318756103515, 0.09123248291015625, 0.091457763671875, 0.091072509765625, 0.09214701080322266, 0.09210057830810547, 0.09381552124023437, 0.09199411010742188, 0.09180528259277344, 0.0915931167602539, 0.09111347198486328, 0.09161283111572266, 0.09164205169677735, 0.09293411254882812, 0.09103174591064453, 0.09363558197021485, 0.09209555053710937, 0.09204524993896485, 0.09252249908447266, 0.09131350708007813, 0.09382978820800782, 0.09097411346435547, 0.09238236999511719, 0.09131843566894532, 0.09168691253662109, 0.09252124786376953]",tokens/s,10.931623758654005,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.52 GiB is free. Process 114852 has 13.22 GiB memory in use. Of the allocated memory 13.10 GiB is allocated by PyTorch, and 6.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,850.628608,8891.793408,0.0,8489.271296,8353.731072,s,1,19.659875,19.659875,0.0,19.659875,19.659875,19.659875,19.659875,[19.659875],,kWh,0.00036641922464588484,4.0411474263165676e-05,0.00011784203871800791,0.0005246727376270584,,MB,1300.852736,9671.933952,0.0,9256.828928,8872.17408,s,10,15.378113403320311,1.5378113403320313,0.006813084816602454,1.5394279174804688,1.5448322387695312,1.544983135986328,1.5451038537597657,"[1.5205328369140625, 1.5339605712890625, 1.5340003662109376, 1.5376812744140624, 1.5396903076171875, 1.53916552734375, 1.541322265625, 1.5447987060546875, 1.5418275146484375, 1.545134033203125]",tokens/s,166.47035516380484,kWh,4.484644105374779e-05,4.946131660513998e-06,2.987852390279877e-05,7.967109661706055e-05,tokens/kWh,3213210.4473277815,MB,1324.556288,9703.391232,0.0,9288.286208,8872.17664,s,10,79.081390625,7.9081390625,0.012561027311100606,7.907878173828125,7.9238626953125,7.9262309082031255,7.928125478515625,"[7.8876962890625, 7.90097314453125, 7.89334716796875, 7.90706884765625, 7.898474609375, 7.91760693359375, 7.92859912109375, 7.9156005859375, 7.9086875, 
7.92333642578125]",tokens/s,7.966475994174514,kWh,0.0002314574782420868,2.5531605193919274e-05,0.00015282587226060073,0.00040981495569660686,tokens/kWh,153727.91823302806,,s,630,79.07883292388907,0.1255219570220463,0.0016765117454523551,0.12497339248657227,0.12695133972167968,0.1274543685913086,0.13530742141723634,"[0.13600544738769532, 0.12600931549072267, 0.12469289398193359, 0.12432630157470703, 0.12462899017333984, 0.12443231964111329, 0.12464284515380859, 0.12422198486328125, 0.12448303985595703, 0.12431005096435546, 0.12448767852783203, 0.12437709045410156, 0.12456499481201172, 0.12434483337402344, 0.12435820770263672, 0.12445536041259765, 0.12653504180908204, 0.1277078094482422, 0.12479074859619141, 0.1244491195678711, 0.12619955444335937, 0.1250912628173828, 0.12446076965332031, 0.12438748931884766, 0.12457862091064453, 0.1243993911743164, 0.12450179290771485, 0.12442851257324218, 0.1246740493774414, 0.12666291046142578, 0.12515916442871095, 0.12465049743652344, 0.12453990173339843, 0.12457794952392579, 0.12511011505126954, 0.12462850952148438, 0.12464297485351562, 0.12486665344238282, 0.12644630432128906, 0.12504883575439454, 0.12480694580078125, 0.1245758056640625, 0.1248912353515625, 0.12465331268310546, 0.12482758331298828, 0.12465567779541016, 0.12471660614013672, 0.12666038513183595, 0.12526230621337892, 0.12465408325195312, 0.12502531433105468, 0.12645420837402344, 0.12526236724853515, 0.12482777404785156, 0.12486195373535157, 0.12691903686523437, 0.12565497589111327, 0.1247940444946289, 0.12493494415283203, 0.1266852798461914, 0.12553948974609375, 0.12487474822998047, 0.12499849700927734, 0.1367025604248047, 0.12585791778564454, 0.12471078491210938, 0.1253166732788086, 0.12608761596679688, 0.12468019104003907, 0.12624198150634766, 0.12475199890136719, 0.1253235855102539, 0.1244695053100586, 0.12664144134521485, 0.1251500473022461, 0.12458188629150391, 0.12558646392822265, 0.12657295989990233, 0.12519481658935547, 0.12443193817138672, 0.12436319732666015, 0.12636569976806641, 0.1249095687866211, 0.12444467163085937, 0.12454892730712891, 0.1269065628051758, 0.12442144012451171, 0.1245882568359375, 0.12456598663330078, 0.1244590072631836, 0.1266175994873047, 0.12539040374755858, 0.12444854736328125, 0.12516214752197266, 0.1265778579711914, 0.12506393432617188, 0.12480313873291016, 0.12473139190673828, 0.1243908462524414, 0.12450157165527344, 0.12703641510009767, 0.1246527328491211, 0.12454176330566406, 0.12471501159667969, 0.1246836166381836, 0.12465779113769532, 0.12463158416748046, 0.12481887817382813, 0.12668109130859376, 0.12503833770751954, 0.1258115234375, 0.1247510757446289, 0.12474179077148438, 0.12496940612792969, 0.12469033813476563, 0.12471939086914062, 0.12711526489257813, 0.12541951751708985, 0.12464288330078124, 0.12486700439453124, 0.12648652648925782, 0.12538880157470703, 0.12482969665527344, 0.12489295959472656, 0.12690659332275392, 0.12547481536865235, 0.13477590942382814, 0.12565789031982422, 0.12442431640625, 0.1254229736328125, 0.12423436737060548, 0.12431314849853516, 0.12421574401855469, 0.1241046371459961, 0.12434809875488281, 0.1249716796875, 0.12605206298828125, 0.12484198760986329, 0.12446646118164062, 0.12422169494628907, 0.1264991683959961, 0.1251962890625, 0.12444684600830078, 0.12432998657226563, 0.12457478332519531, 0.12423785400390625, 0.12434729766845704, 0.12447036743164062, 0.12437801361083985, 0.12608716583251953, 0.12520038604736328, 0.12443625640869141, 0.124459228515625, 0.1272668151855469, 0.1247825927734375, 0.12452249908447266, 
0.12445276641845703, 0.12455535888671875, 0.12631638336181641, 0.12506153869628905, 0.12472908782958984, 0.12444390106201172, 0.12444086456298828, 0.12459465789794921, 0.1246904296875, 0.1267232666015625, 0.1248202896118164, 0.12685289764404298, 0.12537010955810546, 0.12481977844238282, 0.12493341064453126, 0.1268314895629883, 0.1254318084716797, 0.12490342712402344, 0.12470649719238282, 0.1268493423461914, 0.1255811233520508, 0.12463488006591797, 0.1246437759399414, 0.12693027496337891, 0.12538947296142577, 0.1245306854248047, 0.12484515380859375, 0.1287689208984375, 0.1255106887817383, 0.1247774429321289, 0.12486041259765625, 0.1265265884399414, 0.12529459381103517, 0.1350438690185547, 0.12582879638671876, 0.12470390319824219, 0.12408889770507812, 0.1259482879638672, 0.12481651306152344, 0.1241322250366211, 0.12777267456054686, 0.1262897262573242, 0.12484006500244141, 0.12423296356201172, 0.12429583740234375, 0.12764380645751952, 0.12497920227050781, 0.12434022521972657, 0.12465151977539063, 0.12660562896728517, 0.12509123229980468, 0.12461494445800782, 0.12434550476074219, 0.1243410873413086, 0.12673426818847655, 0.12512057495117188, 0.12441741180419921, 0.12454361724853516, 0.1266175994873047, 0.12518195343017577, 0.12438118743896484, 0.12453683471679687, 0.12616092681884766, 0.12498076629638671, 0.12690476989746094, 0.12465264129638672, 0.1265079345703125, 0.1252679672241211, 0.12449398040771484, 0.12528419494628906, 0.1264742431640625, 0.12515737915039063, 0.12447090911865234, 0.12478121948242188, 0.12462371063232422, 0.12631948852539063, 0.1250918426513672, 0.1247457275390625, 0.12460176086425781, 0.12785027313232422, 0.12560002899169923, 0.12502239990234376, 0.12468259429931641, 0.1266135025024414, 0.12506521606445312, 0.12469827270507812, 0.12512905883789063, 0.12699362945556641, 0.1253174057006836, 0.12597299194335937, 0.12477584075927735, 0.12642979431152343, 0.1252158737182617, 0.12489612579345703, 0.12487782287597657, 0.12702947235107423, 0.13541506958007812, 0.1258502426147461, 0.12470076751708985, 0.12407603454589844, 0.12635340881347656, 0.12515122985839844, 0.12430131530761719, 0.12411084747314453, 0.12424192047119141, 0.12604825592041016, 0.1248501739501953, 0.12422962951660156, 0.12459180450439453, 0.12437699127197266, 0.1244205093383789, 0.12457568359375, 0.1243946533203125, 0.12429199981689452, 0.1257512969970703, 0.12612403106689454, 0.12499763488769532, 0.12455487823486328, 0.1249775390625, 0.12439100646972656, 0.12453929901123047, 0.124442626953125, 0.12621414184570312, 0.1272562255859375, 0.12543830108642579, 0.12446844482421875, 0.12442294311523437, 0.12641280364990234, 0.12498486328125, 0.124432861328125, 0.12481024169921875, 0.12464774322509765, 0.1275824966430664, 0.1253763198852539, 0.12468489837646485, 0.12465135955810547, 0.1265174102783203, 0.12523417663574218, 0.1246619873046875, 0.12462073516845704, 0.12700128173828126, 0.12530671691894532, 0.12463043212890625, 0.12484905242919922, 0.12683216094970703, 0.12534204864501952, 0.12486054229736328, 0.12468994903564454, 0.12712393951416015, 0.1257467498779297, 0.12508380889892579, 0.12499929809570312, 0.1267798080444336, 0.12531302642822265, 0.1247930908203125, 0.12484588623046874, 0.12724652862548827, 0.12561328125, 0.12496752166748047, 0.1366932830810547, 0.12667539215087892, 0.12492825317382812, 0.12424732971191406, 0.12421785736083985, 0.124368896484375, 0.12425830078125, 0.12427996826171875, 0.12444306945800782, 0.1248526382446289, 0.1254516830444336, 0.12669407653808593, 0.12543949127197265, 
0.12468470764160157, 0.1244958724975586, 0.12640806579589844, 0.12673056030273439, 0.1250918426513672, 0.12682198333740236, 0.12625545501708985, 0.12499625396728516, 0.12528128051757811, 0.1272078094482422, 0.12461500549316407, 0.1257349090576172, 0.12528540802001953, 0.12559027099609374, 0.12454521942138672, 0.12514022064208985, 0.12549404907226563, 0.12610150146484375, 0.12538607788085937, 0.12502902221679688, 0.1253884506225586, 0.12481890869140624, 0.12697030639648438, 0.12531040191650392, 0.12480329895019532, 0.12477648162841797, 0.12672463989257812, 0.12562242889404296, 0.12685523223876954, 0.12742578887939454, 0.12568038177490234, 0.12488617706298828, 0.12472406768798829, 0.1269534683227539, 0.12562226867675783, 0.1247910385131836, 0.12472259521484375, 0.12705007934570312, 0.12575539398193358, 0.12470262145996094, 0.12484732818603515, 0.12706604766845703, 0.12689020538330079, 0.1248939208984375, 0.12509814453125, 0.12485359954833984, 0.12524329376220703, 0.12497571563720702, 0.12479897308349609, 0.12765798187255858, 0.13583984375, 0.1258905563354492, 0.12471501159667969, 0.12415164947509766, 0.12629993438720702, 0.12508946990966796, 0.12435731506347657, 0.12415702056884766, 0.12440614318847656, 0.12646572875976564, 0.1249493408203125, 0.12503359985351561, 0.12472361755371093, 0.12668096160888673, 0.12518268585205078, 0.125067138671875, 0.12448358154296875, 0.12658847808837892, 0.12544249725341797, 0.12442121887207032, 0.12755561828613282, 0.12649737548828124, 0.1251371841430664, 0.12456960296630859, 0.12465561676025391, 0.1268321304321289, 0.12768284606933594, 0.12571174621582032, 0.12661634826660156, 0.12544620513916016, 0.12475305938720703, 0.12500214385986327, 0.12540563201904298, 0.12592495727539063, 0.12665817260742188, 0.12502454376220704, 0.12749791717529296, 0.12606537628173828, 0.12472918701171876, 0.12456396484375, 0.12699552154541016, 0.1253710708618164, 0.12464332580566406, 0.12466175842285156, 0.12776038360595704, 0.12781958770751953, 0.12481350708007813, 0.12668637084960938, 0.12537737274169922, 0.12470035552978516, 0.12505097961425782, 0.12660115051269533, 0.1253494415283203, 0.12554927825927734, 0.12492415618896484, 0.12836767578125, 0.1262146224975586, 0.12497510528564452, 0.12489730834960938, 0.1286163787841797, 0.1258170852661133, 0.12515248107910157, 0.12768297576904297, 0.13588511657714844, 0.12586934661865234, 0.12485295867919922, 0.1264415969848633, 0.12496883392333985, 0.12511148834228517, 0.12438201904296875, 0.12639437103271484, 0.12507695770263672, 0.12432438659667969, 0.12442144012451171, 0.12436486053466797, 0.12629875183105468, 0.12491571044921874, 0.12440911865234375, 0.12455590057373046, 0.12769667053222655, 0.12543603515625, 0.12453497314453126, 0.12456253051757812, 0.12712572479248047, 0.12541158294677734, 0.12447789001464844, 0.12423577880859375, 0.12658073425292968, 0.1253396453857422, 0.12449712371826172, 0.12447750091552734, 0.12488588714599609, 0.12725382232666016, 0.12537296295166014, 0.1253376007080078, 0.12669091033935548, 0.12526201629638672, 0.1247923812866211, 0.12448220825195312, 0.12449612426757813, 0.12735667419433594, 0.1255524444580078, 0.12699251556396485, 0.12649081420898436, 0.12515679931640625, 0.12464089965820313, 0.12457846069335937, 0.12492610931396485, 0.1264394226074219, 0.1253048324584961, 0.12486450958251953, 0.12495462036132812, 0.12692889404296875, 0.12900146484375, 0.12473052978515625, 0.12699692535400392, 0.12566146850585938, 0.12493836975097657, 0.12474185943603516, 0.12705270385742187, 0.1256559371948242, 
0.12476620483398437, 0.12473548889160156, 0.12720893096923827, 0.1255385284423828, 0.12491539001464844, 0.13684982299804688, 0.12579395294189452, 0.12470496368408203, 0.12424736022949219, 0.12421206665039063, 0.12434022521972657, 0.12638617706298827, 0.12482969665527344, 0.12430131530761719, 0.1244010238647461, 0.12428463745117188, 0.12617005157470704, 0.12496278381347656, 0.12420211029052734, 0.12459033966064453, 0.1273431396484375, 0.12529673767089844, 0.12435990142822266, 0.12475369262695313, 0.1260748825073242, 0.12491820526123047, 0.12452054595947265, 0.12440790557861328, 0.12428108978271485, 0.12735648345947265, 0.1253091506958008, 0.12450787353515624, 0.12440239715576172, 0.12746947479248047, 0.1266677780151367, 0.12455014038085938, 0.12457987213134766, 0.1267056350708008, 0.1252126693725586, 0.12485836791992187, 0.12557679748535155, 0.1268310089111328, 0.1254639358520508, 0.12460300445556641, 0.12444649505615234, 0.12735075378417968, 0.12541926574707032, 0.12467046356201172, 0.1247088623046875, 0.12782796478271485, 0.12556697845458983, 0.12495053100585937, 0.12484403228759766, 0.126940673828125, 0.12533971405029296, 0.12487075042724609, 0.12463958740234375, 0.12739923095703126, 0.1258441619873047, 0.12613017272949217, 0.12651519775390624, 0.1254562530517578, 0.12479666900634766, 0.12480512237548828, 0.1265893096923828, 0.12553993225097657, 0.12470687866210937, 0.12476041412353515, 0.13497964477539062, 0.12571024322509766, 0.12449890899658203, 0.12439071655273437, 0.12418271636962891, 0.12610982513427735, 0.12474201965332031, 0.12413951873779297, 0.12615679931640625, 0.1261998062133789, 0.12482048034667968, 0.12431017303466797, 0.12453103637695312, 0.12579561614990234, 0.12672688293457032, 0.1253167037963867, 0.12437948608398437, 0.1243095703125, 0.12743590545654296, 0.12524224090576172, 0.12432351684570313, 0.12453919982910157, 0.12661488342285157, 0.12509046173095703, 0.12458972930908203, 0.12453923034667969, 0.12440358734130859, 0.12667430114746095, 0.12543462371826172, 0.12449177551269532, 0.1244590072631836, 0.1273525085449219, 0.1254136962890625, 0.12874966430664062, 0.1270344009399414, 0.12520038604736328, 0.12476220703125, 0.1248463363647461, 0.12678195190429686, 0.12652326202392578, 0.12480528259277343, 0.12478288269042968, 0.1269219207763672, 0.12548880004882812, 0.12495756530761719, 0.12469657897949218, 0.12688934326171875, 0.12568179321289064, 0.1247278060913086, 0.12476742553710937, 0.12721571350097657, 0.12543462371826172, 0.12472930908203125, 0.12485363006591797, 0.12695110321044922, 0.12528675079345702, 0.1248364486694336, 0.12493619537353516, 0.12756582641601563, 0.13483404541015626, 0.12681375885009766, 0.12528057861328126, 0.124830078125]",tokens/s,7.966733659389671,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1231.867904,8448.311296,0.0,8053.06368,7930.605568,s,1,20.01771875,20.01771875,0.0,20.01771875,20.01771875,20.01771875,20.01771875,[20.01771875],,kWh,0.00037114494468332474,4.093289112526352e-05,0.0001430862255800136,0.0005551640613886018,,MB,1240.8832,10214.11328,0.0,9806.282752,9135.58528,s,10,18.41105407714844,1.8411054077148439,0.0058580154062901615,1.8429332885742187,1.8458336547851564,1.8459052307128907,1.845962491455078,"[1.8262845458984376, 1.8353111572265626, 1.8402359619140625, 1.8409066162109375, 1.8422747802734376, 1.8453515625, 1.845976806640625, 1.843591796875, 1.8453031005859375, 1.8458177490234375]",tokens/s,139.04690026289362,kWh,5.352741918416617e-05,5.9037187881541e-06,3.562297294279898e-05,9.505411091511925e-05,tokens/kWh,2693202.82453224,MB,1259.106304,10214.11328,0.0,9806.282752,9135.58784,s,10,90.73418359375,9.073418359375001,0.0156509527946229,9.0774873046875,9.088825683593749,9.089517822265625,9.090071533203124,"[9.0414873046875, 9.051458984375, 9.064400390625, 9.071638671875, 9.076765625, 9.078208984375, 9.0830068359375, 9.0883349609375, 9.088671875, 9.0902099609375]",tokens/s,6.943358886885889,kWh,0.00026539329654541496,2.9273421409762916e-05,0.000176476363403201,0.00047114308135837895,tokens/kWh,133717.3408518728,,s,630,90.7288251647949,0.14401400819808718,0.0019797132612469886,0.1439374542236328,0.14570172576904297,0.14631678695678713,0.15306474502563477,"[0.15379833984375, 0.1397642822265625, 0.1409845428466797, 0.14038812255859376, 0.14089727783203124, 0.1408256072998047, 0.14947740173339844, 0.14503549194335938, 0.14089599609375, 0.1415963897705078, 0.1414220428466797, 0.14118199157714845, 0.14272079467773438, 0.14735565185546876, 0.14382797241210937, 0.14251513671875, 0.14129362487792968, 0.1414345245361328, 0.14134719848632812, 0.14360572814941405, 0.14593434143066406, 0.14409318542480468, 0.1424937286376953, 0.1414691162109375, 0.14184463500976563, 0.1420700225830078, 0.14439846801757814, 0.14492057800292968, 0.1443736572265625, 0.14278460693359374, 0.1422459259033203, 0.14192393493652344, 0.142700927734375, 0.14486528015136718, 0.14446185302734374, 0.1448591003417969, 0.14298713684082032, 0.14260578918457031, 0.14261036682128905, 0.14342037963867188, 0.14498789978027343, 0.14528025817871093, 0.14367526245117188, 0.14357798767089844, 0.14229299926757813, 0.14309791564941407, 0.14418118286132814, 0.14484632873535155, 0.14460751342773437, 0.14494650268554687, 0.1429410858154297, 0.14263821411132813, 0.143804443359375, 0.1449316864013672, 0.14434518432617188, 0.1438693084716797, 0.14451664733886718, 0.14444610595703125, 0.1428118438720703, 0.1436764221191406, 0.14362690734863282, 0.14530979919433593, 0.14513551330566407, 0.1523686981201172, 0.1416813507080078, 0.1412472381591797, 0.14095619201660156, 0.14148200988769533, 0.1419370880126953, 0.148048095703125, 0.14472735595703126, 0.14170159912109376, 0.14197555541992188, 0.1413112335205078, 0.1409502716064453, 0.14308146667480467, 0.14584422302246094, 0.1445457000732422, 0.1417667541503906, 0.14226431274414061, 0.14122598266601563, 0.14181919860839845, 0.14360797119140625, 0.1447523193359375, 0.14514796447753905, 0.14314576721191405, 0.14196879577636717, 0.142608154296875, 0.14223052978515624, 0.14413189697265624, 0.14523802185058593, 0.1449505615234375, 0.1436904602050781, 0.14239744567871093, 0.1425807342529297, 0.14269747924804688, 0.14421577453613282, 0.14531939697265625, 0.14443399047851563, 0.1435891571044922, 0.14319638061523438, 
0.1424691162109375, 0.1433242492675781, 0.14391714477539064, 0.14513385009765625, 0.14452175903320313, 0.14384332275390624, 0.14342515563964844, 0.14303680419921874, 0.14375730895996094, 0.14464125061035157, 0.14478764343261719, 0.14430677795410157, 0.14445703125, 0.14446456909179686, 0.1427569580078125, 0.1437493438720703, 0.14498825073242189, 0.1450030975341797, 0.14388633728027345, 0.1446646728515625, 0.14433010864257811, 0.14358738708496094, 0.1443046112060547, 0.1438963165283203, 0.1448379821777344, 0.15338607788085937, 0.14150137329101561, 0.1414450225830078, 0.14122195434570312, 0.14120346069335937, 0.14220492553710937, 0.1482581787109375, 0.1459554901123047, 0.14240150451660155, 0.14162944030761718, 0.14164787292480469, 0.14160281372070313, 0.14318899536132812, 0.14694633483886718, 0.14541497802734374, 0.14252774047851563, 0.14294493103027345, 0.14155302429199218, 0.14197824096679687, 0.14442413330078124, 0.14596383666992188, 0.14480802917480468, 0.14270585632324218, 0.14298390197753907, 0.14149183654785155, 0.14314533996582032, 0.14456640625, 0.14539967346191407, 0.14411978149414062, 0.14321871948242187, 0.14269760131835937, 0.1426236114501953, 0.14321417236328124, 0.14509507751464842, 0.14506710815429688, 0.14410986328125, 0.14347737121582033, 0.14311013793945312, 0.1430711669921875, 0.1435661163330078, 0.14529171752929687, 0.14498252868652345, 0.14449647521972656, 0.143414306640625, 0.14314093017578125, 0.14363935852050783, 0.14387318420410156, 0.14573846435546875, 0.14403532409667968, 0.14494700622558593, 0.1434799041748047, 0.14385862731933594, 0.1444381408691406, 0.14431027221679688, 0.14497782897949218, 0.14496572875976563, 0.14371961975097655, 0.14379283142089844, 0.14393942260742187, 0.14457679748535157, 0.14378802490234374, 0.14436341857910157, 0.14422991943359376, 0.15387295532226564, 0.14117225646972656, 0.14120393371582032, 0.14122157287597656, 0.1414822998046875, 0.1424199676513672, 0.14897561645507812, 0.14533631896972657, 0.14146969604492188, 0.1415045166015625, 0.14139334106445312, 0.14181741333007813, 0.14350361633300782, 0.14801997375488282, 0.14481202697753906, 0.14329241943359375, 0.14143849182128906, 0.14180134582519532, 0.14239395141601563, 0.14503961181640626, 0.14611430358886718, 0.14477107238769532, 0.1426282501220703, 0.14226693725585937, 0.14192437744140626, 0.14295805358886718, 0.14489208984375, 0.14623983764648438, 0.1445419464111328, 0.14333926391601562, 0.1427412109375, 0.14198789978027343, 0.14271212768554686, 0.14565840148925782, 0.1458855743408203, 0.14513874816894531, 0.14258067321777343, 0.1426862030029297, 0.14315866088867188, 0.1438536376953125, 0.14469740295410155, 0.14642166137695312, 0.14453001403808594, 0.14417277526855468, 0.14322886657714845, 0.14377609252929688, 0.14387577819824218, 0.14503762817382812, 0.14447001647949217, 0.14501478576660157, 0.1440911407470703, 0.143499267578125, 0.14340940856933593, 0.14417234802246093, 0.14511293029785155, 0.14631587219238282, 0.14440867614746095, 0.1448283233642578, 0.14360064697265626, 0.1431357421875, 0.1447751007080078, 0.14470745849609376, 0.1455485382080078, 0.15309706115722657, 0.14139187622070312, 0.14157122802734376, 0.1412823944091797, 0.14143052673339843, 0.14214306640625, 0.14870323181152345, 0.1457953643798828, 0.14304269409179687, 0.14187660217285156, 0.14167945861816406, 0.1417554931640625, 0.14370687866210938, 0.1468087615966797, 0.144859130859375, 0.14338621520996095, 0.14236099243164063, 0.1417472381591797, 0.14258476257324218, 0.14486441040039064, 0.14564639282226563, 
0.14478970336914063, 0.14382386779785156, 0.1419252471923828, 0.14229005432128905, 0.14355136108398436, 0.14554829406738282, 0.14500146484375, 0.14621437072753907, 0.1433564453125, 0.14213529968261718, 0.1424752655029297, 0.1438126983642578, 0.14569667053222657, 0.1456818542480469, 0.14504531860351563, 0.14346319580078126, 0.1428164825439453, 0.14243699645996094, 0.14429766845703124, 0.14505180358886718, 0.1457236785888672, 0.14437196350097656, 0.14336384582519532, 0.14425479125976562, 0.14304864501953124, 0.14476480102539063, 0.1452046661376953, 0.14426821899414063, 0.1444085693359375, 0.14378562927246094, 0.14370428466796875, 0.14489817810058594, 0.14448214721679686, 0.1451992645263672, 0.14500250244140625, 0.14500601196289062, 0.1433871307373047, 0.14431639099121094, 0.14396182250976564, 0.1449190673828125, 0.1441544647216797, 0.14485708618164062, 0.15255990600585936, 0.14148051452636717, 0.1425960693359375, 0.14130093383789064, 0.1417442626953125, 0.14271490478515625, 0.14888934326171874, 0.14532675170898438, 0.1423209991455078, 0.1424608917236328, 0.14159103393554687, 0.1423816680908203, 0.14396424865722657, 0.14698617553710938, 0.14518531799316406, 0.14277842712402344, 0.1421528625488281, 0.14270086669921875, 0.1427072296142578, 0.14452940368652345, 0.14571929931640626, 0.1451685791015625, 0.1431092529296875, 0.14294822692871093, 0.14217913818359376, 0.14292533874511718, 0.14451145935058593, 0.14507846069335936, 0.14543238830566407, 0.14287271118164063, 0.14326889038085938, 0.1430734405517578, 0.14329270935058594, 0.1444929656982422, 0.14553651428222655, 0.14552114868164062, 0.1437532196044922, 0.14322694396972657, 0.14340089416503907, 0.14331494140625, 0.14419676208496093, 0.14578569030761718, 0.14455987548828125, 0.14387840270996094, 0.14362185668945313, 0.14368182373046876, 0.1448078155517578, 0.14434725952148436, 0.14496563720703126, 0.14503526306152345, 0.14494924926757813, 0.143519775390625, 0.14319612121582032, 0.14492697143554686, 0.14488316345214844, 0.14514614868164064, 0.1452085723876953, 0.143828857421875, 0.14447833251953124, 0.14434962463378906, 0.14395196533203125, 0.1450160675048828, 0.14412693786621095, 0.15298562622070314, 0.1412021484375, 0.14137705993652344, 0.14218882751464842, 0.14241325378417968, 0.14305381774902343, 0.14896847534179689, 0.14485154724121094, 0.14157020568847656, 0.14132447814941407, 0.14281686401367188, 0.14250384521484374, 0.14419171142578124, 0.1473047637939453, 0.14447410583496093, 0.14197145080566406, 0.1425482940673828, 0.14238156127929688, 0.14281692504882812, 0.14539523315429687, 0.1461627197265625, 0.14449043273925782, 0.1424959716796875, 0.14346630859375, 0.14220109558105468, 0.14314675903320312, 0.14585650634765626, 0.14527040100097657, 0.14413568115234374, 0.14420262145996093, 0.14292807006835936, 0.14285562133789062, 0.14364915466308595, 0.14557347106933594, 0.14451724243164063, 0.14484713745117186, 0.144278564453125, 0.14339117431640624, 0.14354888916015626, 0.1440214385986328, 0.14489971923828124, 0.14484326171875, 0.14487142944335937, 0.14404403686523437, 0.14332261657714843, 0.14349772644042968, 0.14565989685058595, 0.144959716796875, 0.14523574829101563, 0.14418060302734376, 0.14416450500488281, 0.1445672607421875, 0.14356224060058595, 0.14484284973144532, 0.14511964416503906, 0.14515737915039062, 0.14567706298828126, 0.14383120727539062, 0.1444036865234375, 0.14422285461425782, 0.14417100524902343, 0.14397859191894533, 0.14587689208984375, 0.1537274932861328, 0.14221839904785155, 0.14148089599609376, 0.14127708435058595, 
0.14149139404296876, 0.14341818237304688, 0.14954905700683593, 0.1461709747314453, 0.14289308166503906, 0.14155046081542969, 0.14151065063476562, 0.14169088745117187, 0.1443094024658203, 0.14735037231445314, 0.14531558227539063, 0.14356707763671875, 0.14250518798828124, 0.14185491943359374, 0.14207212829589844, 0.14464402770996093, 0.1460284423828125, 0.14594534301757814, 0.14329417419433593, 0.1427021484375, 0.14167904663085937, 0.14278271484375, 0.14458236694335938, 0.14554934692382812, 0.14543043518066406, 0.14341539001464843, 0.14322207641601561, 0.14199058532714845, 0.14450025939941405, 0.14450732421875, 0.14570089721679688, 0.14498374938964845, 0.1436429443359375, 0.1433632354736328, 0.14272598266601563, 0.14463795471191407, 0.14519296264648437, 0.1458214111328125, 0.14435757446289063, 0.14440873718261718, 0.14391859436035156, 0.14295085144042968, 0.1450762939453125, 0.14518368530273437, 0.14538035583496095, 0.14503651428222655, 0.14400326538085936, 0.14310447692871095, 0.14385369873046874, 0.14508358764648438, 0.1452388458251953, 0.14558198547363282, 0.14517584228515626, 0.14474070739746095, 0.14396873474121094, 0.14464825439453124, 0.14421395874023438, 0.1462129211425781, 0.14536044311523438, 0.15422186279296876, 0.14164390563964843, 0.14156040954589844, 0.14159872436523438, 0.1418603515625, 0.14326425170898438, 0.14934848022460936, 0.14580313110351562, 0.14290310668945314, 0.14172384643554686, 0.14165107727050782, 0.14223965454101561, 0.14464950561523438, 0.14722221374511718, 0.1452969970703125, 0.14333378601074218, 0.1419911651611328, 0.14186944580078126, 0.1425451202392578, 0.14526016235351563, 0.1457997131347656, 0.14536294555664062, 0.14381471252441405, 0.14249098205566407, 0.14198431396484376, 0.14328221130371094, 0.14537303161621093, 0.14555357360839843, 0.14508441162109376, 0.14340821838378906, 0.14239382934570313, 0.14248124694824219, 0.14399754333496093, 0.14445135498046874, 0.14612696838378905, 0.14459458923339844, 0.1440809326171875, 0.14290950012207032, 0.14308927917480468, 0.14479434204101563, 0.14468095397949218, 0.14529481506347655, 0.14555804443359374, 0.1438078155517578, 0.14358937072753905, 0.14382560729980468, 0.14509671020507814, 0.14554521179199217, 0.14433453369140625, 0.14462416076660156, 0.1444595489501953, 0.1435914306640625, 0.1444085693359375, 0.1444552917480469, 0.1447178192138672, 0.14495578002929688, 0.14533631896972657, 0.14475273132324218, 0.14511033630371092, 0.14381936645507812, 0.14488128662109376, 0.14536895751953124, 0.14482838439941406, 0.1558402557373047, 0.14168550109863282, 0.14155938720703126, 0.14133404541015626, 0.141491455078125, 0.14241958618164063, 0.14959161376953126, 0.14631753540039064, 0.14200650024414063, 0.1416884765625, 0.14160850524902344, 0.1424034881591797, 0.14422889709472655, 0.14717987060546875, 0.1455615997314453, 0.14257586669921876, 0.14163328552246093, 0.14178250122070313, 0.14267190551757813, 0.1454535675048828, 0.1462494659423828, 0.14526693725585937, 0.14329443359375, 0.14200425720214843, 0.14180157470703125, 0.1435219268798828, 0.14548768615722657, 0.14559432983398438, 0.14559642028808595, 0.14323721313476562, 0.14280181884765625, 0.14199623107910156, 0.14433094787597656, 0.14562342834472655, 0.14586611938476562, 0.1447895965576172, 0.14426547241210938, 0.14382748413085938, 0.14249574279785157, 0.14421810913085936, 0.14534042358398438, 0.1460121612548828, 0.14489584350585938, 0.14478536987304688, 0.14392282104492188, 0.14265196228027344, 0.14388153076171875, 0.14570918273925781, 0.14569468688964843, 
0.14485693359375, 0.1442166748046875, 0.1433849334716797, 0.14386976623535155, 0.14483433532714843, 0.145850341796875, 0.14496751403808594, 0.14506419372558593, 0.14378204345703124, 0.14393548583984375, 0.14362214660644532, 0.14465434265136717, 0.1461411895751953, 0.14632867431640625]",tokens/s,6.94376896047868,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,838.098944,555.614208,0.0,153.092096,140.32384,s,1,7.7428427734375,7.7428427734375,0.0,7.7428427734375,7.7428427734375,7.7428427734375,7.7428427734375,[7.7428427734375],,kWh,1.7511414754198995e-05,1.924423652816457e-06,5.3450042760139205e-06,2.478084268302937e-05,,MB,1285.746688,668.860416,0.0,253.755392,216.246784,s,11,0.2192925434112549,0.019935685764659535,0.0002761667346090473,0.019847423553466796,0.020426464080810548,0.020427935600280764,0.020429112815856933,"[0.020429407119750977, 0.0197574405670166, 0.019675071716308595, 0.019864351272583007, 0.020426464080810548, 0.020117183685302735, 0.020103296279907226, 0.019847423553466796, 0.01970172882080078, 0.019671199798583984, 0.019698976516723633]",tokens/s,12841.293899897704,kWh,5.756943646120469e-07,6.348910087332776e-08,3.8138700245757247e-07,1.0205704679429473e-06,tokens/kWh,250840101.72858647,MB,1299.345408,691.929088,0.0,276.824064,216.249344,s,11,9.798130493164063,0.8907391357421875,0.0028705744235329976,0.8913369140625,0.8936650390625,0.8945853576660157,0.8953216125488281,"[0.888602783203125, 0.8913369140625, 0.885204833984375, 0.8898414916992188, 0.891607177734375, 0.8921223754882812, 0.8936650390625, 0.8866962280273437, 0.8927362670898438, 0.8908117065429687, 0.8955056762695313]",tokens/s,70.72777816987544,kWh,2.5818978704138714e-05,2.847429621895968e-06,9.294060552709622e-06,3.796046887874431e-05,tokens/kWh,1659621.2286323046,,s,693,9.792736935615551,0.014130933529026753,0.00030044024555881993,0.01405942440032959,0.014346124839782716,0.014479910850524903,0.015264373855590822,"[0.01378883171081543, 0.014086688041687011, 0.014080448150634766, 0.014026592254638672, 0.013946720123291016, 0.013992320060729981, 0.014012191772460937, 0.014032896041870118, 0.013943072319030761, 0.014055135726928712, 0.013977503776550293, 0.01420911979675293, 0.014106623649597168, 0.014108223915100098, 0.014154175758361816, 0.014213120460510254, 0.014172160148620605, 0.014138367652893067, 0.014010656356811523, 0.013932576179504394, 0.013982399940490723, 0.013951135635375976, 0.014038880348205567, 0.014059679985046387, 0.014067328453063965, 0.014047264099121094, 0.014051520347595214, 0.01402400016784668, 0.014041983604431152, 0.014073663711547852, 0.013985376358032226, 0.01402006435394287, 0.01397158432006836, 0.014056256294250488, 0.0141080961227417, 0.014170687675476074, 0.014170207977294923, 0.014089664459228516, 0.014320063591003417, 0.014055456161499023, 0.01417625617980957, 0.017207103729248045, 0.014303423881530762, 0.014043135643005371, 0.014134783744812012, 
0.014078720092773438, 0.013995519638061523, 0.013964863777160644, 0.013994688034057618, 0.013983967781066895, 0.014096159934997559, 0.014000127792358399, 0.013991935729980469, 0.013935775756835937, 0.013961983680725097, 0.013956416130065917, 0.01397043228149414, 0.013948512077331544, 0.014018239974975586, 0.014054112434387207, 0.013962783813476563, 0.01402291202545166, 0.014057472229003906, 0.0137542724609375, 0.014055520057678223, 0.014071167945861817, 0.014090527534484863, 0.014032928466796876, 0.014237824440002442, 0.014059295654296875, 0.014061984062194824, 0.013969375610351562, 0.01402064037322998, 0.01399779224395752, 0.01406390380859375, 0.013995776176452637, 0.014031295776367187, 0.014092096328735352, 0.01415167999267578, 0.014054464340209962, 0.014039520263671876, 0.014105055809020996, 0.013998080253601074, 0.018027904510498047, 0.015261568069458008, 0.014115584373474122, 0.01417625617980957, 0.014095840454101562, 0.01411945629119873, 0.014170111656188965, 0.014049280166625976, 0.01403600025177002, 0.01400723171234131, 0.014009407997131348, 0.013992544174194335, 0.014002752304077149, 0.013952992439270019, 0.013948096275329589, 0.013963552474975586, 0.014078271865844727, 0.013941887855529786, 0.014025664329528809, 0.01397977638244629, 0.014059391975402832, 0.014005791664123536, 0.014313952445983887, 0.014104576110839843, 0.014100671768188477, 0.01412281608581543, 0.01401852798461914, 0.014047136306762695, 0.013973631858825684, 0.014446592330932618, 0.014049280166625976, 0.01416761589050293, 0.014131839752197266, 0.013995840072631835, 0.014007519721984864, 0.014011167526245117, 0.014012415885925293, 0.014007967948913574, 0.014149951934814452, 0.01406287956237793, 0.014064255714416505, 0.014035039901733399, 0.014037023544311524, 0.013726592063903809, 0.014145343780517579, 0.01403600025177002, 0.013969504356384277, 0.013992799758911132, 0.01395257568359375, 0.014043264389038086, 0.013943136215209961, 0.013989407539367676, 0.013966079711914062, 0.014034655570983887, 0.013973376274108888, 0.014319744110107422, 0.014091584205627441, 0.01397747230529785, 0.014025535583496094, 0.014034111976623536, 0.014019392013549804, 0.013936863899230958, 0.014022239685058594, 0.013936256408691406, 0.014031423568725585, 0.01406387233734131, 0.014003680229187012, 0.013919872283935547, 0.014101152420043945, 0.013925824165344238, 0.014000703811645508, 0.013955327987670898, 0.014042880058288574, 0.013966976165771484, 0.014056991577148438, 0.014113439559936523, 0.014196127891540527, 0.014083168029785157, 0.014081727981567383, 0.014004223823547364, 0.014061311721801757, 0.013996447563171387, 0.013986944198608398, 0.01399852752685547, 0.014234911918640137, 0.0140665283203125, 0.014037471771240234, 0.013940544128417968, 0.014120415687561035, 0.013971743583679199, 0.014139424324035645, 0.013973600387573242, 0.014235551834106446, 0.01410201644897461, 0.014080608367919922, 0.014182432174682617, 0.014106592178344727, 0.013990912437438965, 0.013987104415893554, 0.014480640411376953, 0.014100223541259766, 0.014050016403198242, 0.014108672142028808, 0.014009471893310546, 0.01407043170928955, 0.014020928382873535, 0.013849504470825195, 0.01405247974395752, 0.014013376235961914, 0.014116031646728516, 0.013990719795227051, 0.014026752471923828, 0.013967552185058595, 0.014015999794006348, 0.014016063690185546, 0.013984224319458009, 0.01397987174987793, 0.014044575691223145, 0.014023200035095214, 0.014227359771728516, 0.014017760276794433, 0.014012831687927246, 0.014024991989135743, 0.014004544258117676, 
0.014108511924743652, 0.0140217924118042, 0.013966303825378418, 0.013970560073852539, 0.01403996753692627, 0.013983743667602539, 0.013943008422851563, 0.013946175575256348, 0.01407369613647461, 0.014000096321105958, 0.013979424476623535, 0.014041664123535157, 0.013903679847717286, 0.014104703903198242, 0.014080384254455566, 0.013960607528686523, 0.013919072151184082, 0.013993727684020995, 0.013981056213378907, 0.013986432075500488, 0.013938688278198242, 0.014184224128723144, 0.014053183555603027, 0.014246175765991211, 0.014467007637023925, 0.014294591903686524, 0.014323776245117187, 0.014189120292663573, 0.014116512298583985, 0.01403324794769287, 0.01400387191772461, 0.014286208152770995, 0.014479424476623535, 0.014684255599975587, 0.014416288375854493, 0.014268832206726074, 0.014272543907165527, 0.014060544013977052, 0.014106975555419923, 0.01401638412475586, 0.013985823631286622, 0.014016544342041016, 0.014011072158813477, 0.014988448143005371, 0.01554543972015381, 0.014014464378356933, 0.014075072288513183, 0.014158944129943847, 0.014036767959594727, 0.014086079597473145, 0.014022015571594238, 0.014039936065673829, 0.014210816383361816, 0.014138879776000977, 0.014050016403198242, 0.0140730562210083, 0.01409830379486084, 0.014080703735351563, 0.013987839698791504, 0.014018560409545898, 0.015005087852478028, 0.014119520187377929, 0.014058624267578125, 0.013982144355773926, 0.014065759658813477, 0.014145248413085938, 0.014123647689819337, 0.014014047622680664, 0.013971872329711914, 0.014036992073059081, 0.014451807975769042, 0.014074784278869629, 0.01414896011352539, 0.014018336296081542, 0.014011263847351074, 0.014047231674194336, 0.014129152297973633, 0.013969152450561524, 0.013951231956481933, 0.014014464378356933, 0.014075263977050782, 0.013961888313293458, 0.014036895751953125, 0.014001215934753418, 0.014193120002746583, 0.013977343559265137, 0.01420143985748291, 0.014026944160461425, 0.01403711986541748, 0.014001215934753418, 0.013996319770812988, 0.013967264175415038, 0.014086784362792969, 0.014002304077148437, 0.014025600433349609, 0.014386176109313965, 0.014221280097961427, 0.014057696342468262, 0.014054719924926757, 0.01493667221069336, 0.015060352325439454, 0.014856032371520996, 0.014527168273925781, 0.014178303718566895, 0.014202143669128418, 0.014225119590759278, 0.0143340482711792, 0.014072416305541992, 0.01388755226135254, 0.01417840003967285, 0.014115903854370117, 0.014022527694702148, 0.014424896240234375, 0.014078047752380371, 0.01447935962677002, 0.01434227180480957, 0.01402460765838623, 0.01396230411529541, 0.014004159927368164, 0.014064607620239257, 0.014048416137695313, 0.014150495529174805, 0.01408409595489502, 0.014079968452453613, 0.01408777618408203, 0.014159296035766602, 0.014000896453857423, 0.014059776306152344, 0.014021727561950683, 0.014156703948974609, 0.014169952392578126, 0.01402400016784668, 0.014105440139770508, 0.014034943580627441, 0.013987839698791504, 0.014045439720153808, 0.014170047760009765, 0.014081855773925782, 0.013983743667602539, 0.014235967636108399, 0.014007200241088867, 0.014016703605651855, 0.013969216346740723, 0.014133631706237793, 0.014598655700683593, 0.014010208129882813, 0.014057536125183106, 0.014194368362426758, 0.014040863990783692, 0.014149632453918457, 0.014166239738464356, 0.013961536407470703, 0.014129088401794434, 0.01414345645904541, 0.014122079849243165, 0.0140830717086792, 0.014278656005859374, 0.014107744216918945, 0.014062496185302734, 0.014099871635437012, 0.014123583793640137, 0.014266495704650878, 
0.014171903610229492, 0.01427676773071289, 0.014093952178955078, 0.014063488006591797, 0.014430368423461914, 0.014697952270507813, 0.014721055984497071, 0.014678879737854004, 0.01446729564666748, 0.013877408027648926, 0.014188832283020019, 0.01439577579498291, 0.014280096054077148, 0.014255935668945313, 0.014287648200988769, 0.014276288032531738, 0.01414790439605713, 0.014227456092834472, 0.014526528358459473, 0.01469257640838623, 0.014452704429626465, 0.014817024230957031, 0.014450592041015625, 0.01417840003967285, 0.014086015701293945, 0.014065567970275878, 0.014026271820068359, 0.014026464462280274, 0.015296640396118165, 0.014477791786193848, 0.014128928184509277, 0.014023263931274415, 0.014018560409545898, 0.014032383918762208, 0.014079936027526855, 0.014104415893554687, 0.014025440216064453, 0.014046496391296386, 0.014117919921875, 0.014024383544921875, 0.014055423736572266, 0.014075424194335938, 0.014047360420227051, 0.01402454376220703, 0.013963775634765625, 0.013973600387573242, 0.014350239753723144, 0.01508134365081787, 0.014106016159057617, 0.014129887580871582, 0.01408777618408203, 0.014105024337768555, 0.014079520225524902, 0.013983391761779785, 0.014043935775756836, 0.014028960227966309, 0.014124447822570801, 0.014025152206420899, 0.014198304176330567, 0.014222880363464356, 0.014039008140563965, 0.014123264312744141, 0.014220000267028809, 0.014108672142028808, 0.014016511917114258, 0.014035136222839355, 0.01401427173614502, 0.014018176078796386, 0.014084735870361327, 0.01406339168548584, 0.014059488296508788, 0.014071200370788574, 0.01386291217803955, 0.014173376083374023, 0.014082271575927735, 0.014127712249755859, 0.014161760330200196, 0.014061183929443359, 0.014037088394165039, 0.014043583869934081, 0.014050592422485351, 0.014039775848388673, 0.014083456039428711, 0.014431872367858888, 0.014441535949707032, 0.014268351554870605, 0.014202624320983888, 0.014137760162353515, 0.014128992080688477, 0.014130559921264649, 0.014399135589599609, 0.014097375869750977, 0.014030336380004883, 0.013959296226501465, 0.01410431957244873, 0.0140447998046875, 0.014039584159851074, 0.01405561637878418, 0.014010656356811523, 0.01405951976776123, 0.014006272315979004, 0.013998016357421875, 0.013945952415466308, 0.014006303787231446, 0.013957823753356934, 0.014144031524658204, 0.014010080337524414, 0.014006272315979004, 0.014194687843322755, 0.013977919578552246, 0.014042816162109374, 0.013998016357421875, 0.013971712112426758, 0.013969152450561524, 0.013995903968811035, 0.014178079605102539, 0.014048831939697266, 0.013976384162902832, 0.013971487998962402, 0.013957471847534179, 0.013950719833374023, 0.01400432014465332, 0.013991647720336915, 0.014034527778625489, 0.01399244785308838, 0.014034943580627441, 0.014081760406494141, 0.013992511749267578, 0.014067423820495606, 0.013996031761169434, 0.014124608039855957, 0.013943039894104003, 0.014265888214111328, 0.014072575569152833, 0.01404099178314209, 0.013705216407775878, 0.013993984222412109, 0.014022656440734863, 0.014091967582702637, 0.014024928092956543, 0.013994208335876465, 0.015333248138427734, 0.015945376396179198, 0.014149984359741212, 0.014344256401062011, 0.014094176292419434, 0.014086239814758301, 0.014098591804504394, 0.01409334373474121, 0.014153920173645019, 0.014031231880187988, 0.014024895668029785, 0.013975616455078124, 0.014018879890441895, 0.0140665283203125, 0.014123871803283691, 0.014323967933654785, 0.014384767532348634, 0.014634943962097169, 0.014461119651794434, 0.014378911972045898, 0.014135392189025878, 
0.014046815872192383, 0.014342559814453124, 0.01435871982574463, 0.014323583602905273, 0.014325792312622071, 0.014367679595947265, 0.014187104225158691, 0.013982080459594726, 0.014112447738647461, 0.013962592124938965, 0.014064703941345215, 0.014106528282165527, 0.014004128456115723, 0.014004447937011718, 0.013983872413635254, 0.014070528030395508, 0.014027423858642578, 0.013969759941101074, 0.014007648468017579, 0.013964096069335937, 0.014044511795043945, 0.014043231964111329, 0.013999615669250488, 0.014787487983703614, 0.014262304306030274, 0.01410041618347168, 0.014104384422302246, 0.01396947193145752, 0.013938976287841796, 0.013973055839538574, 0.013988384246826172, 0.013999903678894043, 0.01399625587463379, 0.01406982421875, 0.014018272399902344, 0.014042783737182617, 0.013791007995605469, 0.013967295646667481, 0.014094400405883788, 0.013979104042053223, 0.013959712028503417, 0.013987839698791504, 0.013993568420410156, 0.01402239990234375, 0.014066335678100585, 0.014106623649597168, 0.013996031761169434, 0.014056447982788087, 0.014048255920410157, 0.014061471939086915, 0.013969280242919923, 0.014201120376586914, 0.013973535537719727, 0.01405942440032959, 0.013983776092529297, 0.013928064346313477, 0.014922080039978027, 0.016748191833496094, 0.014385503768920898, 0.014065567970275878, 0.014233695983886718, 0.01402012825012207, 0.014092800140380859, 0.014090463638305665, 0.014018303871154785, 0.013991935729980469, 0.014665727615356445, 0.014564640045166015, 0.014432095527648925, 0.014420864105224609, 0.014112223625183105, 0.014009119987487793, 0.014083840370178223, 0.013952383995056153, 0.014029567718505859, 0.014201919555664062, 0.014039872169494629, 0.013954719543457032, 0.014017087936401367, 0.014061344146728516, 0.01420083236694336, 0.013987839698791504, 0.013999551773071288, 0.013992128372192382, 0.014063136100769043, 0.014089216232299804, 0.01418777561187744, 0.014074463844299317, 0.013979647636413574, 0.013957119941711426, 0.0140163516998291, 0.014060768127441406, 0.014157983779907226, 0.013921055793762207, 0.014238975524902343, 0.013939135551452637, 0.013997920036315918, 0.013967904090881347, 0.014128447532653809, 0.01386083221435547, 0.014256223678588868, 0.014198975563049316, 0.01406156826019287, 0.014221311569213867, 0.01419264030456543, 0.014246047973632813, 0.014153568267822266, 0.014255328178405762, 0.014387392044067382, 0.01397219181060791, 0.014101792335510253, 0.014397407531738281, 0.014566271781921387, 0.014522047996520996, 0.01458182430267334, 0.014434304237365723, 0.014298879623413085, 0.014105919837951661, 0.014295167922973633, 0.01434659194946289, 0.014049759864807129, 0.014073856353759765, 0.013956159591674804, 0.014205663681030274, 0.014165535926818847, 0.014074591636657715, 0.014226431846618653, 0.014021120071411132, 0.01415824031829834, 0.014100831985473633, 0.014031935691833496, 0.014113439559936523, 0.014120800018310547, 0.014221471786499023, 0.014362784385681152, 0.014469247817993164, 0.014696352005004883, 0.01445628833770752, 0.01422332763671875, 0.014081664085388183, 0.014297856330871581, 0.014440447807312011, 0.01448755168914795, 0.014644351959228516, 0.01433289623260498, 0.014362527847290038, 0.014151264190673828, 0.01418073558807373, 0.014034496307373048, 0.01412758445739746, 0.014075551986694336, 0.014004704475402831, 0.014026623725891113, 0.014032447814941406, 0.014027520179748535, 0.014223135948181153, 0.014187487602233886, 0.013972160339355468, 0.01406390380859375, 0.013954400062561035, 0.01399180793762207, 
0.014150400161743164]",tokens/s,70.7667329936746,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1034.113024,896.466944,0.0,501.219328,495.906816,s,1,8.51478515625,8.51478515625,0.0,8.51478515625,8.51478515625,8.51478515625,8.51478515625,[8.51478515625],,kWh,3.898841823337685e-05,4.293543847427892e-06,1.479251183399144e-05,5.807447391479619e-05,,MB,1263.230976,1060.0448,0.0,652.214272,602.748928,s,10,0.5001668472290038,0.050016684722900384,0.0006455890622503593,0.05000513458251953,0.05044609451293945,0.05104636001586914,0.05152657241821289,"[0.051646625518798825, 0.0493304328918457, 0.05031270217895508, 0.04941177749633789, 0.04983046340942383, 0.05011782455444336, 0.050098175048828124, 0.04931433486938477, 0.05019241714477539, 0.04991209411621094]",tokens/s,5118.292054307013,kWh,1.6843767835250786e-06,1.8575746518989712e-07,1.1239153308045111e-06,2.9940495795194866e-06,tokens/kWh,85502926.12091123,MB,1274.0608,1072.627712,0.0,664.797184,611.073536,s,10,19.49507995605469,1.9495079956054688,0.008052663444680344,1.9501111450195312,1.95942490234375,1.9622285034179687,1.9644713842773438,"[1.943477783203125, 1.950402099609375, 1.9455321044921876, 1.94429345703125, 1.9650321044921875, 1.9343731689453125, 1.9498201904296875, 1.952763916015625, 1.950583251953125, 1.9588018798828124]",tokens/s,32.31584591702778,kWh,5.653039424814239e-05,6.235005340874565e-06,2.4545577426994328e-05,8.73109770160113e-05,tokens/kWh,721558.7564487671,,s,630,19.488627748489375,0.03093432975950695,0.00047099883001466136,0.030900239944458008,0.03128377838134765,0.03153155012130737,0.03251617794036866,"[0.030343807220458985, 0.030756256103515626, 0.030570560455322266, 0.03055436706542969, 0.030871328353881837, 0.030700031280517577, 0.0306376953125, 0.030679264068603516, 0.03081222343444824, 0.03073606491088867, 0.031139360427856446, 0.03132620811462403, 0.031193216323852538, 0.031150335311889647, 0.03108710479736328, 0.031033599853515625, 0.03110268783569336, 0.031127071380615233, 0.03115804862976074, 0.031146432876586912, 0.030974239349365235, 0.031096895217895507, 0.03115001678466797, 0.030936864852905272, 0.031117408752441407, 0.03095680046081543, 0.030896703720092775, 0.03082271957397461, 0.031285247802734374, 0.03117670440673828, 0.030920703887939452, 0.03126095962524414, 0.03137212753295898, 0.030980928421020508, 0.031188512802124025, 0.031017536163330077, 0.031174495697021486, 0.030943199157714842, 0.030628223419189454, 0.03064950370788574, 0.030726783752441405, 0.031016960144042968, 0.030899295806884764, 0.030702495574951173, 0.030852127075195312, 0.030476383209228516, 0.030619808197021484, 0.030427423477172852, 0.03063974380493164, 0.03093996810913086, 0.03077292823791504, 0.030635871887207032, 0.030361408233642577, 0.03044528007507324, 
0.030431648254394532, 0.030335519790649416, 0.03032899284362793, 0.030398591995239258, 0.03070742416381836, 0.03062182426452637, 0.030913471221923828, 0.03064841651916504, 0.03034339141845703, 0.029878335952758787, 0.03026527976989746, 0.030281728744506835, 0.030460895538330077, 0.03057472038269043, 0.03077884864807129, 0.030730079650878907, 0.030908384323120118, 0.031005279541015625, 0.03101487922668457, 0.030797248840332032, 0.031169055938720703, 0.03091872024536133, 0.03077324867248535, 0.030705919265747072, 0.030643903732299804, 0.030431455612182617, 0.03070876884460449, 0.030572479248046874, 0.03080691146850586, 0.03053379249572754, 0.030657440185546874, 0.03084998321533203, 0.030838783264160157, 0.030922752380371094, 0.03095043182373047, 0.030592992782592775, 0.030911487579345705, 0.030836063385009764, 0.030855199813842774, 0.03075369644165039, 0.030694751739501952, 0.030964096069335936, 0.030838783264160157, 0.03086966323852539, 0.030853023529052736, 0.030711872100830078, 0.030502784729003907, 0.030709600448608397, 0.032264350891113285, 0.03134259223937988, 0.03126275253295899, 0.03153033638000488, 0.03171622467041016, 0.03134623908996582, 0.03129567909240723, 0.03105699157714844, 0.031114143371582033, 0.031226144790649415, 0.031403743743896484, 0.031104671478271485, 0.031265312194824216, 0.031493951797485355, 0.031135744094848632, 0.03127055931091308, 0.031166816711425783, 0.03132204818725586, 0.031174720764160155, 0.03120947265625, 0.031145280838012695, 0.031247039794921876, 0.03126681518554687, 0.03108803176879883, 0.030557695388793944, 0.03103753662109375, 0.030960031509399414, 0.030689504623413084, 0.030522239685058593, 0.030468576431274413, 0.030736127853393556, 0.030843711853027343, 0.031109216690063477, 0.031102752685546874, 0.031061599731445313, 0.031260608673095706, 0.030815935134887694, 0.030866336822509766, 0.030729728698730467, 0.030974464416503908, 0.030778303146362304, 0.030837696075439455, 0.030838016510009766, 0.0310296630859375, 0.031043071746826172, 0.031097696304321288, 0.030936800003051757, 0.03071151924133301, 0.030609983444213867, 0.030631935119628906, 0.03077292823791504, 0.030665023803710938, 0.030878944396972655, 0.03068582344055176, 0.03072630310058594, 0.030885887145996094, 0.030814016342163086, 0.030638208389282228, 0.030535743713378905, 0.030629728317260744, 0.03091676712036133, 0.030863359451293947, 0.030859039306640624, 0.030869375228881835, 0.030660415649414064, 0.03094748878479004, 0.031091167449951173, 0.031217439651489258, 0.031006816864013673, 0.031031200408935547, 0.030834144592285156, 0.030691999435424805, 0.03069856071472168, 0.03181216049194336, 0.030906688690185546, 0.030663839340209963, 0.030935136795043946, 0.030845760345458984, 0.03077324867248535, 0.03074470329284668, 0.03104140853881836, 0.031016191482543944, 0.031127456665039063, 0.03132912063598633, 0.031037311553955078, 0.03082048034667969, 0.030684511184692384, 0.030762239456176756, 0.03256291198730469, 0.030993919372558593, 0.03093494415283203, 0.030965375900268554, 0.031144927978515625, 0.03088108825683594, 0.030974655151367186, 0.030904319763183592, 0.031124639511108398, 0.0310994873046875, 0.031222143173217774, 0.03086310386657715, 0.030898303985595704, 0.03080188751220703, 0.030816287994384767, 0.03092889595031738, 0.030879232406616212, 0.030962175369262695, 0.03092403221130371, 0.030794496536254882, 0.030906368255615234, 0.030963712692260743, 0.03096780776977539, 0.03091059112548828, 0.030967775344848632, 0.030971839904785158, 0.03103331184387207, 0.030873407363891603, 
0.030791711807250977, 0.030838111877441406, 0.03080054473876953, 0.030838752746582033, 0.030922239303588867, 0.030503423690795898, 0.030428672790527345, 0.0303438720703125, 0.030650367736816408, 0.03057663917541504, 0.030502239227294923, 0.030517919540405274, 0.030604576110839842, 0.030962207794189452, 0.030849023818969725, 0.03087311935424805, 0.03055580711364746, 0.030692447662353517, 0.030830495834350585, 0.030689279556274415, 0.030611135482788085, 0.03173587226867676, 0.03076531219482422, 0.03087366485595703, 0.030828447341918946, 0.03026323127746582, 0.030242912292480467, 0.030574304580688477, 0.030661151885986327, 0.03078118324279785, 0.030902271270751954, 0.03068262481689453, 0.03101136016845703, 0.030898399353027343, 0.030267967224121093, 0.030699520111083983, 0.030521568298339845, 0.030525503158569337, 0.0308121280670166, 0.030477439880371094, 0.030617536544799803, 0.030595232009887695, 0.030580543518066407, 0.030467071533203126, 0.030760608673095702, 0.03096384048461914, 0.030754751205444335, 0.030699520111083983, 0.03192831993103027, 0.030992191314697267, 0.03100492858886719, 0.03101308822631836, 0.030852832794189454, 0.030719263076782227, 0.030828351974487304, 0.030847808837890626, 0.03075872039794922, 0.030837087631225585, 0.031028671264648436, 0.03138227272033691, 0.03107200050354004, 0.03103878402709961, 0.03118707275390625, 0.031014463424682618, 0.031062303543090822, 0.03132086372375488, 0.03112259292602539, 0.031093536376953126, 0.031170656204223633, 0.03551132965087891, 0.03143564796447754, 0.03162112045288086, 0.03134623908996582, 0.03117251205444336, 0.0312073917388916, 0.031192928314208983, 0.03135094451904297, 0.031537120819091796, 0.03128361511230469, 0.031188447952270507, 0.031184640884399414, 0.03108348846435547, 0.031131807327270507, 0.03119705581665039, 0.031225791931152345, 0.03203702545166016, 0.0323680305480957, 0.031262624740600584, 0.03120185661315918, 0.03298099136352539, 0.03239116668701172, 0.03138764762878418, 0.031143936157226562, 0.030988447189331053, 0.03105366325378418, 0.03102720069885254, 0.0308787841796875, 0.03059916877746582, 0.0306177921295166, 0.030410560607910156, 0.0302508487701416, 0.03045187187194824, 0.030685407638549805, 0.030621471405029296, 0.030703264236450194, 0.030853471755981444, 0.030887935638427736, 0.030896127700805662, 0.03119308853149414, 0.030693376541137695, 0.03059507179260254, 0.030447071075439452, 0.03043587112426758, 0.030570207595825197, 0.03083625602722168, 0.030611295700073243, 0.03100048065185547, 0.030671327590942384, 0.03047644805908203, 0.03050739288330078, 0.030447551727294922, 0.03060108757019043, 0.03079360008239746, 0.030814079284667967, 0.030913120269775392, 0.030797664642333984, 0.030961568832397462, 0.03092246437072754, 0.0306876163482666, 0.030600223541259765, 0.030958560943603514, 0.03052297592163086, 0.030402687072753905, 0.030517248153686522, 0.03038547134399414, 0.030519775390625, 0.03048899269104004, 0.03037808036804199, 0.030533344268798827, 0.030772607803344728, 0.031037984848022462, 0.03093507194519043, 0.030755456924438478, 0.030671680450439453, 0.030516128540039062, 0.030437376022338865, 0.03042323112487793, 0.030847999572753908, 0.030622528076171874, 0.030482528686523437, 0.03051852798461914, 0.030708192825317383, 0.030884031295776368, 0.031065088272094726, 0.031108095169067384, 0.030907712936401367, 0.03095577621459961, 0.031095232009887695, 0.030842912673950194, 0.03085001564025879, 0.030591840744018554, 0.031159679412841798, 0.031204256057739257, 0.031077280044555664, 0.031122400283813478, 
0.031301631927490234, 0.031076351165771485, 0.031160192489624025, 0.031234176635742187, 0.03106435203552246, 0.0311080322265625, 0.03204995346069336, 0.031136768341064453, 0.031075328826904298, 0.03196703910827637, 0.031111040115356446, 0.031178432464599608, 0.03144355201721191, 0.03136828804016113, 0.03090118408203125, 0.03079987144470215, 0.030881792068481444, 0.0307957763671875, 0.03075200080871582, 0.031040512084960937, 0.03083852767944336, 0.03080601692199707, 0.03087068748474121, 0.03080303955078125, 0.03090127944946289, 0.03081113624572754, 0.03111292839050293, 0.03087343978881836, 0.030959775924682617, 0.03095756721496582, 0.030834688186645507, 0.030840896606445314, 0.030761056900024415, 0.030256383895874022, 0.031586912155151366, 0.03058585548400879, 0.03061862373352051, 0.030842880249023437, 0.031222911834716798, 0.03122617530822754, 0.03116499137878418, 0.030825471878051756, 0.03064499282836914, 0.030621631622314453, 0.030593536376953126, 0.030682592391967772, 0.03052169609069824, 0.03070899200439453, 0.030946271896362305, 0.03076803207397461, 0.03064678382873535, 0.03055449676513672, 0.030420223236083985, 0.03071183967590332, 0.030978559494018554, 0.030659040451049804, 0.030674688339233397, 0.030757055282592774, 0.03003798484802246, 0.030630048751831056, 0.03024185562133789, 0.0302807674407959, 0.03052649688720703, 0.03099679946899414, 0.030955615997314452, 0.03085139274597168, 0.030644224166870116, 0.030588512420654298, 0.030699392318725587, 0.03125283241271973, 0.03123628807067871, 0.03110028839111328, 0.03104217529296875, 0.03097395133972168, 0.030979103088378906, 0.03112063980102539, 0.03144470405578613, 0.0310435848236084, 0.031104927062988282, 0.031157535552978517, 0.03177350425720215, 0.031153535842895506, 0.031113279342651366, 0.031087167739868166, 0.031016992568969726, 0.031208703994750977, 0.031023712158203126, 0.031043935775756835, 0.031030303955078126, 0.030989055633544923, 0.031008159637451172, 0.031079008102416993, 0.031045663833618165, 0.030955360412597655, 0.03127631950378418, 0.030954336166381834, 0.03108780860900879, 0.03110393524169922, 0.03095747184753418, 0.03102921676635742, 0.031035327911376955, 0.03162732887268067, 0.03153254318237304, 0.031141696929931642, 0.030866111755371094, 0.031221759796142577, 0.030912511825561522, 0.030543872833251953, 0.030849023818969725, 0.030760959625244142, 0.030880767822265624, 0.030852096557617188, 0.031545343399047854, 0.030594911575317383, 0.030519519805908203, 0.03088777542114258, 0.03121552085876465, 0.031160512924194337, 0.031139839172363282, 0.03112550354003906, 0.03083263969421387, 0.030429407119750975, 0.030572639465332032, 0.03619110488891602, 0.030766304016113282, 0.030474143981933592, 0.030830432891845703, 0.03031449508666992, 0.030306304931640625, 0.030527360916137697, 0.030515552520751953, 0.03051024055480957, 0.03146937561035156, 0.03156265640258789, 0.031070112228393554, 0.030832799911499023, 0.030318431854248047, 0.03037183952331543, 0.0301977596282959, 0.030676992416381835, 0.030547967910766603, 0.03065353584289551, 0.03044374465942383, 0.031105728149414064, 0.03022412872314453, 0.030394399642944336, 0.030298208236694334, 0.03053683280944824, 0.03091100883483887, 0.030886112213134767, 0.030788896560668945, 0.030641120910644533, 0.03072204780578613, 0.031092384338378905, 0.03106163215637207, 0.03129743957519531, 0.03112384033203125, 0.030975872039794922, 0.031170560836791993, 0.031104864120483397, 0.03082080078125, 0.03085955238342285, 0.03102908706665039, 0.031067935943603516, 0.03157439994812012, 
0.030930240631103514, 0.031584768295288085, 0.03108268737792969, 0.03116851234436035, 0.031143936157226562, 0.031094783782958983, 0.031117536544799804, 0.03095523262023926, 0.031010496139526368, 0.03090880012512207, 0.031396127700805666, 0.030920192718505858, 0.0309967041015625, 0.031064064025878906, 0.03084492874145508, 0.030948928833007813, 0.031477792739868164, 0.031057600021362305, 0.030931808471679687, 0.030653440475463867, 0.0310732479095459, 0.031069599151611327, 0.03086409568786621, 0.03112739181518555, 0.030605600357055663, 0.03050268745422363, 0.03033238410949707, 0.030616352081298828, 0.03094259262084961, 0.03093328094482422, 0.031737951278686526, 0.031236127853393556, 0.03133225631713867, 0.03258988952636719, 0.031107072830200196, 0.03100611114501953, 0.0307042236328125, 0.031083776473999025, 0.0304289608001709, 0.030333343505859374, 0.030214719772338867, 0.031070207595825194, 0.03002979278564453, 0.03034217643737793, 0.030706687927246092, 0.03097599983215332, 0.030836896896362306, 0.031197023391723634, 0.030991840362548827, 0.03093097686767578, 0.030781919479370118, 0.03053059196472168, 0.03081113624572754, 0.03117990493774414, 0.03342015838623047, 0.030807327270507813, 0.03071049690246582, 0.03142201614379883, 0.030933439254760744, 0.03108016014099121, 0.031184288024902345, 0.0310743350982666, 0.03141087913513184, 0.031107295989990236, 0.03108803176879883, 0.03153775978088379, 0.03171526336669922, 0.03143440055847168, 0.03103984069824219, 0.031096288681030274, 0.030763776779174804, 0.030729248046875, 0.030730016708374025, 0.03132889556884766, 0.03240176010131836, 0.03319004821777344, 0.03123740768432617, 0.031318559646606445, 0.03179427146911621, 0.031036352157592772, 0.03088559913635254, 0.030777631759643556]",tokens/s,32.32654490251799,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,1097.99424,599.6544,0.0,197.132288,173.338112,s,1,9.730724609375,9.730724609375,0.0,9.730724609375,9.730724609375,9.730724609375,9.730724609375,[9.730724609375],,kWh,2.5093173962496943e-05,2.7607349090420882e-06,8.162784307999549e-06,3.601669317953858e-05,,MB,1354.747904,689.831936,0.0,266.338304,236.552704,s,10,0.3878918113708496,0.03878918113708496,0.0001387912913806202,0.03877657508850098,0.03890340423583984,0.03900224685668945,0.03908132095336914,"[0.03878271865844726, 0.03910108947753906, 0.03859228897094726, 0.03884624099731445, 0.038644351959228516, 0.03886511993408203, 0.038743968963623046, 0.038770431518554686, 0.038664161682128904, 0.038881439208984375]",tokens/s,6599.778404583217,kWh,1.1228234231891012e-06,1.238280850326079e-07,4.149351610076891e-07,1.661586669229398e-06,tokens/kWh,154069603.91583204,MB,1387.8272,704.512,0.0,281.018368,236.555264,s,10,23.4156474609375,2.34156474609375,0.006536681097138066,2.3414364013671873,2.3508436767578127,2.351442102050781,2.3519208422851565,"[2.336951904296875, 2.3323134765625, 2.35204052734375, 2.350710693359375, 2.3478916015625, 2.341781494140625, 2.34109130859375, 2.342534912109375, 2.333770751953125, 2.336560791015625]",tokens/s,26.905085629213538,kWh,6.725117126056078e-05,7.417611750352688e-06,2.1955082734992474e-05,9.662386574590594e-05,tokens/kWh,652012.828442122,,s,630,23.410602104187006,0.037159685879661924,0.00046671344816687563,0.03705315208435059,0.03757233238220215,0.03784230918884277,0.03891705593109131,"[0.03674668884277344, 0.036861759185791015, 0.03685823822021484, 0.037209758758544924, 0.036907745361328126, 0.037393951416015626, 0.036921825408935544, 0.03680665588378906, 0.03740399932861328, 0.03710006332397461, 0.036931713104248046, 0.03682918548583984, 0.036859039306640626, 0.03706752014160156, 0.03697673416137695, 0.03693129730224609, 0.036991519927978514, 0.03720569610595703, 0.03715283203125, 0.03701583862304687, 0.037287776947021484, 0.036980033874511715, 0.03725164794921875, 0.036999263763427735, 0.03693097686767578, 0.03807888031005859, 0.042203392028808594, 0.03714265441894531, 0.03739843368530273, 0.03696860885620117, 0.037016353607177734, 0.036870784759521484, 0.036982398986816406, 0.03692393493652344, 0.036775199890136716, 0.03726614379882812, 0.037198238372802735, 0.0369119987487793, 0.036938240051269534, 0.037163105010986325, 0.03694015884399414, 0.036704448699951174, 0.03677148818969726, 0.03677142333984375, 0.03689436721801758, 0.03683814239501953, 0.03703548812866211, 0.03753398513793945, 0.037275680541992186, 0.036983009338378905, 0.0369356803894043, 0.03687753677368164, 0.0368504638671875, 0.03713561630249024, 0.03684019088745117, 0.03675471878051758, 0.036711166381835934, 0.036836574554443356, 0.0367828483581543, 0.03673014450073242, 
0.036819007873535155, 0.03688710403442383, 0.03707932662963867, 0.0365362548828125, 0.03676364898681641, 0.03662649536132812, 0.03718876647949219, 0.036862239837646485, 0.03676377487182617, 0.0368021125793457, 0.03707148742675781, 0.036918846130371094, 0.03679091262817383, 0.036757503509521484, 0.03684163284301758, 0.03672777557373047, 0.0368587532043457, 0.036953182220458985, 0.03683216094970703, 0.03672057723999023, 0.03709110260009765, 0.03702403259277344, 0.03728793716430664, 0.03729103851318359, 0.03697923278808594, 0.03702131271362305, 0.03705311965942383, 0.036923393249511716, 0.03705193710327148, 0.03781078338623047, 0.03722668838500977, 0.03709478378295898, 0.03714092636108399, 0.03689846420288086, 0.0368109130859375, 0.0368416633605957, 0.03678822326660156, 0.0370497932434082, 0.03703478240966797, 0.037160831451416014, 0.0371379508972168, 0.03704991912841797, 0.03851551818847656, 0.037343616485595706, 0.03710960006713867, 0.03693340682983398, 0.03686604690551758, 0.03704435348510742, 0.037139873504638675, 0.03691772842407227, 0.036865310668945314, 0.03692179107666015, 0.03712438583374023, 0.03685171127319336, 0.036857856750488284, 0.03694736099243164, 0.03762851333618164, 0.037054462432861326, 0.03712409591674805, 0.03704022216796875, 0.03725503921508789, 0.036945953369140624, 0.036926719665527345, 0.03685968017578125, 0.03687238311767578, 0.036888607025146486, 0.03687801742553711, 0.03719750213623047, 0.0371146240234375, 0.03698451232910156, 0.036964542388916014, 0.0368221435546875, 0.037670944213867186, 0.03694063949584961, 0.03673907089233398, 0.036741119384765625, 0.036640766143798825, 0.03688608169555664, 0.03681884765625, 0.03688499069213867, 0.03682700729370117, 0.03680223846435547, 0.03754953765869141, 0.03994460678100586, 0.03836783981323242, 0.03702374267578125, 0.03680665588378906, 0.03706582260131836, 0.036952480316162106, 0.03956361770629883, 0.03785753631591797, 0.03730419158935547, 0.037115966796875, 0.03709747314453125, 0.03703395080566406, 0.03716534423828125, 0.037287647247314454, 0.0373043212890625, 0.03725516891479492, 0.03753113555908203, 0.03734089660644531, 0.0372757453918457, 0.03697939300537109, 0.03701484680175781, 0.03711011123657226, 0.0375968017578125, 0.03766684722900391, 0.037203712463378905, 0.03728060913085938, 0.03779132843017578, 0.03725568008422851, 0.03738828659057617, 0.03732073593139648, 0.03714214324951172, 0.03709203338623047, 0.03718928146362305, 0.03706880187988281, 0.037125343322753905, 0.03888412857055664, 0.038857120513916016, 0.03744112014770508, 0.037746688842773435, 0.037410816192626956, 0.03714569473266602, 0.03716150283813477, 0.037208446502685545, 0.03718348693847656, 0.0372408332824707, 0.03725680160522461, 0.03682108688354492, 0.036999263763427735, 0.037101566314697264, 0.03716505432128906, 0.03713433456420898, 0.0374889907836914, 0.0379667854309082, 0.037585952758789065, 0.03745964813232422, 0.03718963241577149, 0.03696012878417969, 0.03697881698608398, 0.036878143310546875, 0.0384040641784668, 0.03891651153564453, 0.03795084762573242, 0.03802297592163086, 0.03770966339111328, 0.037207168579101564, 0.03715139389038086, 0.03785574340820313, 0.03776899337768555, 0.03769635009765625, 0.03751923370361328, 0.037310462951660156, 0.03735551834106445, 0.03829779052734375, 0.037410655975341794, 0.037189697265625, 0.03702159881591797, 0.03719110488891601, 0.0375035514831543, 0.03766828918457031, 0.03788447952270508, 0.037168800354003904, 0.036958560943603516, 0.037367103576660156, 0.037031646728515624, 0.036915454864501956, 
0.03696543884277344, 0.03726505661010742, 0.03717030334472656, 0.03714896011352539, 0.03706531143188477, 0.03697654342651367, 0.03701699066162109, 0.037139137268066405, 0.037054462432861326, 0.037015647888183595, 0.03716495895385742, 0.03732070541381836, 0.03724095916748047, 0.036972415924072265, 0.03687366485595703, 0.03707340621948242, 0.037185440063476564, 0.037039295196533206, 0.03688137435913086, 0.03692252731323242, 0.037247390747070314, 0.0370810546875, 0.03710534286499023, 0.0370731201171875, 0.036585823059082034, 0.03700886535644531, 0.03693033599853516, 0.03700876617431641, 0.03753023910522461, 0.037804031372070314, 0.037648384094238284, 0.03739043045043945, 0.03733647918701172, 0.03731894302368164, 0.03702544021606445, 0.03710844802856445, 0.03732799911499023, 0.037079776763916016, 0.037005313873291014, 0.037875553131103516, 0.03699932861328125, 0.03705238342285156, 0.03721596908569336, 0.037075263977050785, 0.03709542465209961, 0.03695606231689453, 0.03708732986450195, 0.037335041046142575, 0.037449726104736326, 0.03754111862182617, 0.037525726318359376, 0.03715353775024414, 0.03704604721069336, 0.03729817581176758, 0.037214305877685545, 0.03716700744628906, 0.037187583923339845, 0.0372690544128418, 0.03717731094360351, 0.03707785415649414, 0.03720915222167969, 0.03760800170898437, 0.038813793182373046, 0.03756595230102539, 0.038522529602050784, 0.037219295501708986, 0.03712812805175781, 0.03707587051391602, 0.03715695953369141, 0.037081920623779296, 0.036876129150390624, 0.03709747314453125, 0.03694611358642578, 0.0371912956237793, 0.03710403060913086, 0.03766694259643555, 0.037377792358398436, 0.037419265747070315, 0.03707648086547852, 0.03695372772216797, 0.03703910446166992, 0.037134048461914065, 0.03724060821533203, 0.037658401489257816, 0.03706719970703125, 0.03712156677246094, 0.037140670776367186, 0.03662153625488281, 0.036836128234863284, 0.03685171127319336, 0.03694291305541992, 0.03720604705810547, 0.03762643051147461, 0.03741321563720703, 0.03726473617553711, 0.03705830383300781, 0.03697292709350586, 0.0369956169128418, 0.0369747200012207, 0.03713798522949219, 0.037048896789550784, 0.037738529205322266, 0.037504959106445315, 0.0371833610534668, 0.037101470947265625, 0.03694387054443359, 0.03702329635620117, 0.03702115249633789, 0.03693414306640625, 0.036996639251708985, 0.037104576110839844, 0.03714585494995117, 0.037134239196777344, 0.037157726287841794, 0.036918624877929684, 0.03693020629882812, 0.036943199157714844, 0.0370266227722168, 0.03719676971435547, 0.03855984115600586, 0.03747100830078125, 0.0377262077331543, 0.03701964950561523, 0.03703609466552735, 0.03701763153076172, 0.03704207992553711, 0.037195518493652345, 0.03714876937866211, 0.037719390869140626, 0.03754886245727539, 0.03709952163696289, 0.036948192596435545, 0.036865825653076174, 0.036896766662597655, 0.03744467163085938, 0.03743840026855469, 0.037566463470458986, 0.03748012924194336, 0.03803097534179688, 0.037122974395751955, 0.03719865417480469, 0.037045215606689455, 0.036835487365722654, 0.03680883026123047, 0.03681078338623047, 0.03678995132446289, 0.036850719451904296, 0.03726131057739258, 0.037239070892333984, 0.03704288101196289, 0.036687328338623044, 0.03699942398071289, 0.037825889587402343, 0.037157535552978516, 0.03739263916015625, 0.03772611236572266, 0.03695391845703125, 0.03704873657226562, 0.036974208831787106, 0.03701760101318359, 0.036966014862060546, 0.036837665557861325, 0.0370118408203125, 0.036826847076416015, 0.03678835296630859, 0.03702748870849609, 0.037034561157226566, 
0.03692854309082031, 0.03678835296630859, 0.036741214752197264, 0.036788383483886716, 0.03802137756347656, 0.03679609680175781, 0.03695273590087891, 0.037015201568603516, 0.03700672149658203, 0.03705014419555664, 0.03688943862915039, 0.03682515335083008, 0.037017536163330075, 0.03689648056030274, 0.03738771057128906, 0.03739449691772461, 0.03724687957763672, 0.036837406158447265, 0.03806825637817383, 0.03752022552490234, 0.037244705200195315, 0.03735174560546875, 0.037120094299316404, 0.037103710174560545, 0.037074657440185545, 0.03686313629150391, 0.03759212875366211, 0.037053184509277345, 0.03704729461669922, 0.037127967834472655, 0.03697216033935547, 0.0371492805480957, 0.037275646209716795, 0.037269504547119144, 0.03709939193725586, 0.03749427032470703, 0.03824710464477539, 0.036929473876953126, 0.03731280136108398, 0.03733187103271484, 0.03722124862670898, 0.0376649284362793, 0.03757222366333008, 0.037183647155761716, 0.036896766662597655, 0.03694944000244141, 0.036909984588623046, 0.03994291305541992, 0.038917278289794924, 0.03712659072875977, 0.037327423095703124, 0.03721526336669922, 0.03705276870727539, 0.0368985595703125, 0.03682156753540039, 0.036710273742675784, 0.03688288116455078, 0.0368721923828125, 0.036982784271240236, 0.0369758415222168, 0.03702044677734375, 0.03696156692504883, 0.037010143280029294, 0.03708927917480469, 0.03688784027099609, 0.037130977630615236, 0.03688857650756836, 0.03698006439208985, 0.0373623046875, 0.03686431884765625, 0.037461631774902346, 0.03680368041992187, 0.03974006271362305, 0.037386497497558596, 0.03757894515991211, 0.03743484878540039, 0.03722063827514648, 0.03713654327392578, 0.037203968048095705, 0.037392383575439454, 0.03774006271362305, 0.037185440063476564, 0.03707897567749024, 0.03678470230102539, 0.03704665756225586, 0.037004993438720706, 0.036953086853027346, 0.03689302444458008, 0.03731305694580078, 0.03703228759765625, 0.036893695831298826, 0.037018463134765624, 0.03713391876220703, 0.0369339828491211, 0.03757603073120117, 0.03750316619873047, 0.037075233459472653, 0.036964542388916014, 0.037005313873291014, 0.03696844863891602, 0.0367836799621582, 0.036784576416015624, 0.036802719116210934, 0.036732833862304685, 0.03688473510742187, 0.03696966552734375, 0.03698467254638672, 0.03689241409301758, 0.036918174743652346, 0.036703712463378904, 0.03700307083129883, 0.036716991424560544, 0.03664646530151367, 0.03665203094482422, 0.036710590362548826, 0.03664630508422852, 0.03674563217163086, 0.036890846252441406, 0.036732704162597656, 0.03671039962768555, 0.03686601638793945, 0.03722684860229492, 0.03695584106445313, 0.036841472625732424, 0.03688857650756836, 0.03683327865600586, 0.037119998931884765, 0.03725449752807617, 0.03715878295898437, 0.03706345748901367, 0.03742310333251953, 0.03730950546264648, 0.03711884689331055, 0.03687984085083008, 0.03694371032714844, 0.03714534378051758, 0.03703807830810547, 0.036795391082763675, 0.036695072174072266, 0.0370810546875, 0.036833633422851564, 0.036861534118652346, 0.03698211288452148, 0.036843711853027344, 0.03688652801513672, 0.03709801483154297, 0.036873313903808595, 0.0369060173034668, 0.036947742462158206, 0.03729436874389649, 0.037784801483154294, 0.038844863891601564, 0.03695443344116211, 0.03685353469848633, 0.03692563247680664, 0.03679199981689453, 0.036931678771972655, 0.03693328094482422, 0.03692544174194336, 0.03777990341186523, 0.039561088562011716, 0.03712614440917969, 0.03732902526855469, 0.03718963241577149, 0.036978496551513675, 0.03700345611572266, 0.03705855941772461, 
0.03685990524291992, 0.03670425415039062, 0.03671376037597656, 0.03681148910522461, 0.036880382537841795, 0.036773887634277344, 0.03696227264404297, 0.036886463165283205, 0.03691020965576172, 0.03682144165039063, 0.03670809555053711, 0.036703006744384765, 0.03700940704345703, 0.03707289505004883, 0.03678956985473633, 0.03669881439208984, 0.03687452697753906, 0.036818527221679685, 0.03679782485961914, 0.03682374572753906, 0.037142593383789065, 0.0370055046081543, 0.03724006271362305, 0.03689865493774414, 0.03692345428466797, 0.0371800651550293, 0.03720710372924805, 0.03712636947631836, 0.03719446563720703, 0.03721401596069336, 0.03717788696289062, 0.03714012908935547, 0.0373205451965332, 0.0375522575378418, 0.037158592224121094, 0.037220703125, 0.03726486587524414, 0.03700585556030273, 0.03685580825805664, 0.03692041778564453, 0.037022880554199215, 0.03708265686035156, 0.036848926544189455, 0.03701216125488281, 0.03727996826171875, 0.03711097717285156, 0.037243743896484376, 0.03693939208984375, 0.036844192504882814, 0.036824031829833986, 0.03743801498413086, 0.037355712890625, 0.038379199981689455, 0.03738662338256836, 0.0369870719909668, 0.03688832092285156, 0.03681689453125, 0.036927680969238284, 0.03737497711181641, 0.037079551696777346, 0.03805964660644531, 0.03757331085205078, 0.037066368103027346, 0.03707638549804688, 0.036909950256347655, 0.03680624008178711, 0.03695052719116211, 0.03739033508300781]",tokens/s,26.910884102691394,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ 
= backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) 
# type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,846.307328,543.031296,0.0,140.509184,133.641728,s,1,9.6094267578125,9.6094267578125,0.0,9.6094267578125,9.6094267578125,9.6094267578125,9.6094267578125,[9.6094267578125],,kWh,1.776063388749994e-05,1.951986212470339e-06,5.3711154079996715e-06,2.508373550796995e-05,,MB,1320.640512,647.888896,0.0,232.783872,198.57152,s,10,0.25864601325988773,0.025864601325988768,0.00019516895404560067,0.025826735496520994,0.026136172103881836,0.02623083820343018,0.026306571083068848,"[0.026115135192871095, 0.025827295303344728, 0.02582428741455078, 0.025833919525146486, 0.026325504302978517, 0.025830848693847656, 0.02563999938964844, 0.025741119384765625, 0.025681728363037108, 0.025826175689697264]",tokens/s,9897.697504533775,kWh,7.709288500549649e-07,8.501983695481537e-08,4.949043830237466e-07,1.3508530700335267e-06,tokens/kWh,189509877.63135955,MB,1353.240576,660.471808,0.0,245.366784,198.57408,s,10,11.553272094726562,1.1553272094726563,0.003461870168238179,1.1548450317382812,1.1585695922851562,1.1607751525878907,1.1625396008300781,"[1.152549560546875, 1.1563560791015626, 1.1579696044921874, 1.15507666015625, 1.1580794677734374, 1.162980712890625, 1.15140283203125, 1.152963623046875, 1.1512801513671875, 1.1546134033203126]",tokens/s,54.53000629038768,kWh,3.4335442517028374e-05,3.7867117267516914e-06,1.208980012917634e-05,5.021195437295641e-05,tokens/kWh,1254681.296251856,,s,630,11.548340072631836,0.018330698527987042,0.0004452735056958193,0.018254208564758304,0.01847313232421875,0.018655440616607667,0.01991246801376343,"[0.018054304122924806, 0.018357023239135743, 0.018503040313720704, 0.018324352264404296, 0.018351839065551757, 0.018372608184814454, 0.01825382423400879, 0.018307071685791015, 0.018266111373901366, 0.018288480758666993, 0.01825584030151367, 0.018318527221679686, 0.018301952362060548, 0.018233343124389647, 0.018368831634521486, 0.018239168167114257, 0.018345983505249023, 0.018292736053466797, 0.018354175567626953, 0.018345983505249023, 0.018386943817138672, 0.018292736053466797, 0.018255456924438477, 0.01829520034790039, 0.018237279891967773, 0.018141056060791017, 0.01824550437927246, 0.018127264022827147, 0.018339839935302735, 0.01829596710205078, 0.018267040252685548, 0.018192319869995116, 0.01826323127746582, 0.018213695526123046, 0.018433759689331055, 
0.018253856658935547, 0.018333951950073243, 0.01829631996154785, 0.018306751251220704, 0.018262624740600586, 0.018269920349121095, 0.018180192947387694, 0.018256288528442383, 0.018628671646118165, 0.018380352020263672, 0.018316736221313478, 0.018301664352416994, 0.01822764778137207, 0.018339616775512695, 0.018267616271972657, 0.0182873592376709, 0.018294559478759766, 0.018298879623413086, 0.01815894317626953, 0.018236064910888673, 0.018192384719848635, 0.01825388717651367, 0.018464704513549805, 0.018251455307006836, 0.01822502326965332, 0.01826860809326172, 0.01825312042236328, 0.01815622329711914, 0.01791440010070801, 0.018190528869628905, 0.018303167343139647, 0.01868796730041504, 0.020813823699951172, 0.018316736221313478, 0.01822572708129883, 0.018259967803955078, 0.01822105598449707, 0.018237728118896485, 0.018251775741577148, 0.01817964744567871, 0.01825606346130371, 0.018155487060546874, 0.01823744010925293, 0.018249727249145507, 0.01833942413330078, 0.018205408096313477, 0.018242687225341798, 0.018166208267211915, 0.01822528076171875, 0.018212095260620117, 0.018158559799194337, 0.018328384399414064, 0.018300064086914064, 0.018615392684936522, 0.018392192840576173, 0.018402912139892577, 0.018485248565673826, 0.018360511779785156, 0.01834783935546875, 0.018305023193359374, 0.018163711547851562, 0.018577407836914063, 0.018542591094970702, 0.01851011276245117, 0.019694496154785156, 0.01834102439880371, 0.01843404769897461, 0.0184050235748291, 0.018312543869018556, 0.018245567321777345, 0.018184928894042968, 0.018202720642089845, 0.01821891212463379, 0.018222431182861328, 0.018221727371215822, 0.018219200134277344, 0.018300735473632812, 0.018362367630004883, 0.018255872726440428, 0.01839308738708496, 0.018183263778686523, 0.01823785591125488, 0.018227552413940428, 0.018245792388916014, 0.018265535354614258, 0.018200544357299803, 0.0181943359375, 0.018222015380859376, 0.01825152015686035, 0.01824278450012207, 0.01822425651550293, 0.01801456069946289, 0.01827516746520996, 0.01817478370666504, 0.01825152015686035, 0.0181824951171875, 0.0181759033203125, 0.01826233673095703, 0.018236928939819336, 0.018220287322998047, 0.018215839385986327, 0.018249759674072264, 0.01818992042541504, 0.018266592025756836, 0.018330944061279296, 0.01852070426940918, 0.018354175567626953, 0.01831920051574707, 0.01821251106262207, 0.018284128189086913, 0.01830169677734375, 0.01836867141723633, 0.018505151748657227, 0.01841619110107422, 0.018521728515625, 0.018880895614624024, 0.0186200008392334, 0.018745439529418945, 0.018265888214111327, 0.018258016586303712, 0.018254079818725587, 0.01828166389465332, 0.01824278450012207, 0.01818169593811035, 0.018192703247070313, 0.018255680084228516, 0.01825596809387207, 0.018267744064331053, 0.018440704345703125, 0.01822710418701172, 0.01817804718017578, 0.01824358367919922, 0.01832054328918457, 0.018189088821411133, 0.01826959991455078, 0.01843062400817871, 0.021393728256225587, 0.01957036781311035, 0.018534400939941405, 0.018341087341308595, 0.018253759384155275, 0.018288511276245117, 0.018246784210205078, 0.018310047149658202, 0.01817900848388672, 0.01818649673461914, 0.018181888580322266, 0.01819443130493164, 0.018332672119140626, 0.01827507209777832, 0.018229503631591797, 0.01838489532470703, 0.0183767032623291, 0.018362495422363283, 0.01798294448852539, 0.018161983489990235, 0.01819660758972168, 0.01816419219970703, 0.018226591110229493, 0.018188831329345703, 0.018165727615356446, 0.018111936569213866, 0.018092575073242186, 0.019357120513916016, 0.018253728866577147, 
0.01835078430175781, 0.01841334342956543, 0.018274431228637696, 0.018458719253540038, 0.018935712814331054, 0.018368480682373046, 0.018356704711914064, 0.018277599334716798, 0.01824198341369629, 0.018415615081787108, 0.018190336227416993, 0.01837571144104004, 0.01825686454772949, 0.01827849578857422, 0.018174175262451173, 0.01826140785217285, 0.018474687576293947, 0.0182126407623291, 0.018214048385620116, 0.018173599243164064, 0.018478239059448242, 0.01822812843322754, 0.018182079315185548, 0.01815951919555664, 0.018139039993286133, 0.018272480010986327, 0.018265151977539064, 0.01820729637145996, 0.018119007110595702, 0.01819647979736328, 0.018054208755493163, 0.018131103515625, 0.01817475128173828, 0.018106208801269532, 0.0181429443359375, 0.018127552032470705, 0.018038528442382813, 0.018231168746948242, 0.0181711368560791, 0.01818227195739746, 0.018154239654541014, 0.018163328170776368, 0.018114944458007813, 0.01810806465148926, 0.018246368408203126, 0.018129823684692382, 0.018178112030029298, 0.018149887084960938, 0.018579391479492186, 0.018400800704956054, 0.018864831924438476, 0.022125856399536133, 0.018144287109375, 0.01845347213745117, 0.018322816848754885, 0.018655872344970702, 0.01849100875854492, 0.020151968002319335, 0.01832419204711914, 0.01833580780029297, 0.018263999938964843, 0.018206880569458007, 0.01815331268310547, 0.018288639068603514, 0.018110464096069336, 0.018136159896850586, 0.018094783782958986, 0.018039039611816406, 0.01824355125427246, 0.018535488128662108, 0.018184928894042968, 0.01803651237487793, 0.018166208267211915, 0.018198848724365235, 0.018140863418579102, 0.018909183502197266, 0.01821286392211914, 0.018181440353393554, 0.018122880935668946, 0.018183008193969726, 0.01810812759399414, 0.018132768630981445, 0.018107776641845704, 0.0181724796295166, 0.018210208892822266, 0.018124704360961915, 0.018109407424926758, 0.018032480239868164, 0.0186549129486084, 0.019876319885253905, 0.018789920806884765, 0.018323936462402345, 0.018337984085083008, 0.018298688888549804, 0.018207935333251952, 0.01817888069152832, 0.018736864089965822, 0.018713951110839844, 0.01863484764099121, 0.018274463653564454, 0.018264511108398437, 0.019642112731933593, 0.018292415618896486, 0.018254655838012696, 0.01822345542907715, 0.018285440444946288, 0.01826076889038086, 0.018274303436279296, 0.018266143798828124, 0.0182413444519043, 0.018320831298828125, 0.018303808212280274, 0.01879849624633789, 0.018487295150756835, 0.018378751754760742, 0.018137216567993164, 0.01831513595581055, 0.01813657569885254, 0.018463232040405272, 0.018207008361816407, 0.018218719482421875, 0.018187711715698242, 0.018758432388305664, 0.018269983291625977, 0.018227552413940428, 0.018253471374511717, 0.018237152099609376, 0.01821731185913086, 0.018208703994750976, 0.01818979263305664, 0.018307231903076173, 0.018243967056274416, 0.018251775741577148, 0.01822105598449707, 0.01819267272949219, 0.018220479965209962, 0.018182079315185548, 0.018298656463623046, 0.018184768676757813, 0.018290271759033205, 0.018186656951904297, 0.018249504089355467, 0.01820899200439453, 0.018328927993774415, 0.025959104537963868, 0.01992723274230957, 0.018276416778564453, 0.01824880027770996, 0.018235904693603516, 0.018321855545043945, 0.018275487899780275, 0.018307775497436524, 0.018261344909667968, 0.01851852798461914, 0.018317312240600587, 0.01829478454589844, 0.018256095886230467, 0.018214239120483398, 0.018222944259643555, 0.018188896179199218, 0.018241535186767577, 0.018175968170166014, 0.01859328079223633, 0.01840937614440918, 
0.018452863693237304, 0.018855360031127928, 0.01892639923095703, 0.018441823959350585, 0.01837651252746582, 0.018301376342773436, 0.018367712020874023, 0.01845894432067871, 0.018317663192749023, 0.01825430488586426, 0.018210687637329102, 0.018324928283691408, 0.018241439819335938, 0.01834185600280762, 0.01789952087402344, 0.018305023193359374, 0.018413248062133788, 0.01829305648803711, 0.01817526435852051, 0.018201311111450194, 0.018204639434814453, 0.018126752853393553, 0.018280576705932618, 0.0181711368560791, 0.01820953559875488, 0.01817635154724121, 0.01822480010986328, 0.018196800231933593, 0.018339391708374023, 0.018270463943481446, 0.01827199935913086, 0.018275936126708983, 0.018257503509521485, 0.01817081642150879, 0.018266143798828124, 0.018249504089355467, 0.01826646423339844, 0.018181983947753905, 0.018237344741821288, 0.018256256103515625, 0.018314239501953124, 0.01822604751586914, 0.018202239990234376, 0.01824812889099121, 0.018270336151123046, 0.018226207733154295, 0.018172639846801758, 0.018285503387451173, 0.01816268730163574, 0.018251968383789063, 0.01819196891784668, 0.018198591232299804, 0.0181549129486084, 0.018301664352416994, 0.01891360092163086, 0.018857791900634767, 0.01845030403137207, 0.018382848739624022, 0.018366464614868162, 0.018413087844848634, 0.01825430488586426, 0.018263519287109373, 0.0182891845703125, 0.018259359359741212, 0.018313823699951173, 0.01823539161682129, 0.01826793670654297, 0.01818022346496582, 0.018276384353637695, 0.01835139274597168, 0.018170143127441408, 0.018309247970581054, 0.018248064041137695, 0.018282047271728517, 0.018180320739746094, 0.018238880157470702, 0.018278783798217773, 0.017948160171508788, 0.018234560012817383, 0.01827561569213867, 0.018188543319702148, 0.018213119506835938, 0.018153120040893554, 0.018151679992675782, 0.018172319412231446, 0.01814463996887207, 0.0181265926361084, 0.018166112899780273, 0.018086015701293947, 0.018251455307006836, 0.018269535064697265, 0.018200767517089843, 0.018244384765625, 0.018245983123779296, 0.018194080352783203, 0.0181878719329834, 0.01832102394104004, 0.0182728328704834, 0.018111007690429688, 0.018147008895874023, 0.01868185615539551, 0.01857535934448242, 0.018138944625854494, 0.01836432075500488, 0.018254112243652344, 0.01840662384033203, 0.018254623413085938, 0.018853216171264647, 0.018293344497680664, 0.018209087371826173, 0.018177791595458983, 0.018231168746948242, 0.018278528213500976, 0.018329023361206054, 0.01836294364929199, 0.01823321533203125, 0.01822105598449707, 0.01817635154724121, 0.018279264450073242, 0.01814009666442871, 0.01821436882019043, 0.018104639053344727, 0.018251327514648436, 0.019069599151611327, 0.019619840621948242, 0.018540544509887694, 0.018472959518432617, 0.01835212707519531, 0.01839923286437988, 0.018387264251708984, 0.01817184066772461, 0.01819321632385254, 0.01820979118347168, 0.018286239624023436, 0.018178335189819338, 0.018305023193359374, 0.018300256729125976, 0.018238111495971678, 0.018216447830200197, 0.018217695236206054, 0.01790086364746094, 0.018254528045654295, 0.018184064865112304, 0.018226816177368165, 0.018261568069458008, 0.018197439193725587, 0.01819647979736328, 0.01822719955444336, 0.01828000068664551, 0.018364927291870118, 0.018589632034301758, 0.018530431747436522, 0.018478239059448242, 0.01840208053588867, 0.018384063720703125, 0.018327423095703125, 0.01825702476501465, 0.018171648025512695, 0.01824300765991211, 0.018249759674072264, 0.018280607223510742, 0.018196863174438478, 0.018276351928710938, 0.018143232345581056, 
0.01824083137512207, 0.01828518486022949, 0.018248960494995116, 0.01817683219909668, 0.018405376434326173, 0.018243295669555664, 0.018120447158813478, 0.01826255989074707, 0.01824947166442871, 0.018402816772460938, 0.018418432235717774, 0.018223104476928712, 0.01835795211791992, 0.018191776275634765, 0.018250656127929688, 0.018203903198242187, 0.018248064041137695, 0.018249216079711913, 0.018207359313964843, 0.018253599166870117, 0.01821539115905762, 0.018214912414550782, 0.018177824020385744, 0.018335968017578124, 0.018298879623413086, 0.01822719955444336, 0.018225439071655275, 0.018247007369995117, 0.018307071685791015, 0.018163839340209962, 0.01822336006164551, 0.018251583099365233, 0.01826041603088379, 0.018239328384399414, 0.018371904373168945, 0.01838140869140625, 0.01825155258178711, 0.01825404739379883, 0.018264064788818358, 0.01804323196411133, 0.018273632049560548, 0.018299039840698243, 0.018214719772338867, 0.01824620819091797, 0.018288511276245117, 0.018270463943481446, 0.01820035171508789, 0.018534912109375, 0.018369983673095704, 0.018244224548339842, 0.018300575256347658, 0.018245183944702148, 0.01824403190612793, 0.01826793670654297, 0.018307167053222655, 0.018497663497924803, 0.018190336227416993, 0.018282047271728517, 0.018221504211425783, 0.018388320922851562, 0.01829750442504883, 0.018289823532104493, 0.018632896423339845, 0.018244575500488282, 0.018340896606445313, 0.018392927169799806, 0.018219839096069335, 0.018333696365356447, 0.01819443130493164, 0.018345983505249023, 0.018224416732788087, 0.018289247512817384, 0.018202560424804688, 0.018431711196899413, 0.020857311248779296, 0.01831507110595703, 0.018311552047729492, 0.01833513641357422, 0.01818441581726074, 0.01812643241882324, 0.018168415069580078, 0.01808905601501465, 0.018312255859375, 0.018159135818481446, 0.01826438331604004, 0.018210464477539063, 0.01819036865234375, 0.01830681610107422, 0.01830067253112793, 0.0183253116607666, 0.01826304054260254, 0.018257919311523436, 0.018229248046875, 0.018245759963989257, 0.018573183059692383, 0.018214015960693358, 0.01823161506652832, 0.018251424789428712, 0.01823427200317383, 0.01824799919128418, 0.018339519500732423, 0.018214080810546877]",tokens/s,54.5532947625108,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1128.30464,4944.953344,0.0,4542.431232,4484.571136,s,1,14.2267783203125,14.2267783203125,0.0,14.2267783203125,14.2267783203125,14.2267783203125,14.2267783203125,[14.2267783203125],,kWh,0.00021125614057916663,2.3295780818528554e-05,7.152116832799538e-05,0.00030607308972569055,,MB,1433.27232,5488.115712,0.0,5073.010688,4884.617216,s,10,10.293136474609376,1.0293136474609375,0.005392288935339248,1.029697998046875,1.0333704223632814,1.0353196716308595,1.036879071044922,"[1.0155052490234375, 1.027007080078125, 1.0283468017578126, 1.02914013671875, 1.030255859375, 1.0286800537109375, 1.031076171875, 1.0329189453125, 1.0372689208984376, 1.032937255859375]",tokens/s,248.709419749256,kWh,2.9900010956664573e-05,3.2960591049788427e-06,1.9883182573199322e-05,5.307925263484274e-05,tokens/kWh,4822976.724279541,MB,1455.65696,5504.892928,0.0,5087.690752,4884.619776,s,10,46.361851074218755,4.636185107421875,0.01174072447260743,4.637613525390625,4.647243652343749,4.649689208984375,4.651645654296875,"[4.6126533203125, 4.62065185546875, 4.6293408203125, 4.6346708984375, 4.63806787109375, 4.6371591796875, 4.64453271484375, 4.645939453125, 4.6467001953125, 4.652134765625]",tokens/s,13.588758546147334,kWh,0.0001360908442775038,1.5013284834288154e-05,9.01584332377997e-05,0.00024126256234959162,tokens/kWh,261126.2990265039,,s,630,46.358755027770925,0.07358532544090635,0.0015792500946562723,0.07340851211547851,0.07420360641479493,0.07433155860900878,0.08475951026916503,"[0.08477664184570312, 0.07460028839111328, 0.07375923156738282, 0.07332454681396484, 0.07273664093017577, 0.07273270416259765, 0.07275529479980469, 0.0727531509399414, 0.07277056121826173, 0.07273529815673828, 0.07279456329345703, 0.07281565093994141, 0.07277590179443359, 0.07268838500976563, 0.07273648071289063, 0.0727627182006836, 0.07270905303955078, 0.07273267364501954, 0.07271014404296874, 0.07279199981689453, 0.07276755523681641, 0.0728084487915039, 0.07275631713867188, 0.07280528259277344, 0.07279615783691407, 0.073340576171875, 0.0743283233642578, 0.07351058959960938, 0.0729459228515625, 0.07284719848632812, 0.07283539581298829, 0.07282892608642579, 0.07285759735107422, 0.07282796478271485, 0.07282537841796875, 0.07290204620361328, 0.0728832015991211, 0.07285350036621094, 0.07305980682373046, 0.07423747253417969, 0.07366143798828124, 0.07319142150878906, 0.07301324462890625, 0.0729169921875, 0.07290675354003906, 0.07286271667480469, 0.07286067199707032, 0.07291439819335938, 0.07290729522705078, 0.07287139129638671, 0.07298214721679687, 0.07297116851806641, 0.0743403549194336, 0.07366851043701172, 0.0732754898071289, 0.07291820526123047, 0.07297721862792969, 
0.07293746948242187, 0.07291494750976563, 0.07292108917236328, 0.07291903686523438, 0.07301939392089844, 0.07294156646728515, 0.08629177856445312, 0.0744571533203125, 0.07377523040771485, 0.07320832061767578, 0.0727224349975586, 0.07278940582275391, 0.0728070068359375, 0.07274867248535156, 0.07270195007324219, 0.07272077178955078, 0.0727053451538086, 0.07273951721191406, 0.07277513885498046, 0.07275574493408203, 0.0732938232421875, 0.07405977630615235, 0.07355391693115235, 0.07314022064208985, 0.07276898956298829, 0.07280079650878907, 0.07290470123291015, 0.07346975708007812, 0.07288655853271485, 0.07278502655029297, 0.07278422546386719, 0.07278841400146484, 0.07278797149658203, 0.072806396484375, 0.07354678344726563, 0.07388668823242188, 0.07349644470214843, 0.07290201568603516, 0.0729587173461914, 0.07288832092285157, 0.07287808227539062, 0.07286784362792968, 0.07286374664306641, 0.07290201568603516, 0.07290534210205078, 0.07288626861572266, 0.07347200012207031, 0.07314608001708985, 0.07346393585205079, 0.07413571166992187, 0.07358668518066407, 0.07326310729980469, 0.07292108917236328, 0.0729354248046875, 0.07293746948242187, 0.0729039077758789, 0.07286659240722657, 0.07292301177978516, 0.07289663696289063, 0.07291680145263672, 0.07293955230712891, 0.07334518432617188, 0.07427056121826171, 0.07365443420410156, 0.0736153564453125, 0.07364534759521485, 0.07316758728027344, 0.07331839752197265, 0.07301939392089844, 0.0851599349975586, 0.07431340789794921, 0.07362592315673828, 0.07326310729980469, 0.07276953887939454, 0.07276134490966797, 0.07275520324707031, 0.0727531509399414, 0.07274412536621094, 0.07278604888916015, 0.07273699188232421, 0.07272700500488281, 0.0728406753540039, 0.07311779022216797, 0.07389766693115235, 0.07394585418701172, 0.07382425689697265, 0.07348406219482422, 0.07290697479248047, 0.07289036560058594, 0.07284326171875, 0.07289810943603516, 0.07298291015625, 0.07288191986083985, 0.07282672119140625, 0.07281430053710937, 0.07405184173583984, 0.07321241760253906, 0.0740838394165039, 0.0735396499633789, 0.07348883056640625, 0.07356787109375, 0.07353939056396484, 0.07295769500732421, 0.07287071990966797, 0.07412294769287109, 0.07359260559082031, 0.07323280334472657, 0.07290476989746093, 0.0728924789428711, 0.07289794921875, 0.07292784118652344, 0.07312716674804688, 0.07411094665527344, 0.07356607818603515, 0.07302236938476563, 0.07295164489746093, 0.07300521850585938, 0.07422566223144532, 0.07359487915039062, 0.07322134399414063, 0.07295375823974609, 0.07290662384033203, 0.07298534393310546, 0.07460009765625, 0.07345622253417969, 0.07411097717285156, 0.07363731384277344, 0.0730621795654297, 0.072999267578125, 0.07319187164306641, 0.07427852630615234, 0.07360530853271484, 0.08637439727783203, 0.07459363555908204, 0.07388553619384766, 0.0734621124267578, 0.07276544189453125, 0.07385721588134765, 0.07275497436523437, 0.07277823638916016, 0.07275654602050781, 0.07275180816650391, 0.07274291229248046, 0.07279411315917969, 0.07289571380615234, 0.07349721527099609, 0.07438556671142578, 0.07363788604736328, 0.07321395111083985, 0.07291664123535156, 0.07290914916992187, 0.07287506866455078, 0.07286825561523437, 0.07286224365234376, 0.0741246109008789, 0.07366521453857422, 0.0732774429321289, 0.072880126953125, 0.07288813018798829, 0.07419622039794922, 0.07353644561767578, 0.07316480255126953, 0.07361065673828125, 0.07394348907470703, 0.0735346908569336, 0.07312684631347656, 0.07295999908447266, 0.072880126953125, 0.07350064086914063, 0.07403846740722657, 0.073552734375, 
0.07293746948242187, 0.07287808227539062, 0.07356317138671875, 0.0738922882080078, 0.07360975646972656, 0.07298028564453125, 0.07411321258544921, 0.07358179473876954, 0.07332125091552734, 0.07295795440673829, 0.07293727874755859, 0.07420947265625, 0.07362274932861328, 0.07329440307617187, 0.07292336273193359, 0.07335321807861328, 0.07405091094970703, 0.07353414154052734, 0.07294358062744141, 0.0729518051147461, 0.07424147033691406, 0.07363846588134766, 0.07336140441894531, 0.07300860595703125, 0.08540659332275391, 0.07426457977294922, 0.07370079803466797, 0.073357666015625, 0.07291926574707032, 0.07279206085205078, 0.07278105926513671, 0.07271910095214844, 0.07279373168945312, 0.07272691345214843, 0.07273267364501954, 0.07272348785400391, 0.07326000213623046, 0.07413116455078125, 0.07342108917236329, 0.07429116821289063, 0.07366563415527344, 0.07329046630859375, 0.07288547515869141, 0.0728053741455078, 0.07275631713867188, 0.07275202941894532, 0.07277977752685547, 0.07274905395507812, 0.07275279998779297, 0.07341868591308594, 0.07406221008300781, 0.07367683410644531, 0.0740863037109375, 0.07356409454345703, 0.0735064926147461, 0.07349705505371094, 0.07349187469482422, 0.07348643493652343, 0.07292979431152344, 0.07291085052490234, 0.07290185546875, 0.07288706970214844, 0.07411068725585937, 0.07357469177246094, 0.07320575714111328, 0.07431954956054687, 0.073793212890625, 0.07341878509521485, 0.07411138916015625, 0.07363807678222656, 0.07326924896240235, 0.07419084930419922, 0.07360649871826172, 0.07326163482666016, 0.07294969940185547, 0.07292739105224609, 0.07291494750976563, 0.07336540985107422, 0.07422557067871094, 0.07361901092529297, 0.07613401794433594, 0.07336653137207032, 0.0742010269165039, 0.07371558380126954, 0.07312985229492187, 0.07361312103271485, 0.07420697784423828, 0.08471756744384766, 0.0743342056274414, 0.07368016052246094, 0.07327401733398438, 0.07271430206298828, 0.07272857666015625, 0.07274291229248046, 0.07276697540283203, 0.07282125091552734, 0.07285078430175781, 0.07275081634521484, 0.07275971221923828, 0.07344697570800782, 0.07472431945800781, 0.07407206726074218, 0.07364179229736328, 0.07346809387207032, 0.07345961761474609, 0.0734303970336914, 0.07343122863769531, 0.07287862396240234, 0.07281251525878907, 0.07287375640869141, 0.07282099151611328, 0.07277776336669922, 0.07309104156494141, 0.07414374542236328, 0.07353497314453125, 0.0741913299560547, 0.07370454406738282, 0.07347296142578125, 0.07351872253417968, 0.07432624053955078, 0.07378141021728515, 0.07344742584228516, 0.07288626861572266, 0.07298662567138672, 0.07282876586914062, 0.07331241607666016, 0.07416422271728515, 0.07353926086425781, 0.07357266998291015, 0.07417036437988281, 0.07355801391601563, 0.07364927673339844, 0.0741563491821289, 0.07358633422851563, 0.0731513900756836, 0.07292880249023438, 0.07292066955566406, 0.07301004791259766, 0.0736247329711914, 0.07390025329589844, 0.07350335693359375, 0.07360307312011719, 0.07414742279052734, 0.07355404663085938, 0.07310160064697266, 0.07426198577880859, 0.073712158203125, 0.07333392333984375, 0.07309910583496093, 0.07333990478515626, 0.08534825897216797, 0.07441817474365234, 0.07410073852539062, 0.07356620788574218, 0.07305801391601563, 0.07282921600341796, 0.07284941101074219, 0.07297023773193359, 0.07278765106201172, 0.07409081268310547, 0.07352291107177734, 0.07318351745605468, 0.07296176147460938, 0.07434406280517578, 0.07392937469482422, 0.07419699096679687, 0.07366246032714843, 0.0732774429321289, 0.07402086639404297, 0.07354573059082031, 
0.07313203430175781, 0.07278797149658203, 0.07283280181884766, 0.07278377532958985, 0.07276780700683594, 0.07349212646484375, 0.0740191650390625, 0.0734576644897461, 0.07362969970703125, 0.07401427459716797, 0.07351136016845704, 0.07411872100830078, 0.07347654724121094, 0.07310336303710938, 0.07292108917236328, 0.07303548431396484, 0.07413788604736328, 0.07351500701904297, 0.07312384033203125, 0.07296723175048828, 0.07350537872314453, 0.0739474868774414, 0.07373824310302735, 0.07407411193847656, 0.07363724517822266, 0.07417225646972657, 0.07358473968505859, 0.07317142486572266, 0.07328313446044922, 0.07407683563232421, 0.07355126190185547, 0.07306301116943359, 0.07291903686523438, 0.0741928939819336, 0.07380172729492188, 0.07393689727783204, 0.07439897918701172, 0.07366118621826172, 0.07360102081298828, 0.07427251434326172, 0.07369084930419922, 0.07333042907714844, 0.07314921569824219, 0.08467644500732421, 0.07431231689453124, 0.07367453002929687, 0.0731895980834961, 0.07273798370361328, 0.07273145294189454, 0.07296614074707031, 0.072880126953125, 0.07273580932617188, 0.07275312042236329, 0.07274297332763671, 0.07278479766845704, 0.0744130859375, 0.07486563110351563, 0.07425638580322266, 0.07380992126464844, 0.07348162841796875, 0.07345974731445312, 0.07342546844482421, 0.07300096130371093, 0.07293952178955078, 0.07282262420654297, 0.07278009796142579, 0.07277142333984375, 0.07403510284423828, 0.0735862045288086, 0.07415654754638672, 0.07480854034423828, 0.07426525115966796, 0.07359506988525391, 0.0734044189453125, 0.07416831970214843, 0.07343923187255859, 0.07305216217041016, 0.07290470123291015, 0.0728795166015625, 0.0740906524658203, 0.07356665802001953, 0.07329177856445312, 0.073385986328125, 0.07416124725341797, 0.07359158325195313, 0.07432527923583984, 0.07384464263916016, 0.07345247650146484, 0.07441407775878907, 0.07386521911621094, 0.07352499389648437, 0.07304994964599609, 0.0733022689819336, 0.07410294342041016, 0.07356211090087891, 0.07345970916748047, 0.0741396484375, 0.07358624267578125, 0.07383084869384765, 0.07408179473876954, 0.07425894165039063, 0.0736522216796875, 0.07361331176757813, 0.07417036437988281, 0.0736358413696289, 0.07315660858154296, 0.08458854675292969, 0.07443865966796875, 0.07385088348388671, 0.07335731506347656, 0.07278797149658203, 0.07274905395507812, 0.07274857330322265, 0.07276521301269531, 0.07274361419677734, 0.07272003173828125, 0.07271049499511718, 0.07279411315917969, 0.0739082260131836, 0.07466598510742188, 0.07424614715576172, 0.07414963531494141, 0.07373216247558594, 0.07340438079833984, 0.07301961517333984, 0.07276134490966797, 0.07341868591308594, 0.07413356781005859, 0.07342806243896484, 0.07300508880615235, 0.07289126586914063, 0.07376854705810547, 0.07420323181152344, 0.07406009674072266, 0.07450418853759766, 0.07397990417480468, 0.07358589172363281, 0.07317788696289063, 0.0730738525390625, 0.0741957778930664, 0.0736358413696289, 0.07359190368652344, 0.07294454193115234, 0.0729307861328125, 0.07439151763916016, 0.07381664276123047, 0.07418013000488281, 0.07394477081298828, 0.07429609680175782, 0.0736890869140625, 0.07334022521972657, 0.0731673583984375, 0.07428729248046875, 0.07364137268066406, 0.07315257263183594, 0.07295222473144532, 0.07412544250488282, 0.07359417724609375, 0.07355248260498047, 0.07415203094482421, 0.07374642944335938, 0.07431372833251954, 0.07363772583007812, 0.07381622314453125, 0.07364157104492187, 0.07370294189453125, 0.07368089294433594, 0.07374118041992188, 0.07317903900146484, 0.08536883544921875, 
0.07428915405273437, 0.0736727066040039, 0.07324671936035156, 0.07268556976318359, 0.0727040023803711, 0.07285955047607422, 0.0739920654296875, 0.07362786865234375, 0.0730808334350586, 0.07400614166259765, 0.0735072021484375, 0.07335040283203124, 0.07404815673828125, 0.07354492950439454, 0.07421004486083985, 0.07390016174316406, 0.07343619537353516, 0.07313302612304687, 0.07400646209716796, 0.07365174102783204, 0.07318787384033203, 0.07283302307128907, 0.07353065490722656, 0.0738239974975586, 0.07342998504638672, 0.07372713470458984, 0.0741622085571289, 0.07379232025146484, 0.07411650848388672, 0.07351689910888672, 0.0731778564453125, 0.07411097717285156, 0.07361945343017579, 0.07337983703613281, 0.07427891540527344, 0.07361917114257813, 0.0731302719116211, 0.07292720031738281, 0.07422569274902344, 0.07356620788574218, 0.0735023651123047, 0.07413180541992187, 0.0735923843383789, 0.073904541015625, 0.0743608627319336, 0.07370956420898438, 0.07341260528564453, 0.07366178894042968, 0.07417289733886719, 0.0735643539428711, 0.07308073425292969, 0.07343113708496093, 0.07426252746582031, 0.07359257507324218, 0.07363308715820313, 0.0741935043334961, 0.07361958312988282, 0.07481529235839844, 0.07428099060058593, 0.07364790344238281, 0.07318540954589844, 0.07354825592041016]",tokens/s,13.589666064643051,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,14696.873984,7853.703168,0.0,7451.181056,7445.507072,s,1,32.3067734375,32.3067734375,0.0,32.3067734375,32.3067734375,32.3067734375,32.3067734375,[32.3067734375],,kWh,0.0007380927997625348,8.140708269009104e-05,0.0002539568698320116,0.0010734567522846373,,MB,1436.635136,8004.698112,0.0,7581.20448,7570.843648,s,10,1.2512040939331055,0.12512040939331054,0.0004928704130643679,0.1251166229248047,0.12563593978881835,0.12583901176452636,0.12600146934509276,"[0.12513686370849608, 0.12546953582763673, 0.12529996490478515, 0.12492781066894532, 0.1250963821411133, 0.12559081268310546, 0.12485311889648437, 0.12425430297851563, 0.12604208374023437, 0.12453321838378906]",tokens/s,2046.0291110083822,kWh,3.6289686384256458e-06,4.0021078930852705e-07,2.407155560839639e-06,6.436334988573812e-06,tokens/kWh,39774188.3314755,MB,1458.917376,8017.281024,0.0,7593.787392,7514.46784,s,10,74.84006689453125,7.484006689453125,0.01865005143678795,7.476088378906249,7.51313076171875,7.51566376953125,7.51769017578125,"[7.51819677734375, 7.466888671875, 7.47148828125, 7.48547021484375, 7.51256787109375, 7.47767724609375, 7.46505908203125, 7.46749365234375, 7.5007255859375, 7.47449951171875]",tokens/s,8.417950786813575,kWh,0.00021505571818532785,2.372167220535054e-05,9.890661993455892e-05,0.00033768401032523734,tokens/kWh,186564.94851302585,,s,630,74.83677191925051,0.11878852685595315,0.0013794488682901537,0.11849339294433595,0.1197469467163086,0.12184152488708495,0.12467963745117189,"[0.11741398620605469, 0.11893264007568359, 0.11804662322998047, 0.11785862731933594, 0.11805868530273438, 0.11777938842773437, 
0.11825708770751953, 0.1220610580444336, 0.11862252807617188, 0.11889663696289063, 0.11858441925048828, 0.11806806182861328, 0.11856288146972656, 0.11864883422851563, 0.11812655639648438, 0.11864886474609375, 0.11852390289306641, 0.1185607681274414, 0.11903590393066406, 0.11761449432373047, 0.11788902282714844, 0.11884349060058594, 0.12124569702148437, 0.11774278259277343, 0.11821139526367187, 0.11730140686035156, 0.1186253433227539, 0.11931932830810547, 0.11941478729248046, 0.11907401275634766, 0.11938896179199218, 0.11965430450439453, 0.11846047973632813, 0.11995568084716797, 0.11908000183105469, 0.1211072006225586, 0.11921817779541016, 0.11878809356689453, 0.1195572509765625, 0.11908335876464844, 0.11906825256347656, 0.12469526672363282, 0.1189439697265625, 0.12077670288085937, 0.11971993255615235, 0.11833046722412109, 0.11885865783691406, 0.11892704010009765, 0.11908118438720704, 0.11954160308837891, 0.11910476684570312, 0.11869081878662109, 0.120172607421875, 0.11882438659667968, 0.11871897888183594, 0.11926525115966796, 0.11948639678955078, 0.12464137268066407, 0.12624620819091797, 0.12120953369140625, 0.12175769805908203, 0.11884944152832032, 0.12067849731445313, 0.1187081298828125, 0.11912652587890625, 0.12037129974365235, 0.11856253051757812, 0.11975043487548828, 0.11817932891845703, 0.11746601867675781, 0.1174299545288086, 0.11730489349365235, 0.11840383911132812, 0.11817984008789062, 0.11759740447998047, 0.12073859405517579, 0.11992269134521484, 0.11826790618896485, 0.11821875, 0.11789260864257813, 0.11807817840576172, 0.11871949005126953, 0.11828099060058593, 0.11769395446777343, 0.11937017822265625, 0.11838470458984375, 0.11781529235839844, 0.11768153381347657, 0.11760726165771485, 0.11817346954345703, 0.11837606048583985, 0.11778307342529297, 0.1177409896850586, 0.11852828979492187, 0.1175862045288086, 0.11776188659667969, 0.11790688323974609, 0.11768889617919921, 0.11872051239013671, 0.11900109100341796, 0.11875702667236328, 0.12010940551757812, 0.11858739471435546, 0.11784521484375, 0.11830473327636719, 0.11913263702392578, 0.11830934143066406, 0.11897433471679687, 0.11817346954345703, 0.11790972900390626, 0.11883724975585938, 0.1171987533569336, 0.11778467559814453, 0.11808905792236328, 0.1189271697998047, 0.11849404907226563, 0.11872630310058593, 0.1179375991821289, 0.11861289978027344, 0.11812249755859375, 0.11770230102539063, 0.1180265884399414, 0.11799961853027344, 0.11874742126464843, 0.12746927642822264, 0.11877785491943359, 0.11844719696044922, 0.11724601745605469, 0.11711583709716797, 0.11760368347167968, 0.11770912170410157, 0.11803660583496094, 0.12164927673339844, 0.11807539367675782, 0.11931439971923828, 0.11773136138916016, 0.11746070098876953, 0.11696975708007812, 0.11761254119873046, 0.11895311737060547, 0.12191011047363282, 0.11930944061279297, 0.11904402923583984, 0.1197118377685547, 0.11832806396484374, 0.11837030029296874, 0.11847602844238281, 0.11833740997314453, 0.11834073638916015, 0.11825328063964843, 0.11873497772216797, 0.11940659332275391, 0.11896336364746093, 0.11830742645263671, 0.11880051422119141, 0.11841548919677734, 0.11919155120849609, 0.1177426528930664, 0.1188562240600586, 0.12298604583740234, 0.11901833343505859, 0.11929804992675781, 0.11889049530029297, 0.11878572845458985, 0.11819225311279297, 0.11864444732666016, 0.11761507415771484, 0.11842508697509765, 0.11950335693359375, 0.11905433654785157, 0.11783782196044922, 0.11771084594726562, 0.1181122589111328, 0.1180008316040039, 0.11805276489257813, 0.1180230712890625, 
0.11879014587402344, 0.11841673278808594, 0.1178155517578125, 0.11795609283447266, 0.11861510467529297, 0.1178663330078125, 0.11893350219726563, 0.11863654327392578, 0.1183804473876953, 0.11892540740966796, 0.1185582046508789, 0.11810889434814453, 0.11960038757324219, 0.1171376953125, 0.11712300872802735, 0.1177084503173828, 0.11812451171875, 0.11862204742431641, 0.11804937744140626, 0.11728643035888672, 0.1178536605834961, 0.11882393646240234, 0.11946320343017579, 0.11887007904052735, 0.11872854614257812, 0.1187845458984375, 0.11846685028076172, 0.11803411102294922, 0.11839679718017578, 0.11821715545654297, 0.11907023620605468, 0.12226608276367187, 0.11847987365722656, 0.1184532470703125, 0.11934243011474609, 0.1185654067993164, 0.11904537963867187, 0.11815718078613281, 0.1183096923828125, 0.1188121566772461, 0.11848499298095704, 0.1184610595703125, 0.1182164764404297, 0.11866345977783203, 0.11796435546875, 0.11850796508789062, 0.11853414154052734, 0.12368828582763672, 0.11945231628417968, 0.11952963256835937, 0.11831625366210938, 0.11869821166992188, 0.11832319641113281, 0.12080579376220703, 0.11973632049560547, 0.11801599884033204, 0.11873484802246094, 0.11851058959960938, 0.11856793975830078, 0.11888435363769531, 0.11945369720458984, 0.11860377502441406, 0.11891043090820312, 0.11825433349609375, 0.11838441467285156, 0.11854233551025391, 0.12396278381347656, 0.11875593566894531, 0.11913123321533203, 0.11874179077148438, 0.11884352111816407, 0.11848883056640624, 0.11854771423339844, 0.11871539306640624, 0.11898470306396484, 0.11854038238525391, 0.1189850845336914, 0.11859503936767578, 0.11832169342041016, 0.11936930847167969, 0.12359414672851562, 0.11899801635742188, 0.11923251342773437, 0.11865676879882812, 0.11957273864746094, 0.11928355407714844, 0.11882921600341798, 0.11810771179199218, 0.11910297393798829, 0.1190552978515625, 0.11943321228027344, 0.11862604522705078, 0.11866953277587891, 0.11881680297851563, 0.11985110473632812, 0.11923363494873047, 0.11888098907470702, 0.11843183898925781, 0.11829811096191406, 0.11917568206787109, 0.11832115173339844, 0.11932032012939453, 0.1189144287109375, 0.1192252197265625, 0.11840102386474609, 0.11922431945800781, 0.11895603179931641, 0.11908118438720704, 0.11807743835449219, 0.11867727661132813, 0.11887615966796874, 0.1190414047241211, 0.11882994842529297, 0.11901910400390625, 0.12401270294189454, 0.11933679962158203, 0.11883849334716796, 0.11852674865722657, 0.11957469177246094, 0.11926719665527344, 0.11890828704833985, 0.11910630035400391, 0.1189578857421875, 0.11897055816650391, 0.11827200317382812, 0.11904409790039062, 0.11956838226318359, 0.1195665283203125, 0.11922207641601562, 0.11973193359375, 0.12480441284179687, 0.11923680114746094, 0.11919849395751952, 0.11910758209228516, 0.11853977966308593, 0.11924246215820312, 0.11952963256835937, 0.11897714996337891, 0.11958477020263672, 0.11754086303710938, 0.11815526580810547, 0.11802828979492187, 0.11855651092529297, 0.11858550262451172, 0.11828838348388672, 0.11842969512939452, 0.11809382629394531, 0.11773654174804687, 0.12077174377441406, 0.11900902557373047, 0.11944332885742187, 0.11921171569824218, 0.11836217498779297, 0.11857865905761719, 0.11859552001953125, 0.1181042251586914, 0.11830287933349609, 0.11949737548828125, 0.1183846435546875, 0.11878112030029297, 0.11863737487792969, 0.11810406494140625, 0.11890073394775391, 0.11849273681640625, 0.11842966461181641, 0.11865289306640625, 0.11876812744140625, 0.11861949157714843, 0.11857740783691406, 0.1191460189819336, 0.11846336364746093, 
0.11869731140136719, 0.11870240020751953, 0.1184257583618164, 0.11847449493408203, 0.11818595123291016, 0.1187739486694336, 0.12035100555419923, 0.11949788665771484, 0.11909779357910157, 0.11825737762451172, 0.11816413116455078, 0.11832109069824219, 0.11888835144042968, 0.11910572814941406, 0.11895193481445313, 0.11780857849121094, 0.11788652801513672, 0.1229686050415039, 0.11832157135009766, 0.11823872375488281, 0.11757746887207031, 0.11795295715332031, 0.1182456283569336, 0.11808525085449219, 0.11775027465820312, 0.11747337341308593, 0.12638607788085937, 0.11760208129882813, 0.11799747467041016, 0.11780537414550782, 0.11811020660400391, 0.11690704345703125, 0.1181677474975586, 0.12567632293701173, 0.1177864990234375, 0.11735667419433594, 0.11758182525634765, 0.11781536102294922, 0.11770464324951171, 0.11833750152587891, 0.11767401885986328, 0.1277373733520508, 0.12329821014404296, 0.11773958587646484, 0.1180211181640625, 0.11745382690429687, 0.1179156494140625, 0.11823104095458985, 0.11822799682617187, 0.11812348937988282, 0.11780915069580078, 0.12206899261474609, 0.1183825912475586, 0.11776748657226563, 0.1174616928100586, 0.11813481903076171, 0.11802534484863281, 0.11805372619628907, 0.11794416046142578, 0.11808988952636719, 0.12173081970214844, 0.11780534362792969, 0.11770777893066406, 0.1175519027709961, 0.11805043029785156, 0.11799727630615234, 0.11803324890136718, 0.1182003173828125, 0.1178071060180664, 0.11884678649902344, 0.11787334442138672, 0.11803401947021484, 0.11783558654785156, 0.11794217681884765, 0.11789180755615235, 0.11782959747314453, 0.11760435485839844, 0.11802735900878907, 0.11764403533935547, 0.11848515319824218, 0.11810800170898438, 0.11830825805664062, 0.11804137420654297, 0.11859308624267578, 0.11891737365722656, 0.11817711639404296, 0.11851347351074219, 0.11820313262939453, 0.11832454681396484, 0.11928057861328124, 0.11876351928710938, 0.11782553863525391, 0.11776000213623047, 0.11753472137451172, 0.11731763458251954, 0.11748131561279297, 0.11799772644042969, 0.11755110168457031, 0.11727871704101563, 0.11853804779052735, 0.11772115325927734, 0.11763314819335938, 0.11832476806640625, 0.11780553436279297, 0.11808464050292969, 0.11778768157958984, 0.11805689239501953, 0.11818803405761719, 0.11781890869140625, 0.11993135833740234, 0.11802623748779296, 0.1176013412475586, 0.11791629028320312, 0.11773113250732421, 0.11832985687255859, 0.11757968139648438, 0.11859977722167969, 0.11765872192382812, 0.11911670684814453, 0.11800166320800781, 0.1181239013671875, 0.11790605163574219, 0.118443359375, 0.11837916564941406, 0.12113629150390624, 0.12208214569091796, 0.11804204559326172, 0.11812640380859375, 0.11804905700683593, 0.11788253021240234, 0.11768915557861329, 0.11825321960449219, 0.11783993530273437, 0.11867549133300781, 0.11804857635498046, 0.11730579376220703, 0.11777228546142578, 0.1177927703857422, 0.11838054656982422, 0.11806012725830078, 0.11918838500976563, 0.11975270080566407, 0.11883929443359376, 0.12051229095458985, 0.11921167755126953, 0.11856339263916016, 0.11937792205810546, 0.11937177276611328, 0.11948556518554687, 0.12029811096191406, 0.12012505340576173, 0.11931279754638671, 0.11946211242675782, 0.11984480285644532, 0.11897248077392578, 0.11954975891113281, 0.11919910430908204, 0.12191948699951172, 0.118614013671875, 0.11840921783447265, 0.11842259216308594, 0.11794322967529297, 0.1187143325805664, 0.11875321960449219, 0.11858684539794923, 0.11941542053222656, 0.11974655914306641, 0.1191178207397461, 0.11903180694580077, 0.11864268493652344, 
0.11878310394287109, 0.12086873626708984, 0.11896096038818359, 0.1188025894165039, 0.11924845123291015, 0.11779033660888671, 0.11794723510742187, 0.11852931213378906, 0.11807129669189453, 0.11872454071044922, 0.11841110229492187, 0.1185330581665039, 0.11911373138427735, 0.11897650909423828, 0.11787673950195313, 0.11833929443359376, 0.11808386993408203, 0.11765139007568359, 0.12258719635009765, 0.11805081939697265, 0.11934925079345703, 0.1183726043701172, 0.12130252838134765, 0.11862860870361328, 0.11870207977294922, 0.11853823852539062, 0.11817481231689453, 0.12285635375976563, 0.11899440002441407, 0.11948086547851562, 0.1185660171508789, 0.1186569595336914, 0.11828934478759766, 0.11820435333251954, 0.11832940673828125, 0.11848073577880859, 0.12280345916748046, 0.12254710388183594, 0.12060467529296875, 0.11846451568603515, 0.11869593811035156, 0.11837644958496094, 0.11772927856445313, 0.1182918701171875, 0.1230198745727539, 0.11898194885253906, 0.11879033660888672, 0.11838310241699218, 0.11806924438476563, 0.11806022644042968, 0.11782902526855468, 0.11774323272705078, 0.12249715423583984, 0.11837117004394532, 0.11744041442871093, 0.11999852752685547, 0.11840697479248047, 0.11785852813720703, 0.11783561706542969, 0.11806674957275391, 0.11814720153808594, 0.12314192199707032, 0.11855532836914062, 0.11780451202392578, 0.11808399963378906, 0.12327715301513673, 0.11748585510253906, 0.11759584045410157, 0.11803475189208984, 0.12209667205810547, 0.11864163208007812, 0.1176657943725586, 0.11733952331542968, 0.11769305419921874, 0.11774566650390625, 0.1177474594116211, 0.11768787384033202, 0.11754678344726563, 0.1227047348022461, 0.11930630493164063, 0.11863859558105469, 0.1182218246459961, 0.1180579833984375, 0.11806924438476563, 0.11951222229003906, 0.11819721221923828, 0.11824114990234375, 0.1191546859741211, 0.11813887786865235, 0.11788902282714844, 0.11888435363769531, 0.11824336242675781, 0.11816915130615234, 0.11843382263183594, 0.118505859375, 0.1187221450805664, 0.11878441619873047, 0.11821209716796875, 0.11831721496582032, 0.11830716705322265, 0.11788082885742188, 0.11771699523925781, 0.11761376190185546, 0.11784185791015625, 0.11863046264648437, 0.11792396545410157, 0.12070368194580078, 0.11747718048095702, 0.11797727966308594, 0.11738521575927735, 0.11796889495849609, 0.11905843353271485, 0.12293119812011719]",tokens/s,8.418321419312091,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback 
(most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model 
= DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1129.312256,4944.953344,0.0,4542.431232,4484.571136,s,1,14.8475390625,14.8475390625,0.0,14.8475390625,14.8475390625,14.8475390625,14.8475390625,[14.8475390625],,kWh,0.00022843728737082072,2.5191184985873418e-05,7.687922817001258e-05,0.0003305077005267067,,MB,1437.097984,5488.115712,0.0,5073.010688,4884.617216,s,10,10.289482666015626,1.0289482666015624,0.00662603541614141,1.02908935546875,1.035421435546875,1.0366109252929687,1.0375625170898437,"[1.01443896484375, 1.021544677734375, 1.0272686767578125, 1.0273184814453125, 1.028142333984375, 1.030036376953125, 1.0378004150390625, 1.0351571044921875, 1.0344757080078124, 1.0332999267578125]",tokens/s,248.7977367856633,kWh,2.9916882178746766e-05,3.2981082258967346e-06,1.990423814559883e-05,5.3119228550242334e-05,tokens/kWh,4819347.09872273,MB,1459.273728,5504.892928,0.0,5087.690752,4884.619776,s,10,46.325497558593746,4.632549755859374,0.012953731310911036,4.6348486328125,4.64461953125,4.6486730468749995,4.651915859374999,"[4.6073330078125, 4.61697119140625, 4.62491015625, 
4.62723486328125, 4.63360205078125, 4.63609521484375, 4.63936181640625, 4.64371875, 4.6435439453125, 4.6527265625]",tokens/s,13.599422201632242,kWh,0.00013603047826084018,1.5006067513893657e-05,9.012693321260035e-05,0.00024116347898733414,tokens/kWh,261233.58422486827,,s,630,46.322400482177756,0.07352761981298052,0.001601817255771196,0.07320217514038085,0.07417182083129882,0.07445948600769044,0.08496531227111817,"[0.08488909149169922, 0.07436524963378906, 0.07364857482910156, 0.07325312042236329, 0.072742431640625, 0.07275766754150391, 0.07272035217285157, 0.07280595397949219, 0.07274960327148437, 0.07272978973388672, 0.07274169921875, 0.0730091552734375, 0.07269347381591797, 0.0726935043334961, 0.07268611145019531, 0.07274700927734375, 0.0727509765625, 0.07278524780273438, 0.07278262329101562, 0.07274905395507812, 0.0727531509399414, 0.07279821014404297, 0.07285555267333985, 0.0732525405883789, 0.07284960174560547, 0.07302899169921875, 0.07291696166992187, 0.07278876495361328, 0.07280207824707031, 0.07280387115478516, 0.07281529235839844, 0.07279535675048829, 0.07278876495361328, 0.07281423950195312, 0.07286000061035157, 0.07290060424804687, 0.07287715148925782, 0.07284009552001953, 0.07285555267333985, 0.07288774108886718, 0.07284342193603516, 0.07408477020263672, 0.07353910064697265, 0.07320419311523438, 0.07285350036621094, 0.07283676910400391, 0.07291120147705078, 0.07290991973876954, 0.07294195556640624, 0.07290284729003907, 0.07293376159667969, 0.07293452453613282, 0.07291990661621094, 0.07289977264404297, 0.0729578857421875, 0.07293785858154297, 0.0728983383178711, 0.0729044189453125, 0.07295283508300782, 0.07297433471679687, 0.07296227264404297, 0.07291881561279297, 0.0732200927734375, 0.08481958770751953, 0.07445283508300782, 0.07370233917236328, 0.07324025726318359, 0.0727449951171875, 0.07272886657714844, 0.0727162857055664, 0.07267842864990234, 0.0727479019165039, 0.07271635437011718, 0.0727449951171875, 0.07276105499267578, 0.07276351928710938, 0.07278403472900391, 0.07287193298339843, 0.07306172943115234, 0.07338460540771484, 0.07380105590820313, 0.07341887664794922, 0.07275475311279297, 0.07273366546630859, 0.07279206085205078, 0.07278736114501953, 0.07314508819580078, 0.07278940582275391, 0.07281094360351563, 0.07283324432373046, 0.07284028625488281, 0.07282582092285156, 0.07289315032958985, 0.0730245132446289, 0.07410073852539062, 0.07344534301757813, 0.07312592315673828, 0.07283856201171875, 0.07282758331298828, 0.07280429077148437, 0.07284921264648438, 0.072864990234375, 0.07311798095703125, 0.07353411102294923, 0.07286399841308594, 0.07289561462402344, 0.07286233520507812, 0.07341862487792969, 0.07392182159423828, 0.0734417953491211, 0.07300137329101562, 0.07291426849365235, 0.07286415863037109, 0.07412754821777344, 0.07366860961914062, 0.07324671936035156, 0.07288832092285157, 0.07294134521484374, 0.07299651336669923, 0.07295033264160156, 0.07290828704833985, 0.0735257568359375, 0.07398553466796876, 0.0734909439086914, 0.07292851257324219, 0.0729464340209961, 0.08603398132324219, 0.07452540588378906, 0.07368819427490235, 0.07335997009277344, 0.07283299255371094, 0.07281053161621094, 0.07281228637695313, 0.07283888244628907, 0.07279465484619141, 0.07277056121826173, 0.07274188995361328, 0.07277318572998047, 0.07273312377929687, 0.0734534683227539, 0.0739247055053711, 0.0734755859375, 0.07284172821044922, 0.07275958251953125, 0.07275692749023438, 0.07407421112060547, 0.07350879669189453, 0.0731240005493164, 0.07279942321777344, 0.07281501007080078, 0.07279014587402344, 
0.07273865509033203, 0.07279580688476563, 0.07282259368896485, 0.07277037048339843, 0.07307657623291015, 0.07402931213378906, 0.07346390533447265, 0.07299644470214844, 0.07407766723632812, 0.07351369476318359, 0.073198974609375, 0.0729791030883789, 0.07290879821777344, 0.07282892608642579, 0.07282892608642579, 0.07281980895996094, 0.07285043334960938, 0.07285116577148437, 0.07351634979248046, 0.07387974548339844, 0.07367654418945313, 0.07289670562744141, 0.0734621124267578, 0.07395164489746094, 0.07355152130126953, 0.07295011138916016, 0.07294780731201173, 0.07291817474365235, 0.07291776275634766, 0.07295292663574218, 0.07294454193115234, 0.07301529693603516, 0.07423545837402344, 0.07354412841796874, 0.07319964599609376, 0.07362147521972656, 0.0741560287475586, 0.07365631866455079, 0.08501404571533203, 0.07446761322021485, 0.073740478515625, 0.07335430145263672, 0.0727965087890625, 0.07270870208740235, 0.07271424102783203, 0.07275910186767579, 0.07273244476318359, 0.07278633880615235, 0.07271427154541016, 0.07271820831298828, 0.07268486022949219, 0.073253662109375, 0.07383049774169922, 0.07352921295166015, 0.07404956817626954, 0.07343647766113282, 0.07296275329589844, 0.07277740478515625, 0.07274345397949218, 0.07272630310058593, 0.07393807983398437, 0.07360393524169923, 0.07319142150878906, 0.07282217407226563, 0.07290326690673828, 0.07285350036621094, 0.07318553924560547, 0.0739400634765625, 0.0735382080078125, 0.07290803527832031, 0.07283366394042969, 0.07286707305908204, 0.07332899475097657, 0.07399177551269531, 0.07353635406494141, 0.07292691040039062, 0.07281705474853516, 0.07405721282958984, 0.07357286071777344, 0.07318319702148438, 0.07429737854003907, 0.07372697448730468, 0.07345069122314453, 0.07288742065429688, 0.07285625457763673, 0.07295999908447266, 0.07291398620605469, 0.07321670532226562, 0.07406960296630859, 0.07358326721191406, 0.0730684814453125, 0.0729969253540039, 0.07299686431884765, 0.07363545227050781, 0.07396784210205078, 0.07356022644042969, 0.07290985870361329, 0.07296063995361328, 0.07299721527099609, 0.07301119995117188, 0.07435878753662109, 0.08547129821777344, 0.07448985290527343, 0.07370547485351563, 0.0732805404663086, 0.0727286376953125, 0.07270195007324219, 0.07274588775634766, 0.07267123413085938, 0.07269993591308593, 0.07278307342529297, 0.07279488372802734, 0.07275071716308594, 0.07279603576660157, 0.07404927825927735, 0.0738414077758789, 0.07418870544433594, 0.07378339385986328, 0.0734516830444336, 0.07304585266113281, 0.07287967681884766, 0.07274950408935547, 0.07274870300292968, 0.07278217315673828, 0.07285759735107422, 0.07290470123291015, 0.07301308441162109, 0.07411427307128907, 0.07345657348632813, 0.07408992004394531, 0.07376713562011719, 0.07349897766113281, 0.07354777526855469, 0.07321600341796874, 0.07291289520263672, 0.07289647674560547, 0.07313516998291016, 0.07332963562011718, 0.07319961547851563, 0.07308067321777344, 0.07335747528076172, 0.07451446533203125, 0.07389734649658203, 0.07342960357666016, 0.07418895721435546, 0.07372345733642578, 0.07330441284179688, 0.07289440155029298, 0.0728453140258789, 0.07287142181396485, 0.07336505889892578, 0.07419999694824218, 0.07377241516113281, 0.07357430267333985, 0.07303855895996093, 0.07366451263427734, 0.07408025360107422, 0.07379350280761719, 0.07368851470947266, 0.07427337646484375, 0.07370108795166015, 0.07313436889648438, 0.07293746948242187, 0.07290265655517578, 0.08678176116943359, 0.07466476440429687, 0.07372412872314453, 0.07345257568359374, 0.07274163055419922, 0.07265280151367187, 
0.0727162857055664, 0.0727388153076172, 0.07277568054199218, 0.07269964599609376, 0.0727267837524414, 0.07280976104736328, 0.07309347534179687, 0.07438934326171875, 0.0742896957397461, 0.07382220458984375, 0.07349862670898437, 0.07351686096191407, 0.07347116851806641, 0.07310028839111328, 0.07282688140869141, 0.07280550384521485, 0.07276976013183593, 0.07285228729248047, 0.07280006408691406, 0.07327337646484375, 0.07387570953369141, 0.07380512237548828, 0.0740315170288086, 0.07349251556396484, 0.07323347473144531, 0.07410556793212891, 0.07357603454589844, 0.07313846588134766, 0.07286396789550781, 0.072844482421875, 0.07283625793457031, 0.07293724822998048, 0.07285568237304688, 0.0741559066772461, 0.07361331176757813, 0.0732590103149414, 0.07417036437988281, 0.07378883361816406, 0.07410060882568359, 0.07358739471435546, 0.07313398742675781, 0.0731045150756836, 0.0729692153930664, 0.07295507049560547, 0.07300994873046875, 0.07343682861328125, 0.07398207855224609, 0.07370527648925781, 0.07347654724121094, 0.0742318115234375, 0.07366025543212891, 0.07366585540771485, 0.07424454498291015, 0.0737591323852539, 0.07323670196533204, 0.07294454193115234, 0.07300732421875, 0.08560435485839844, 0.07458611297607422, 0.07373197174072266, 0.073334716796875, 0.07272438049316406, 0.07272061157226563, 0.0727492446899414, 0.07275885009765624, 0.07276534271240234, 0.0727259521484375, 0.07276841735839844, 0.07274697875976563, 0.07363292694091797, 0.07478479766845703, 0.07415647888183594, 0.07372569274902344, 0.07345161437988282, 0.07342899322509766, 0.07345024108886719, 0.07303350067138673, 0.07278601837158204, 0.07279206085205078, 0.0728391342163086, 0.07283888244628907, 0.07290467071533203, 0.07403759765625, 0.07358787536621093, 0.07375347137451171, 0.07433987426757813, 0.07370591735839843, 0.07337920379638672, 0.07446492767333984, 0.07395222473144532, 0.07353343963623046, 0.07288435363769531, 0.07288591766357422, 0.07291641235351562, 0.07292108917236328, 0.07367369842529296, 0.0741495361328125, 0.07361142730712891, 0.07344322967529297, 0.07408035278320313, 0.07358287811279297, 0.07372128295898438, 0.07408268737792968, 0.07360297393798829, 0.07311347198486329, 0.07312992095947266, 0.07299228668212891, 0.07296681976318359, 0.07352896118164062, 0.07426815795898438, 0.07357721710205078, 0.0737485122680664, 0.07418492889404296, 0.0736065902709961, 0.07386771392822265, 0.07404716491699219, 0.07364173126220704, 0.07302371215820312, 0.07297264099121094, 0.07302492523193359, 0.08499644470214844, 0.07464620971679688, 0.07379212951660157, 0.07348668670654297, 0.07280461120605469, 0.07278157043457031, 0.07273878479003906, 0.07268966674804687, 0.07274502563476562, 0.07277359771728516, 0.0727715835571289, 0.07400265502929687, 0.0735250244140625, 0.07410185241699219, 0.07482470703125, 0.07487062072753906, 0.07358477020263672, 0.07334015655517578, 0.07285810852050781, 0.07285369873046875, 0.07404764556884766, 0.07357360076904297, 0.07316738891601562, 0.07281375885009765, 0.07282371520996093, 0.07303372955322265, 0.07416143798828125, 0.07393762969970703, 0.07454310607910156, 0.07405567932128906, 0.07376691436767578, 0.07303577423095703, 0.07291725158691406, 0.07318038177490234, 0.07400847625732422, 0.07352925109863281, 0.07298735809326172, 0.07292076873779296, 0.07300048065185546, 0.07422441864013672, 0.07383385467529296, 0.07421014404296875, 0.07363565063476563, 0.07360208129882813, 0.07353030395507812, 0.0733016357421875, 0.07356454467773438, 0.0740126724243164, 0.07358668518066407, 0.07291903686523438, 
0.07291693115234375, 0.07365382385253906, 0.07395926666259765, 0.07364630126953126, 0.07364857482910156, 0.07401062774658203, 0.0735432357788086, 0.07358303833007812, 0.07396332550048829, 0.07369939422607422, 0.07308937835693359, 0.07352646636962891, 0.07404399871826171, 0.08476611328125, 0.07435903930664063, 0.07362185668945312, 0.07328553771972657, 0.07294371032714844, 0.07282278442382813, 0.07280652618408204, 0.072746337890625, 0.07272297668457031, 0.07272857666015625, 0.07290675354003906, 0.07280230712890624, 0.07390035247802734, 0.07549059295654297, 0.07430799865722656, 0.07454662322998047, 0.07394579315185547, 0.0734637451171875, 0.07289644622802735, 0.07283609771728515, 0.07292825317382813, 0.07284326171875, 0.07281868743896484, 0.07282268524169921, 0.07290070343017578, 0.07367475128173828, 0.07432707214355469, 0.0739949722290039, 0.07490790557861328, 0.07419187164306641, 0.07357283020019531, 0.07320015716552734, 0.07292105865478515, 0.07282460784912109, 0.07284716796875, 0.07288262176513671, 0.07285775756835937, 0.07295900726318359, 0.07457625579833985, 0.07398812866210938, 0.07387904357910156, 0.07447376251220703, 0.07430582427978516, 0.07394751739501954, 0.07363990020751954, 0.07360934448242187, 0.07360704040527344, 0.07317830657958985, 0.07293421173095703, 0.07293660736083984, 0.07294857788085937, 0.07333273315429688, 0.07435775756835937, 0.07452365112304687, 0.07438294219970704, 0.07401123046875, 0.07372383880615234, 0.07367052459716797, 0.07367884826660157, 0.073793212890625, 0.07362387084960938, 0.07360307312011719, 0.07314173126220704, 0.0854480972290039, 0.07432879638671876, 0.07367014312744141, 0.07335539245605469, 0.0728062744140625, 0.07276377868652344, 0.07291670227050781, 0.07276723480224609, 0.07273948669433594, 0.07312358093261719, 0.07340460968017579, 0.0728371810913086, 0.07404118347167969, 0.07482998657226562, 0.07448735809326172, 0.0742504653930664, 0.07415216064453126, 0.07385906982421875, 0.07340163421630859, 0.07401955413818359, 0.07360717010498047, 0.073197021484375, 0.07408284759521484, 0.07358464050292969, 0.07313203430175781, 0.0741171875, 0.07361519622802734, 0.07316015625, 0.07337868499755859, 0.07408614349365235, 0.07357357025146484, 0.073735107421875, 0.07395667266845703, 0.0735544662475586, 0.07412099456787109, 0.073609375, 0.07321929931640625, 0.0741119384765625, 0.07359180450439454, 0.07317190551757813, 0.07467826843261718, 0.07367401885986329, 0.0732916488647461, 0.07339299011230468, 0.0741212158203125, 0.07356845092773437, 0.07351251220703126, 0.07425459289550781, 0.07362076568603515, 0.07333657836914062, 0.0745063705444336, 0.07382921600341796, 0.07340207672119141, 0.0742314224243164, 0.07358057403564452, 0.0731095962524414, 0.07333907318115235, 0.07419939422607422, 0.07369219207763672, 0.07388361358642578, 0.07422383880615234, 0.07362435150146485, 0.07352249908447266]",tokens/s,13.600331447468678,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,14695.358464,7853.703168,0.0,7451.181056,7445.507072,s,1,32.504814453125,32.504814453125,0.0,32.504814453125,32.504814453125,32.504814453125,32.504814453125,[32.504814453125],,kWh,0.0007451544039333081,8.218871593631659e-05,0.000255848260234004,0.0010831913801036285,,MB,1434.308608,8004.698112,0.0,7581.20448,7570.843648,s,10,1.2741542739868164,0.12741542739868164,0.0010960168902025057,0.12754068756103515,0.12856558685302735,0.12861909713745118,0.12866190536499023,"[0.12818144226074218, 0.12855369567871094, 0.1277715835571289, 0.12648799896240234, 0.12679122924804687, 0.128672607421875, 0.1273097915649414, 0.12687862396240235, 0.12848538208007812, 0.12502191925048828]",tokens/s,2009.175852771568,kWh,3.7281525262657736e-06,4.1115019253871045e-07,2.471200992430527e-06,6.610503711235011e-06,tokens/kWh,38726247.07326163,MB,1456.259072,8017.281024,0.0,7593.787392,7514.46784,s,10,75.77573535156252,7.57757353515625,0.03393330820249127,7.580833984375,7.61707626953125,7.62138994140625,7.624840878906251,"[7.62570361328125, 7.61611767578125, 7.59520703125, 7.5763017578125, 7.59208349609375, 7.576779296875, 7.584888671875, 7.57276025390625, 7.5171005859375, 7.51879296875]",tokens/s,8.31400707729337,kWh,0.0002207612952262328,2.435103414717294e-05,0.00010075057602936972,0.0003458629054027755,tokens/kWh,182153.09886046668,,s,630,75.7725807037353,0.12027393762497673,0.001286677127173258,0.12018638610839844,0.12127783203125,0.12209627723693847,0.12554023155212402,"[0.11987862396240234, 0.12045011138916016, 0.12017955017089844, 0.12055757141113281, 0.12041216278076172, 0.12073062133789063, 0.12011417388916015, 0.12138905334472656, 0.12013164520263672, 0.12022777557373047, 0.12021884918212891, 0.1206976318359375, 0.1202503662109375, 0.12092825317382813, 0.1205923843383789, 0.12110201263427735, 0.12028345489501953, 0.12345670318603516, 0.12046601867675781, 0.12087113952636719, 0.12051455688476563, 0.13022976684570312, 0.1239334716796875, 0.12042211151123047, 0.12081129455566406, 0.12024630737304688, 0.12061305236816407, 0.1205860824584961, 0.12055484771728515, 0.1200914535522461, 0.1223720932006836, 0.12040601348876953, 0.12082937622070312, 0.12044086456298828, 0.12002973175048828, 0.12001894378662109, 0.1208031997680664, 0.12082803344726563, 0.1209325408935547, 0.12037251281738282, 0.12413977813720703, 0.12074626922607422, 0.12097090911865234, 0.12046969604492187, 0.1206580810546875, 0.12035072326660157, 0.121065185546875, 0.12120230102539062, 0.12023430633544922, 0.12125628662109375, 0.12060892486572265, 0.12028707122802734, 0.12053427124023437, 0.12092066955566406, 0.12210511779785156, 0.12437206268310547, 0.12116764831542969, 0.12140953826904297, 0.12062249755859375, 0.12050492858886719, 0.12122930908203125, 0.12103065490722656, 0.12151103973388672, 0.1195145263671875, 0.12005235290527344, 0.12001065826416016, 
0.12028854370117187, 0.11983955383300782, 0.1216447982788086, 0.1208198699951172, 0.12092211151123047, 0.12028518676757813, 0.12128050994873046, 0.12065177917480469, 0.12018278503417969, 0.12048793792724609, 0.12039679718017578, 0.12066099548339844, 0.12073779296875, 0.12043264007568359, 0.12004351806640624, 0.12194927978515625, 0.12092713928222656, 0.12085411071777344, 0.12085289764404297, 0.12048588562011718, 0.12085257720947265, 0.120270751953125, 0.12010486602783203, 0.12176541137695313, 0.12043321228027344, 0.12046896362304688, 0.1212093734741211, 0.12107488250732422, 0.1220513916015625, 0.12148636627197265, 0.12128355407714844, 0.12185785675048828, 0.12109593963623047, 0.12585779571533204, 0.12122361755371094, 0.1213248291015625, 0.12105801391601563, 0.12759449768066405, 0.121059326171875, 0.12077260589599609, 0.12023808288574218, 0.12011257934570313, 0.12015673828125, 0.12040396881103516, 0.12048790740966797, 0.12063337707519531, 0.12061695861816406, 0.1200005111694336, 0.12028518676757813, 0.1201065902709961, 0.1210433578491211, 0.12015821075439453, 0.11954994964599609, 0.12035481262207032, 0.12045721435546874, 0.12033638763427734, 0.12382335662841797, 0.12025923156738282, 0.12042256164550781, 0.1201858901977539, 0.1194250259399414, 0.11951254272460937, 0.11961398315429687, 0.11939862060546876, 0.12089699554443359, 0.12053705596923828, 0.12063772583007812, 0.12009273529052734, 0.1196131820678711, 0.119838623046875, 0.12014425659179688, 0.11944956970214844, 0.12076236724853516, 0.12003097534179688, 0.12023996734619141, 0.12016476440429688, 0.12001074981689454, 0.12042406463623047, 0.1213031005859375, 0.12087123107910157, 0.12125593566894531, 0.12042205047607422, 0.12056816101074219, 0.12070272064208984, 0.12162493133544922, 0.12123104095458985, 0.12076054382324218, 0.1200202865600586, 0.12098387145996094, 0.11991702270507812, 0.12025142669677734, 0.12112973022460938, 0.12048601531982422, 0.12074188995361328, 0.12067750549316406, 0.12022259521484376, 0.1203016357421875, 0.12114717102050782, 0.12019728088378906, 0.12029337310791016, 0.12013299560546875, 0.12041471862792968, 0.12131545257568359, 0.12079718780517579, 0.11971193695068359, 0.12019647979736328, 0.11969376373291016, 0.11978857421875, 0.12021654510498046, 0.11996288299560547, 0.12229299163818359, 0.12231680297851563, 0.12011315155029297, 0.12088524627685547, 0.12030361938476562, 0.12347821044921875, 0.12045113372802735, 0.12052249908447266, 0.12035481262207032, 0.12557698822021485, 0.12082575988769531, 0.12008796691894531, 0.11956521606445313, 0.1192344970703125, 0.11956192016601562, 0.1199632339477539, 0.12043746948242187, 0.11963801574707031, 0.11940630340576172, 0.11930038452148438, 0.11981330871582031, 0.11974329376220703, 0.12055757141113281, 0.11959705352783204, 0.1206783676147461, 0.11999644470214844, 0.11989810943603516, 0.12136592102050782, 0.12021820831298828, 0.11996585845947266, 0.12018994903564453, 0.12047459411621093, 0.12011824035644532, 0.1202441635131836, 0.12042134094238281, 0.11917107391357422, 0.11951103973388671, 0.11954895782470704, 0.11999139404296875, 0.1199205093383789, 0.12005171203613281, 0.12046329498291015, 0.12061443328857421, 0.11964470672607422, 0.11959295654296875, 0.1192440643310547, 0.1195584945678711, 0.12004185485839844, 0.12028543853759766, 0.1214169921875, 0.12222271728515625, 0.12087852478027344, 0.12053091430664062, 0.1200997772216797, 0.12221644592285157, 0.12008972930908203, 0.12149235534667968, 0.12063744354248047, 0.12018688201904297, 0.12103180694580078, 0.11992153930664062, 
0.11960066986083985, 0.11958319854736328, 0.1202391357421875, 0.12051554870605469, 0.12077260589599609, 0.12208547210693359, 0.12108364868164062, 0.1204266586303711, 0.12060652923583984, 0.12076774597167969, 0.12007315063476562, 0.12038329315185547, 0.11987782287597656, 0.12077225494384766, 0.12002738952636718, 0.11863526153564453, 0.11898201751708984, 0.11972201538085937, 0.12019356536865235, 0.11987763214111329, 0.11991881561279297, 0.12018870544433594, 0.11949215698242187, 0.1195032958984375, 0.11970150756835937, 0.12081356811523437, 0.11958425903320312, 0.12055187225341797, 0.11980579376220703, 0.12469475555419922, 0.12116957092285156, 0.1204975357055664, 0.11988272094726563, 0.1195849609375, 0.1202594223022461, 0.12334998321533203, 0.12017052459716797, 0.12059795379638671, 0.1208918685913086, 0.11993446350097656, 0.12000240325927734, 0.12015894317626953, 0.12053913879394532, 0.1204591064453125, 0.12027305603027344, 0.12059648132324219, 0.12084786987304688, 0.11989859008789062, 0.1214807357788086, 0.12026112365722656, 0.12020454406738282, 0.12037811279296876, 0.1219583969116211, 0.12017664337158203, 0.12063772583007812, 0.12058370971679687, 0.11991840362548828, 0.12471129608154297, 0.119642333984375, 0.11956816101074219, 0.12012134552001953, 0.11984207916259766, 0.12031459045410156, 0.12262108612060547, 0.1208903045654297, 0.1203969268798828, 0.12000950622558594, 0.11994886779785156, 0.120244384765625, 0.12036329650878906, 0.12088524627685547, 0.12119449615478516, 0.12085453033447266, 0.12047769927978516, 0.12013369750976563, 0.12038751983642579, 0.12061901092529297, 0.12216422271728515, 0.1197574691772461, 0.11973216247558593, 0.11994732666015626, 0.11981414031982422, 0.11883328247070313, 0.11968889617919921, 0.12000070190429687, 0.12013382720947266, 0.12067148590087891, 0.1200830078125, 0.12010495758056641, 0.11985708618164062, 0.12002925109863281, 0.12049116516113281, 0.12019593811035156, 0.11992034912109376, 0.12024777221679687, 0.12021228790283203, 0.11968064117431641, 0.1199593276977539, 0.11937852478027344, 0.11979174041748047, 0.11990573120117187, 0.12013423919677735, 0.120321533203125, 0.12026089477539062, 0.1198512954711914, 0.119783203125, 0.12013158416748047, 0.12198313903808594, 0.12097110748291015, 0.12039933013916015, 0.11949520111083985, 0.12529417419433594, 0.12089590454101562, 0.12028237152099609, 0.121697021484375, 0.11983859252929688, 0.119607421875, 0.12024118041992188, 0.12016047668457032, 0.12100054168701171, 0.12012281799316406, 0.11941961669921874, 0.11918131256103516, 0.11981951904296875, 0.11988642883300782, 0.11980524444580078, 0.11972898864746094, 0.12004351806640624, 0.12019097900390625, 0.1203936996459961, 0.12179254150390625, 0.12072755432128907, 0.12051776123046876, 0.12035775756835937, 0.12046131134033203, 0.1207437744140625, 0.12064374542236328, 0.12023104095458985, 0.12016079711914063, 0.12007167816162109, 0.12136441802978516, 0.11998703765869141, 0.12040601348876953, 0.12048992156982422, 0.11985679626464844, 0.11919197082519531, 0.1192591323852539, 0.11961305236816407, 0.11970310211181641, 0.11950070190429687, 0.11979663848876954, 0.12041011047363281, 0.12103641510009766, 0.12030828857421876, 0.12022322845458984, 0.1194796142578125, 0.12025711822509766, 0.12036563110351563, 0.12134130859375, 0.12062911987304688, 0.12024797058105469, 0.12015302276611328, 0.1197973403930664, 0.11974848175048829, 0.1194706268310547, 0.1200552978515625, 0.11974297332763671, 0.12009439849853516, 0.12006444549560547, 0.12031986999511719, 0.11978684997558593, 
0.1197410888671875, 0.1197709732055664, 0.11948457336425782, 0.11979293060302734, 0.12009974670410156, 0.1205401611328125, 0.1208631362915039, 0.12076483154296876, 0.1200025634765625, 0.11943465423583985, 0.11996835327148438, 0.12041011047363281, 0.12000870513916016, 0.12017561340332031, 0.12103884887695313, 0.12450918579101562, 0.11989952087402343, 0.11962579345703125, 0.12010105895996094, 0.12667533111572266, 0.12030361938476562, 0.12008038330078125, 0.12449791717529297, 0.12131267547607422, 0.12028800201416015, 0.12028240203857422, 0.12063533020019532, 0.12048838043212891, 0.12091410827636719, 0.12063455963134766, 0.12065670776367188, 0.12029337310791016, 0.11995661163330078, 0.11910959625244141, 0.1195025634765625, 0.11972351837158203, 0.11997267150878907, 0.12050016021728516, 0.12015827178955078, 0.12021759796142578, 0.12133171081542969, 0.12040396881103516, 0.11990863800048829, 0.11984425354003907, 0.11995782470703124, 0.12029542541503906, 0.12056185913085937, 0.12023612976074219, 0.11950796508789062, 0.12407830047607422, 0.11924320220947265, 0.11951725006103515, 0.11939820861816407, 0.12024217224121093, 0.12048118591308593, 0.12059318542480468, 0.11981362915039062, 0.11945011138916016, 0.11921817779541016, 0.11972627258300782, 0.11980182647705079, 0.12039766693115235, 0.12065993499755859, 0.12071702575683593, 0.12089071655273438, 0.12007113647460937, 0.1199595184326172, 0.12014390563964844, 0.12018278503417969, 0.12139110565185547, 0.12027606201171875, 0.11998505401611329, 0.12000665283203125, 0.11954380798339843, 0.11939430236816406, 0.11919347381591797, 0.11934518432617187, 0.11970095825195312, 0.11981651306152344, 0.12151548767089844, 0.12048185729980469, 0.12011599731445312, 0.12031180572509766, 0.12020326232910156, 0.12022930908203125, 0.12024700927734375, 0.12045638275146485, 0.12185596466064454, 0.12099158477783203, 0.12046217346191407, 0.12002098846435547, 0.1199570541381836, 0.1202806396484375, 0.12039462280273437, 0.119947265625, 0.12045311737060548, 0.11973283386230468, 0.12000665283203125, 0.11965408325195312, 0.11948268890380859, 0.12867788696289062, 0.12014316558837891, 0.11879023742675782, 0.127344482421875, 0.11918617248535156, 0.11881865692138673, 0.11881488037109375, 0.11842912292480469, 0.1187149429321289, 0.11855257415771485, 0.11876112365722656, 0.11883926391601562, 0.11932505798339844, 0.1186344985961914, 0.11832505798339844, 0.11829488372802735, 0.11854617309570313, 0.1180584945678711, 0.11812866973876954, 0.11879481506347657, 0.11864678192138672, 0.11946534729003906, 0.12271065521240235, 0.11884544372558593, 0.11857081604003906, 0.11915628814697266, 0.11859008026123047, 0.11866278076171875, 0.1187720947265625, 0.11985100555419922, 0.11902713775634766, 0.11905696105957031, 0.11909324645996094, 0.11899827575683594, 0.11890764617919922, 0.118181884765625, 0.11875516510009766, 0.12230643463134766, 0.11927350616455078, 0.1188001937866211, 0.11851763153076172, 0.11864031982421876, 0.11886243438720703, 0.11893977355957032, 0.11845024108886719, 0.11951522827148438, 0.11955120086669922, 0.11918211364746094, 0.11843507385253907, 0.11785929870605469, 0.11874102020263672, 0.12082879638671876, 0.11888642883300782, 0.11927843475341797, 0.11921775817871094, 0.11889295959472657, 0.11898035430908203, 0.11804083251953125, 0.11826175689697266, 0.11732546997070313, 0.11773174285888671, 0.11868473815917968, 0.11848588562011719, 0.11829561614990235, 0.11813779449462891, 0.11812025451660156, 0.11861625671386719, 0.11865644836425782, 0.1188787841796875, 0.11959629058837891, 
0.11960713958740235, 0.11899513244628906, 0.1189191665649414, 0.11858815765380859, 0.11888829040527343, 0.11878179168701172, 0.11849449920654297, 0.11889965057373048, 0.11930335998535156, 0.11879484558105469, 0.11844432067871094, 0.11827597045898437, 0.11837452697753906, 0.12127753448486328, 0.11923244476318359, 0.11883004760742187, 0.12545024108886718, 0.12044493103027344, 0.11969945526123046, 0.11972969818115234, 0.12026656341552734, 0.11961583709716797, 0.11950726318359375, 0.11944960021972656, 0.12079923248291016, 0.1185269775390625, 0.11922943878173828, 0.11867135620117188, 0.11881676483154296, 0.11881683349609375, 0.11869792175292969, 0.11846060943603516, 0.11921302032470703, 0.12088829040527344, 0.12062652587890625, 0.12288787078857422, 0.11957286071777344, 0.11929440307617188, 0.11948035430908203, 0.11947132873535156, 0.11908809661865234, 0.11943411254882813, 0.1195836181640625, 0.11964387512207031, 0.1197910385131836, 0.11926211547851563, 0.11927299499511719, 0.12003907012939453, 0.11914323425292969, 0.12021756744384765, 0.119140380859375, 0.12001280212402343]",tokens/s,8.31435321522503,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,877.969408,6477.971456,0.0,6075.449344,6044.13184,s,1,16.382861328125,16.382861328125,0.0,16.382861328125,16.382861328125,16.382861328125,16.382861328125,[16.382861328125],,kWh,0.00022474943417499842,2.478418804404335e-05,7.37447812180006e-05,0.0003232784034370424,,MB,1413.402624,7002.259456,0.0,6587.154432,6469.997056,s,10,10.500339721679687,1.0500339721679688,0.004072959387086454,1.0505767822265626,1.0533483520507811,1.0546319396972657,1.055658809814453,"[1.039845458984375, 1.0480531005859375, 1.048997802734375, 1.0487259521484376, 1.050087890625, 1.0527032470703126, 1.0518819580078125, 1.051065673828125, 1.0530631103515624, 1.05591552734375]",tokens/s,243.80163574274238,kWh,3.0558921268748234e-05,3.370068941438215e-06,2.0278099555800373e-05,5.420708976598683e-05,tokens/kWh,4722629.4771617055,MB,1438.072832,7014.842368,0.0,6599.737344,6469.999616,s,10,50.35640283203124,5.035640283203125,0.010476999126782918,5.040569580078126,5.045694384765625,5.048056665039062,5.049946489257812,"[5.02327392578125, 5.02011328125, 5.0209091796875, 5.0302431640625, 5.041775390625, 5.04192529296875, 5.03936376953125, 5.04321044921875, 5.04516943359375, 5.0504189453125]",tokens/s,12.510822151086272,kWh,0.00014741810006583405,1.6261426392409403e-05,9.773055040659881e-05,0.00026141007686484224,tokens/kWh,241000.65596389808,,s,630,50.35390933990483,0.07992684022207108,0.0016263087462490171,0.07978440093994141,0.08073017425537109,0.08100840187072754,0.0907986516571045,"[0.09009795379638671, 0.07983679962158204, 0.07913139343261719, 0.07940096282958985, 0.07815782165527344, 0.07822940826416015, 0.0782193603515625, 0.078220703125, 0.07957769775390625, 0.07882659149169922, 0.07983197021484376, 0.08048767852783204, 
0.08144767761230469, 0.08093901062011719, 0.08079154968261719, 0.08019149017333985, 0.07952384185791016, 0.07915110778808594, 0.079005859375, 0.07858966064453125, 0.07826377868652344, 0.07829106903076172, 0.07826057434082032, 0.07906118774414063, 0.07957504272460937, 0.07974419403076172, 0.07996720123291015, 0.07966089630126953, 0.07918348693847656, 0.0800475845336914, 0.07966572570800781, 0.07997679901123046, 0.07953350067138672, 0.08033302307128906, 0.08031407928466797, 0.0803623046875, 0.07994963073730468, 0.0795994873046875, 0.079312255859375, 0.07915801239013671, 0.0786961898803711, 0.07883392333984375, 0.07956275177001954, 0.07912652587890626, 0.07924726104736328, 0.07979769897460938, 0.07948070526123047, 0.0796363525390625, 0.07944694519042969, 0.07950335693359376, 0.07945996856689454, 0.07953548431396484, 0.0791800308227539, 0.0799176025390625, 0.07961417388916016, 0.08055385589599609, 0.07976080322265625, 0.08048108673095702, 0.08061532592773438, 0.08028470611572265, 0.08044847869873047, 0.08010079956054687, 0.07981728363037109, 0.09040691375732422, 0.0797610855102539, 0.07901830291748047, 0.07853782653808594, 0.07948790740966796, 0.07969586944580079, 0.0803265609741211, 0.07986585235595703, 0.07914710235595702, 0.07866770935058594, 0.07820889282226562, 0.07871084594726563, 0.07962582397460938, 0.07911068725585937, 0.07914701080322266, 0.07920845031738281, 0.07920025634765625, 0.07917183685302734, 0.07912793731689453, 0.07920883178710937, 0.07929792022705077, 0.07991910552978515, 0.07993801879882813, 0.0795792007446289, 0.08020182037353515, 0.0802467498779297, 0.08029801940917969, 0.07980409240722657, 0.07908924865722657, 0.07864940643310547, 0.07845935821533204, 0.0797984619140625, 0.07912857818603515, 0.07913881683349609, 0.07978803253173829, 0.07934976196289062, 0.07927808380126954, 0.07933132934570312, 0.07939385223388672, 0.07943023681640625, 0.07934307098388672, 0.07934239959716796, 0.07907743835449219, 0.08021520233154297, 0.07974950408935547, 0.08026963043212891, 0.08020543670654297, 0.08066063690185547, 0.08035775756835938, 0.08039337921142578, 0.08001417541503907, 0.08047615814208985, 0.07976691436767579, 0.07922525024414062, 0.0788005142211914, 0.07908207702636719, 0.07964672088623047, 0.07915110778808594, 0.07935590362548828, 0.08024269104003906, 0.07956438446044922, 0.07960368347167969, 0.07954886627197266, 0.0909136962890625, 0.07975030517578124, 0.07904700469970703, 0.07863346862792969, 0.07813680267333985, 0.0781542739868164, 0.07813894653320312, 0.07817670440673828, 0.07852256011962891, 0.07825785827636719, 0.07970623779296875, 0.0811021728515625, 0.0820898208618164, 0.08072480010986328, 0.08011571502685547, 0.07954227447509765, 0.07963775634765625, 0.0793094711303711, 0.08006665802001953, 0.08013414764404297, 0.08029103851318359, 0.07992985534667969, 0.07916777801513672, 0.07878041839599609, 0.07830118560791016, 0.07910118103027344, 0.07952051544189453, 0.07923856353759766, 0.07929007720947266, 0.07934649658203125, 0.07928793334960937, 0.0796626205444336, 0.080163330078125, 0.07976796722412109, 0.07927808380126954, 0.07892889404296875, 0.07864217376708985, 0.08010342407226563, 0.07989862060546875, 0.08035737609863282, 0.08026515197753906, 0.08032588958740235, 0.08036831665039063, 0.07983731079101562, 0.07996131134033203, 0.07966799926757813, 0.07933539581298828, 0.07894348907470704, 0.07899830627441407, 0.07982508850097657, 0.0793268814086914, 0.07991670227050782, 0.07929523468017578, 0.07986764526367188, 0.07938585662841798, 0.0797330551147461, 
0.07946694183349609, 0.07943539428710937, 0.0794361572265625, 0.07958649444580078, 0.07923795318603516, 0.07938422393798829, 0.07985587310791016, 0.09231692504882813, 0.07994854736328125, 0.07913369750976562, 0.07868927764892578, 0.07813938903808594, 0.07815068817138672, 0.0781055679321289, 0.07957504272460937, 0.07924121856689453, 0.0800747528076172, 0.08013961791992187, 0.07988108825683594, 0.0813175048828125, 0.08137248229980469, 0.080851806640625, 0.07975116729736328, 0.07903641510009765, 0.07857376098632812, 0.07821907043457031, 0.07826214599609375, 0.07862035369873047, 0.07943212890625, 0.07892787170410157, 0.07948393249511719, 0.07995254516601563, 0.08011766052246094, 0.07986217498779297, 0.07991897583007812, 0.07975030517578124, 0.07978697967529297, 0.07913235473632813, 0.07980473327636718, 0.07949517059326172, 0.08015462493896484, 0.08021334075927734, 0.08054589080810547, 0.0803904037475586, 0.08030035400390625, 0.07974015808105468, 0.08002336120605469, 0.07931737518310547, 0.07888060760498047, 0.07929654693603516, 0.07972319793701171, 0.07903437042236328, 0.07937433624267579, 0.0798427505493164, 0.07963091278076172, 0.0799101104736328, 0.07971228790283204, 0.07943836975097657, 0.07943523406982422, 0.07975593566894532, 0.07969213104248046, 0.07927085113525391, 0.08001824188232422, 0.07915853118896485, 0.08049238586425782, 0.0800551986694336, 0.08063385772705078, 0.08030374145507813, 0.0804389419555664, 0.08012374114990234, 0.09141382598876953, 0.08002185821533203, 0.08014396667480468, 0.07872137451171875, 0.07835276794433593, 0.07822688293457031, 0.07821539306640625, 0.07954783630371094, 0.07968566131591796, 0.07981759643554688, 0.07973241424560547, 0.08091481781005859, 0.08248928070068359, 0.08135014343261719, 0.08002611541748048, 0.07973897552490235, 0.07972054290771484, 0.08009900665283202, 0.07978182220458985, 0.08003398132324219, 0.07923436737060546, 0.07880537414550781, 0.07829535675048828, 0.07951097869873047, 0.08021868896484376, 0.07990006256103516, 0.0799115219116211, 0.07994496154785156, 0.07981542205810546, 0.07931257629394531, 0.07981702423095703, 0.0798678741455078, 0.07914035034179688, 0.07868470764160156, 0.07869235229492187, 0.08028364562988281, 0.08030604553222656, 0.08019558715820313, 0.07999225616455079, 0.08033145904541016, 0.08025007629394532, 0.08028189086914063, 0.08121218872070313, 0.08032431793212891, 0.0807936019897461, 0.07995590209960937, 0.08093292999267578, 0.08037737274169922, 0.07993801879882813, 0.07954431915283203, 0.07938623809814453, 0.07976383972167969, 0.07957708740234375, 0.07993753814697266, 0.08020169830322266, 0.0799185562133789, 0.079919677734375, 0.08022637176513672, 0.0799201889038086, 0.07936019134521484, 0.080312255859375, 0.0798686752319336, 0.0792063980102539, 0.09174160003662109, 0.07967603302001953, 0.07965692901611328, 0.07970390319824219, 0.08052662658691406, 0.0801370849609375, 0.07929264068603516, 0.07885318756103515, 0.07823798370361328, 0.07822354888916015, 0.07820057678222657, 0.07970444488525391, 0.08125657653808593, 0.08053555297851563, 0.07988396453857421, 0.07951801300048827, 0.07964262390136718, 0.07960364532470703, 0.07969554901123047, 0.07913446044921875, 0.07857392120361328, 0.0796690902709961, 0.07980841827392578, 0.0798274917602539, 0.0807520980834961, 0.0814966049194336, 0.08076841735839843, 0.08079363250732421, 0.08022688293457031, 0.08075059509277344, 0.08001945495605468, 0.07939686584472656, 0.07903782653808594, 0.07852611541748047, 0.07960800170898437, 0.07940995025634766, 0.08020095825195313, 
0.08016070556640625, 0.0800483856201172, 0.08010604858398437, 0.08002559661865234, 0.08002355194091797, 0.0799005126953125, 0.07917788696289063, 0.07884390258789062, 0.07922278594970703, 0.07972589111328125, 0.07949954986572266, 0.08042720031738282, 0.07976163482666015, 0.08033894348144531, 0.08038604736328125, 0.08007679748535156, 0.08026521301269532, 0.07992272186279296, 0.07931910705566406, 0.07984783935546876, 0.08001945495605468, 0.08007884979248046, 0.08112102508544922, 0.08041260528564453, 0.08043730926513672, 0.08044786834716797, 0.0905169906616211, 0.07988098907470703, 0.07905814361572265, 0.07854156494140625, 0.078127197265625, 0.07993539428710937, 0.08004521942138672, 0.08055232238769532, 0.07952166748046875, 0.07885798645019532, 0.07877235412597657, 0.07993417358398437, 0.08122492980957031, 0.0801794204711914, 0.07962461090087891, 0.07893622589111328, 0.07850393676757812, 0.07894425964355468, 0.07940096282958985, 0.08001757049560547, 0.07966057586669922, 0.07890946960449219, 0.0787561264038086, 0.08002515411376954, 0.08090672302246094, 0.08028924560546875, 0.08001792144775391, 0.0799676513671875, 0.0796760025024414, 0.07985574340820313, 0.0801923828125, 0.08081455993652344, 0.07990735626220703, 0.07950745391845702, 0.07902108764648437, 0.07953314971923828, 0.08015039825439453, 0.07985123443603516, 0.08022045135498047, 0.0797470703125, 0.07919849395751953, 0.07940681457519531, 0.07957504272460937, 0.07987200164794922, 0.07989475250244141, 0.07920822143554687, 0.07910195159912109, 0.07983417510986328, 0.08016108703613281, 0.0800540771484375, 0.08036637115478516, 0.08010099029541015, 0.07943939208984375, 0.07945075225830078, 0.08021836853027343, 0.0809717788696289, 0.0801976318359375, 0.08017305755615234, 0.08043724822998047, 0.08026432037353516, 0.08085366058349609, 0.0809969253540039, 0.081751708984375, 0.09186930847167969, 0.07993417358398437, 0.07917913818359375, 0.07864588928222656, 0.07821721649169922, 0.07819667053222656, 0.0795282211303711, 0.07971331024169923, 0.07975011444091797, 0.08005814361572265, 0.08011878204345703, 0.08027852630615234, 0.08190060424804688, 0.08132608032226563, 0.08101779174804688, 0.08007679748535156, 0.07911587524414063, 0.07872486114501953, 0.07832233428955078, 0.07956291198730468, 0.07890927886962891, 0.07869347381591797, 0.07953910064697266, 0.08014854431152343, 0.08002265930175781, 0.0804644775390625, 0.08047433471679688, 0.08017100524902344, 0.07976959991455078, 0.0789749755859375, 0.07875993347167969, 0.0800747528076172, 0.07989615631103515, 0.08021033477783203, 0.08095750427246094, 0.08083990478515625, 0.0807279052734375, 0.08086208343505859, 0.08145340728759766, 0.08090156555175781, 0.0799453125, 0.08009769439697266, 0.07968380737304688, 0.07917158508300781, 0.07867391967773438, 0.0796646728515625, 0.0794078369140625, 0.08028339385986329, 0.08017919921875, 0.08005632019042969, 0.08019926452636719, 0.08022991943359375, 0.08008377838134766, 0.08003964996337891, 0.07977200317382813, 0.07927808380126954, 0.07923276519775391, 0.07960765075683594, 0.07930921936035157, 0.08043929290771484, 0.08007472229003906, 0.07968921661376953, 0.08046800231933594, 0.09183026885986328, 0.07977907562255859, 0.07901055908203125, 0.07855481719970703, 0.07816223907470703, 0.07810662078857422, 0.07890739440917968, 0.07918931579589844, 0.07859884643554688, 0.0787364501953125, 0.07937308502197266, 0.0818951644897461, 0.0820956802368164, 0.08084566497802734, 0.07971635437011719, 0.07928012847900391, 0.07885414123535156, 0.07961599731445312, 0.07972156524658203, 
0.0800794906616211, 0.08022406768798829, 0.07953065490722656, 0.07969996643066406, 0.07986300659179688, 0.08014498901367187, 0.08061939239501953, 0.0803636474609375, 0.07975318145751953, 0.07896886444091797, 0.07898726654052735, 0.07974412536621094, 0.07909667205810547, 0.0796846694946289, 0.07964125061035156, 0.07913504028320313, 0.07992038726806641, 0.08028419494628906, 0.07993161773681641, 0.07993164825439453, 0.08001091003417969, 0.07980841827392578, 0.07941548919677735, 0.07896678161621094, 0.07992848205566407, 0.07925545501708985, 0.08062457275390625, 0.08003775787353516, 0.08039437103271484, 0.08087564849853515, 0.08075807952880859, 0.08096173095703126, 0.08053388977050781, 0.08044544219970703, 0.08043724822998047, 0.08029177856445313, 0.08084076690673828, 0.08060415649414063, 0.08093583679199219, 0.0804431381225586, 0.08016111755371094, 0.08036377716064454, 0.08077839660644531, 0.08018185424804687, 0.09226863861083984, 0.07981046295166015, 0.07913267517089843, 0.07861170959472656, 0.0789651870727539, 0.07921241760253907, 0.07872557067871094, 0.07857772827148438, 0.07933740997314453, 0.07881501007080079, 0.07882160186767578, 0.08096153259277344, 0.08265113830566406, 0.08085084533691406, 0.08018953704833984, 0.07964425659179687, 0.07888220977783203, 0.07990704345703124, 0.07981302642822266, 0.08010790252685547, 0.0806171875, 0.0797185287475586, 0.07941260528564453, 0.07927503967285156, 0.08004508972167969, 0.08015740966796875, 0.07992515563964844, 0.07999702453613282, 0.07960781097412109, 0.07912614440917969, 0.07954204559326172, 0.08007331085205079, 0.0793431396484375, 0.07967382049560547, 0.07915126037597656, 0.07978173065185547, 0.07982899475097656, 0.07977983856201172, 0.08039014434814454, 0.07994303894042969, 0.07993158721923828, 0.08005280303955079, 0.07960294342041016, 0.0798043212890625, 0.08002633666992187, 0.07957833862304688, 0.08020867156982422, 0.08045932769775391, 0.08050316619873046, 0.08062566375732422, 0.08050809478759766, 0.08041075134277344, 0.08160272216796875, 0.08037026977539062, 0.08035855865478515, 0.08109756469726563, 0.08089600372314452, 0.08047926330566406, 0.08068204498291015, 0.08080905914306641, 0.08036434936523437, 0.08018498992919922, 0.08094579315185547]",tokens/s,12.51144167868479,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,882.40128,6477.971456,0.0,6075.449344,6044.13184,s,1,16.3670693359375,16.3670693359375,0.0,16.3670693359375,16.3670693359375,16.3670693359375,16.3670693359375,[16.3670693359375],,kWh,0.00022743583187499706,2.5080634643177143e-05,7.445533734199913e-05,0.0003269718038601733,,MB,1381.449728,7002.259456,0.0,6587.154432,6469.997056,s,10,10.48437890625,1.048437890625,0.003429153526291769,1.0495554809570313,1.0516908691406248,1.0527063354492188,1.0535187084960937,"[1.04142919921875, 1.0447486572265625, 1.0452081298828124, 1.0495302734375, 1.0487425537109376, 1.0514652099609374, 
1.0498809814453125, 1.0537218017578125, 1.0500714111328124, 1.0495806884765626]",tokens/s,244.17278533055688,kWh,3.053086866666717e-05,3.3670404960575322e-06,2.0323738481200018e-05,5.422164764392472e-05,tokens/kWh,4721361.506407185,MB,1405.186048,7014.842368,0.0,6599.737344,6469.999616,s,10,50.405091796875,5.0405091796875,0.011056548022387995,5.0430498046875005,5.05389560546875,5.05392900390625,5.05395572265625,"[5.0208828125, 5.0261611328125, 5.03004541015625, 5.03851220703125, 5.03860107421875, 5.0478564453125, 5.05388818359375, 5.05396240234375, 5.04768359375, 5.04749853515625]",tokens/s,12.498737281122432,kWh,0.0001475052801279159,1.62698357199944e-05,9.76723559155998e-05,0.0002614474717635101,tokens/kWh,240966.1855785168,,s,630,50.402565246582085,0.08000407181997148,0.0015973632789853243,0.07985838317871094,0.08069083938598634,0.08102485733032226,0.09048430770874023,"[0.09023567962646484, 0.0799128646850586, 0.07913414764404297, 0.07864998626708984, 0.079593505859375, 0.0792616958618164, 0.08029388427734375, 0.08044121551513672, 0.07979779052734375, 0.07916400146484374, 0.07870214080810548, 0.07838662719726562, 0.07866220855712891, 0.0796819839477539, 0.07908147430419922, 0.07867359924316407, 0.07900192260742188, 0.07952178955078125, 0.07909980773925782, 0.07996742248535156, 0.07926876831054687, 0.07922064208984375, 0.07921395111083984, 0.08025743865966797, 0.07992556762695313, 0.0804554214477539, 0.07990297698974609, 0.0801436767578125, 0.07926649475097657, 0.07892774200439454, 0.07842009735107422, 0.07884339141845703, 0.07968134307861328, 0.07905535888671875, 0.07893030548095703, 0.07994051361083984, 0.07909264373779297, 0.0794439697265625, 0.07990605163574219, 0.0795041275024414, 0.07946240234375, 0.0794419174194336, 0.07916902160644532, 0.0799742431640625, 0.07999759674072265, 0.08039833831787109, 0.08006451416015625, 0.08049225616455079, 0.08045724487304687, 0.08046463775634766, 0.07965030670166015, 0.08000972747802734, 0.07925484466552735, 0.07888285064697266, 0.07857628631591797, 0.07953612518310547, 0.07976140594482421, 0.07927587127685547, 0.07941136169433594, 0.07996326446533203, 0.07947148895263671, 0.08006626892089844, 0.08019526672363281, 0.09190399932861328, 0.08002355194091797, 0.07921004486083984, 0.07878902435302734, 0.0782786865234375, 0.07832361602783203, 0.07829513549804687, 0.07838508605957031, 0.07967727661132812, 0.079327392578125, 0.07983238220214844, 0.0799178237915039, 0.08266957092285156, 0.08093081665039062, 0.08031161499023437, 0.07992384338378906, 0.08030547332763673, 0.0802086410522461, 0.07965695953369141, 0.07926579284667969, 0.0790302734375, 0.07868118286132812, 0.0783656005859375, 0.07855104064941407, 0.079779296875, 0.07937283325195313, 0.07930265808105469, 0.0793364486694336, 0.07925167846679687, 0.07938438415527344, 0.07933561706542969, 0.08055478668212891, 0.07986176300048828, 0.07918182373046875, 0.07980441284179687, 0.0795360336303711, 0.08001545715332031, 0.08003379058837891, 0.08039218902587891, 0.07993856048583985, 0.08003218841552734, 0.07939129638671875, 0.07991439819335938, 0.07941158294677735, 0.07875574493408204, 0.07982067108154296, 0.07950739288330078, 0.07913465881347656, 0.07965283203125, 0.07971695709228516, 0.07957462310791015, 0.079519775390625, 0.07941158294677735, 0.0794642562866211, 0.07948095703125, 0.07961811065673828, 0.07998995208740234, 0.07950978851318359, 0.07950569915771484, 0.07940326690673828, 0.08006246185302734, 0.0796785888671875, 0.08040444946289063, 0.0904742431640625, 0.07978419494628906, 0.0790487060546875, 
0.07867596435546875, 0.0796932830810547, 0.07925814056396484, 0.08006800079345704, 0.08018793487548828, 0.07968707275390625, 0.07916127777099609, 0.07907196807861328, 0.07866777801513672, 0.07960985565185547, 0.07920662689208985, 0.07913021087646484, 0.0791626205444336, 0.0789636459350586, 0.07914659118652344, 0.07954409790039063, 0.07929417419433593, 0.079174560546875, 0.07922688293457031, 0.07920947265625, 0.07924838256835938, 0.07999209594726563, 0.08023091125488281, 0.08028591918945313, 0.08031231689453125, 0.08024473571777344, 0.07992115020751953, 0.07985971069335937, 0.07908550262451172, 0.0786220474243164, 0.07934639739990235, 0.07959142303466797, 0.07910550689697265, 0.07917622375488281, 0.07970611572265625, 0.07936409759521484, 0.07943987274169922, 0.0793631362915039, 0.0793773422241211, 0.07931903839111328, 0.07956070709228516, 0.07939891052246094, 0.07937843322753907, 0.07942144012451172, 0.08012748718261718, 0.07974502563476563, 0.08055644989013672, 0.08047948455810547, 0.08058966064453126, 0.08039628601074218, 0.08044544219970703, 0.08051302337646485, 0.08062934112548828, 0.08025081634521485, 0.08074835205078125, 0.08056489562988281, 0.08010342407226563, 0.08012799835205078, 0.07998397064208984, 0.07970601654052735, 0.09045967864990234, 0.07978646087646485, 0.07906310272216797, 0.0786279067993164, 0.07959027099609375, 0.07977369689941406, 0.07947468566894532, 0.07890534210205079, 0.07833805084228515, 0.078276611328125, 0.07841990661621094, 0.07977375793457031, 0.08139571380615235, 0.0804163818359375, 0.07978841400146484, 0.07924495697021484, 0.07918946838378907, 0.07971520233154297, 0.07903622436523437, 0.07906937408447266, 0.07903641510009765, 0.07987513732910156, 0.08002655792236328, 0.08004937744140625, 0.08103810882568359, 0.08090828704833984, 0.08082227325439453, 0.08030003356933593, 0.08082774353027344, 0.08032937622070313, 0.07997622680664063, 0.07959983825683593, 0.07917922973632813, 0.07924790191650391, 0.0787166748046875, 0.07936595153808594, 0.07966259002685547, 0.079999267578125, 0.08028636932373047, 0.08032630157470703, 0.0802305908203125, 0.07969737243652343, 0.07974947357177735, 0.07957231903076172, 0.07913983917236328, 0.07928758239746093, 0.07977648162841797, 0.07929241943359375, 0.07944355010986329, 0.08037036895751953, 0.08056572723388672, 0.08019993591308594, 0.0805429458618164, 0.07988098907470703, 0.0794085464477539, 0.08031292724609375, 0.07974502563476563, 0.08047206115722656, 0.08022016143798828, 0.0803082275390625, 0.08043520355224609, 0.08052428436279296, 0.08117555236816407, 0.09048841857910156, 0.0800645751953125, 0.079012451171875, 0.07869350433349609, 0.07815769958496094, 0.07971942138671875, 0.08010934448242188, 0.08022447967529298, 0.0801239013671875, 0.07978803253173829, 0.07919411468505859, 0.07974240112304687, 0.08092681884765625, 0.08022876739501954, 0.07961811065673828, 0.07901798248291016, 0.07836624145507813, 0.07906556701660156, 0.07947174072265625, 0.07919705963134765, 0.07986278533935547, 0.07908249664306641, 0.07972045135498047, 0.08029366302490235, 0.08027356719970703, 0.08000019073486328, 0.07995798492431641, 0.0799097900390625, 0.07940470123291016, 0.07892144012451172, 0.07974294281005859, 0.08035798645019532, 0.08024889373779297, 0.0805041275024414, 0.08047475433349609, 0.08025692749023437, 0.08096169281005859, 0.08044544219970703, 0.07999282836914062, 0.08059903717041016, 0.07969586944580079, 0.07912963104248047, 0.07902674865722656, 0.07965532684326172, 0.07936204528808594, 0.07990271759033203, 0.08016281890869141, 
0.07995597076416015, 0.0801443862915039, 0.08052531433105468, 0.07984742736816407, 0.07944729614257813, 0.08029055786132812, 0.07973683166503906, 0.07925536346435547, 0.07940525054931641, 0.07977369689941406, 0.07961190032958984, 0.0804409637451172, 0.08026969909667969, 0.08003359985351563, 0.08044153594970703, 0.08001475524902343, 0.09130028533935547, 0.07987155151367188, 0.08062937927246094, 0.07875052642822265, 0.07820902252197266, 0.07824774169921875, 0.07826815795898437, 0.07828018951416016, 0.07827113342285157, 0.07827996826171875, 0.07995875549316406, 0.08205545806884766, 0.08229478454589843, 0.081152099609375, 0.07974198150634766, 0.07935065460205078, 0.07905619049072266, 0.07972313690185547, 0.07984659576416016, 0.07986860656738282, 0.08045791625976563, 0.08057241821289063, 0.08046797180175781, 0.08087344360351563, 0.08076866912841797, 0.08056050872802735, 0.08117862701416016, 0.08030617523193359, 0.07990886688232422, 0.07926742553710937, 0.07892323303222656, 0.07930300903320313, 0.0794609603881836, 0.07947673797607421, 0.07959142303466797, 0.07995516967773438, 0.08018351745605469, 0.0801890869140625, 0.08021417236328125, 0.07985228729248046, 0.0794702377319336, 0.0801119384765625, 0.07944560241699218, 0.07904214477539062, 0.07947555541992188, 0.07951939392089843, 0.07944841766357422, 0.0802877426147461, 0.08047206115722656, 0.08010956573486328, 0.08013005065917969, 0.08017305755615234, 0.0801075210571289, 0.08010281372070313, 0.08004873657226562, 0.08014806365966796, 0.08005059051513672, 0.08005964660644531, 0.08101964569091796, 0.08056166076660157, 0.0810294418334961, 0.08110304260253906, 0.08102912139892578, 0.09261055755615234, 0.07990681457519531, 0.07912857818603515, 0.07871282958984376, 0.07822950744628906, 0.07813932800292969, 0.0782131805419922, 0.07981465911865235, 0.07979827117919921, 0.07981846618652344, 0.08029170989990235, 0.08165408325195313, 0.08257955169677735, 0.08080384063720703, 0.07979574584960937, 0.07999446105957031, 0.07979500579833984, 0.08015789031982422, 0.08014527893066406, 0.08020582580566406, 0.07982489776611328, 0.07923712158203125, 0.07868825531005859, 0.07949107360839844, 0.08025052642822265, 0.07996041870117188, 0.08001741027832031, 0.07981014251708984, 0.07931330871582032, 0.07980445098876954, 0.07971836853027343, 0.0792922592163086, 0.07913897705078125, 0.07985266876220704, 0.07981116485595703, 0.07958953857421874, 0.07970559692382813, 0.08053004455566407, 0.08017855834960938, 0.080112060546875, 0.07977728271484374, 0.07928015899658203, 0.08033254241943359, 0.07976028442382813, 0.07993138885498047, 0.08004752349853515, 0.08047046661376953, 0.08009356689453125, 0.08090191650390625, 0.0809697265625, 0.08092876434326172, 0.08122716522216797, 0.0808802261352539, 0.08033660888671874, 0.08096192169189453, 0.08060508728027344, 0.08045977783203125, 0.08045772552490234, 0.0811104965209961, 0.08054585266113282, 0.08000669097900391, 0.08043206024169922, 0.0799969253540039, 0.09206598663330078, 0.07993138885498047, 0.07948204803466796, 0.07879353332519531, 0.07824793243408203, 0.07825145721435547, 0.07824368286132813, 0.07825657653808593, 0.07975555419921875, 0.07977753448486329, 0.08018150329589843, 0.08243762969970703, 0.08287078094482422, 0.08078662109375, 0.07993222045898438, 0.07956636810302735, 0.07892582702636719, 0.08017497253417968, 0.08017362976074219, 0.07987625885009765, 0.07920626831054688, 0.0787557144165039, 0.07916966247558593, 0.07956070709228516, 0.08056832122802735, 0.08073011016845703, 0.08058051300048828, 0.07999049377441406, 
0.07929280090332032, 0.07924736022949219, 0.079499267578125, 0.07963648223876953, 0.07939276885986328, 0.07906918334960937, 0.07912242889404297, 0.07971635437011719, 0.08036892700195312, 0.08024745941162109, 0.08113910675048829, 0.08032288360595703, 0.07961634826660156, 0.07946793365478516, 0.07970671844482421, 0.08025843048095703, 0.07999929809570312, 0.08024710083007812, 0.08020166778564453, 0.08056018829345703, 0.08099958038330078, 0.08109347534179688, 0.08180735778808594, 0.0810580825805664, 0.08005123138427735, 0.08040723419189454, 0.08037785339355469, 0.08097382354736328, 0.08052735900878906, 0.08069068908691407, 0.08049068450927735, 0.08026963043212891, 0.08041683197021485, 0.07999481964111328, 0.08111923217773437, 0.09149644470214843, 0.07966268920898438, 0.07899372863769531, 0.07860438537597657, 0.07841334533691406, 0.07954013061523438, 0.07896323394775391, 0.08011897277832031, 0.08005308532714844, 0.08070956420898437, 0.07980038452148437, 0.08003145599365234, 0.08090857696533203, 0.0809349136352539, 0.08013414764404297, 0.07949311828613281, 0.07899136352539063, 0.07840767669677734, 0.07917724609375, 0.07947225952148437, 0.07934159851074218, 0.07949497222900391, 0.0797122573852539, 0.0802968292236328, 0.08024451446533203, 0.08082876586914063, 0.0801607666015625, 0.07960364532470703, 0.0789873275756836, 0.07906508636474609, 0.07954422760009766, 0.07907046508789063, 0.0793424301147461, 0.08003900909423828, 0.08039311981201172, 0.08063795471191407, 0.08068505859375, 0.08091193389892579, 0.08027289581298828, 0.08001631927490234, 0.07997644805908204, 0.08039014434814454, 0.08060905456542969, 0.08056444549560547, 0.08059494018554687, 0.080932861328125, 0.07996415710449219, 0.08033484649658203, 0.07978189086914063, 0.08099372863769531, 0.08035993957519531, 0.08016400146484375, 0.07954093170166016, 0.07989679718017578, 0.0797306900024414, 0.0803799057006836, 0.07985356903076171, 0.07996825408935547, 0.07991622161865235, 0.07988921356201172, 0.08031027221679687, 0.08015052795410156, 0.08057433319091797, 0.0917017593383789, 0.07984947204589844, 0.07932313537597656, 0.07985529327392578, 0.07969535827636719, 0.08071250915527343, 0.08008688354492187, 0.07974265289306641, 0.07927827453613281, 0.07889129638671875, 0.07859379577636719, 0.07964697265625, 0.08085024261474609, 0.08017990112304688, 0.08004402923583985, 0.07949021148681641, 0.07932342529296875, 0.07945193481445313, 0.07973558044433594, 0.0798570556640625, 0.07912841796875, 0.07889177703857422, 0.07902518463134765, 0.08003683471679687, 0.08018057250976562, 0.08042054748535156, 0.08069219207763671, 0.08001455688476562, 0.07958198547363281, 0.08019747161865234, 0.08036112213134766, 0.080212158203125, 0.07995378875732422, 0.08026771545410157, 0.08037091064453125, 0.0800627212524414, 0.0804214096069336, 0.08095712280273437, 0.08053791809082031, 0.08007186889648438, 0.07964096069335938, 0.08002095794677734, 0.08016585540771484, 0.07956684875488282, 0.07946627044677734, 0.07970211029052734, 0.07962022399902344, 0.08048973083496094, 0.0798461456298828, 0.08032006072998046, 0.08017759704589844, 0.07996781158447265, 0.07952428436279296, 0.08019132995605469, 0.08005443572998047, 0.080240478515625, 0.07964688110351563, 0.07998675537109375, 0.08003167724609375, 0.0803737564086914, 0.07984947204589844, 0.08062537384033203, 0.08005455780029297]",tokens/s,12.49936381051007,, 
4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,888.193024,6477.971456,0.0,6075.449344,6044.13184,s,1,16.562880859375,16.562880859375,0.0,16.562880859375,16.562880859375,16.562880859375,16.562880859375,[16.562880859375],,kWh,0.00022722092586666633,2.5056990761597962e-05,7.325172526800178e-05,0.0003255296418962661,,MB,1409.236992,7002.259456,0.0,6587.154432,6470.128128,s,10,10.552498779296876,1.0552498779296875,0.004819478221211384,1.0561134033203126,1.0587876098632814,1.0606580871582032,1.0621544689941407,"[1.043472412109375, 1.0516373291015626, 1.053352294921875, 1.0555338134765626, 1.05667333984375, 1.055553466796875, 1.0574630126953124, 1.05791259765625, 1.0583719482421876, 1.062528564453125]",tokens/s,242.5965691673433,kWh,3.075094414874996e-05,3.3913068090055083e-06,2.045615525380004e-05,5.459840621155551e-05,tokens/kWh,4688781.555418714,MB,1446.363136,7016.93952,0.0,6599.737344,6470.130688,s,10,50.707111328125,5.0707111328125,0.010480176876410011,5.069326171875,5.085816650390624,5.087083374023438,5.088096752929688,"[5.06318994140625, 5.05121826171875, 5.0674892578125, 5.07008984375, 5.0685625, 5.0786943359375, 5.06351806640625, 5.0704638671875, 5.08553515625, 5.08835009765625]",tokens/s,12.424292835836752,kWh,0.00014830769459041657,1.6359504808633257e-05,9.831807865439985e-05,0.0002629852780534497,tokens/kWh,239557.13592148584,,s,630,50.70450842285157,0.08048334670293898,0.0015362020598393583,0.08047851181030273,0.08107624740600586,0.08143396797180176,0.09084004501342774,"[0.09041859436035156, 0.08032035064697265, 0.07989734649658203, 0.079580322265625, 0.08018646240234376, 0.08027667236328125, 0.08043782043457032, 0.08039833831787109, 0.08072191619873047, 0.0806645736694336, 0.08007679748535156, 0.07950540924072266, 0.07897293090820312, 0.07878860473632812, 0.07883155059814453, 0.07877228546142578, 0.07919516754150391, 0.08010851287841797, 0.07942348480224609, 0.07963394927978516, 0.08012438201904297, 0.0797278060913086, 0.07972128295898437, 0.07978530883789063, 0.07955033874511719, 0.07929730987548828, 0.07885209655761719, 0.07893769836425782, 0.08024105834960937, 0.07995587158203125, 0.07982704162597656, 0.08051097869873047, 0.08006451416015625, 0.08089190673828126, 0.08023407745361329, 0.07997481536865235, 0.08049254608154296, 0.0801812515258789, 0.08051507568359376, 0.0801443862915039, 0.08049254608154296, 0.08058589172363281, 0.08064905548095704, 0.08071987152099609, 0.08086937713623046, 0.08096492767333985, 0.08057107543945312, 0.0806146240234375, 0.08066127777099609, 0.08061542510986328, 0.08067276763916016, 0.08070963287353515, 0.08112537384033203, 0.08111923217773437, 0.08066233825683594, 0.08078307342529296, 0.08067488098144532, 0.08071564483642578, 0.08073587036132812, 0.08114473724365234, 0.0806297607421875, 0.08085094451904297, 0.08113900756835937, 0.09224025726318359, 0.08062566375732422, 0.07969561767578125, 
0.07924492645263671, 0.07870527648925782, 0.07871488189697265, 0.0786978530883789, 0.07876620483398437, 0.0786693115234375, 0.08007917022705079, 0.07980307006835938, 0.08035910034179687, 0.08188877105712891, 0.08082428741455078, 0.08020400238037109, 0.07995455932617188, 0.08036966705322265, 0.08000415802001953, 0.08096659088134765, 0.0800844497680664, 0.08025961303710938, 0.08031964874267578, 0.08069411468505859, 0.08011913299560547, 0.08045836639404297, 0.08047135925292968, 0.08009353637695313, 0.08086361694335938, 0.08013414764404297, 0.0805027847290039, 0.08007679748535156, 0.07956057739257813, 0.07904883575439453, 0.0790282211303711, 0.08028498840332031, 0.07971091461181641, 0.07925279998779297, 0.07980649566650391, 0.080065185546875, 0.07983513641357422, 0.08026054382324219, 0.07964934539794923, 0.07986176300048828, 0.08026316833496094, 0.0795832290649414, 0.07899750518798829, 0.0797122573852539, 0.0802324447631836, 0.0815472640991211, 0.07919136047363282, 0.08043795013427735, 0.07994163513183594, 0.07965689849853516, 0.08045164489746094, 0.07987964630126954, 0.08067536163330079, 0.07988633728027343, 0.07940502166748047, 0.07987967681884765, 0.08062534332275391, 0.07980118560791015, 0.07985750579833985, 0.08067906951904297, 0.09060182189941406, 0.08032051086425782, 0.07961196899414062, 0.0793087387084961, 0.07873945617675782, 0.07891149139404297, 0.07881517028808593, 0.07877433776855469, 0.07872905731201171, 0.07874755096435547, 0.07875312042236328, 0.08061840057373047, 0.08160256195068359, 0.08113152313232422, 0.08071923065185548, 0.08018310546875, 0.07945094299316406, 0.0790030746459961, 0.07879673767089844, 0.07883968353271484, 0.08010419464111328, 0.08035234832763671, 0.0805118408203125, 0.08044345855712891, 0.08042620849609375, 0.08151081848144531, 0.08083628845214844, 0.08142713928222656, 0.08036665344238281, 0.07990777587890625, 0.07990412902832031, 0.08047885131835937, 0.08072937774658204, 0.08039292907714844, 0.08039628601074218, 0.08087757110595703, 0.08081203460693359, 0.08088780975341797, 0.08060221099853515, 0.08055903625488281, 0.08058592224121094, 0.08066127777099609, 0.08068505859375, 0.08069856262207031, 0.0805077133178711, 0.08054988861083984, 0.08052915191650391, 0.08054297637939453, 0.08016588592529297, 0.08054310607910156, 0.08108710479736328, 0.08078540802001953, 0.08055350494384765, 0.08057084655761719, 0.08057241821289063, 0.0807567367553711, 0.080216064453125, 0.08064118194580078, 0.08066748809814453, 0.08074649810791015, 0.08058636474609375, 0.08065071868896484, 0.08072998046875, 0.09053981018066407, 0.08022486114501953, 0.0794808349609375, 0.0791180191040039, 0.07862509155273438, 0.07868595123291015, 0.07864281463623046, 0.07863359832763672, 0.07869849395751953, 0.07867391967773438, 0.07865753936767578, 0.0816209945678711, 0.08231439971923828, 0.08131203460693359, 0.08055561828613281, 0.0798852767944336, 0.07936946868896484, 0.07877913665771484, 0.07877407836914062, 0.08014387512207032, 0.07981241607666016, 0.08032694244384765, 0.080355712890625, 0.08075491333007813, 0.08069033813476563, 0.08127897644042968, 0.08118768310546876, 0.0809606704711914, 0.0807125473022461, 0.08054579162597657, 0.08011980438232422, 0.08045977783203125, 0.08036287689208985, 0.08044608306884765, 0.08041062164306641, 0.08090790557861328, 0.08045766448974609, 0.08089440155029297, 0.08080572509765625, 0.08053065490722656, 0.08043926239013671, 0.08096575927734374, 0.0805503692626953, 0.08059056091308593, 0.08082498931884766, 0.0805191650390625, 0.0805022735595703, 0.08096409606933594, 
0.08050482940673828, 0.080917724609375, 0.0805343017578125, 0.080611328125, 0.08107622528076172, 0.08067481231689454, 0.08061257934570312, 0.08030889892578125, 0.08052953338623046, 0.08060928344726563, 0.08059494018554687, 0.08078131103515625, 0.08059452819824219, 0.08066425323486329, 0.08067964935302735, 0.09098035430908204, 0.08039132690429687, 0.07965113830566406, 0.07912911987304687, 0.07863900756835937, 0.07866496276855468, 0.07861116790771484, 0.07869586944580079, 0.07879901123046874, 0.07870432281494141, 0.07868707275390625, 0.08188921356201172, 0.08247433471679687, 0.08107644653320313, 0.08036402893066406, 0.07991401672363281, 0.0793465576171875, 0.07891903686523437, 0.0787524185180664, 0.07877164459228515, 0.08015936279296874, 0.07982080078125, 0.08044931030273438, 0.08044976043701171, 0.08098611450195313, 0.08098611450195313, 0.08102873229980469, 0.08056601715087891, 0.08002009582519531, 0.07979212951660156, 0.08043724822998047, 0.08044134521484375, 0.08040019226074219, 0.08078559875488281, 0.08044467163085937, 0.08090611267089844, 0.08041065979003906, 0.08081865692138672, 0.08045606231689453, 0.08089190673828126, 0.08055398559570312, 0.08092428588867187, 0.08051055908203125, 0.08113334655761718, 0.08054476928710938, 0.08056428527832031, 0.08056825256347656, 0.08096768188476562, 0.08051097869873047, 0.08051084899902344, 0.08089408111572266, 0.08067059326171876, 0.08057868957519532, 0.08072396850585938, 0.08064109039306641, 0.08067094421386718, 0.08054243469238281, 0.08065776062011719, 0.08074697875976562, 0.08080108642578125, 0.08059584045410156, 0.08065023803710937, 0.080642333984375, 0.09107180786132812, 0.08031005096435546, 0.07965789031982422, 0.0791488037109375, 0.07865369415283203, 0.07871692657470702, 0.07863890838623047, 0.0787212142944336, 0.07871078491210938, 0.07870057678222656, 0.07868617248535156, 0.08126464080810547, 0.08313651275634766, 0.08170684814453125, 0.0808900146484375, 0.08025497436523438, 0.07949651336669922, 0.07909241485595703, 0.07875929260253907, 0.08031014251708984, 0.07982157135009765, 0.08035533142089844, 0.08032054138183593, 0.08084886169433594, 0.08144406127929688, 0.08114393615722656, 0.08118051147460938, 0.08046880340576172, 0.07995391845703125, 0.07960502624511719, 0.08051990509033204, 0.08038556671142578, 0.0803207015991211, 0.08080617523193359, 0.08044735717773438, 0.08102310180664063, 0.08043682861328125, 0.08152035522460938, 0.08081887817382813, 0.08084617614746094, 0.08077378845214844, 0.08053759765625, 0.08060054779052735, 0.08060326385498047, 0.08022447967529298, 0.0805370864868164, 0.08097862243652344, 0.08101840209960938, 0.08140569305419922, 0.08145378875732422, 0.08088127899169922, 0.08153536224365235, 0.08086048126220703, 0.080949951171875, 0.08060928344726563, 0.08101673889160156, 0.08090009307861327, 0.08097513580322266, 0.08062387084960937, 0.08069792175292968, 0.08070511627197266, 0.08110530853271485, 0.08123728179931641, 0.09139852905273438, 0.08025939178466797, 0.08018089294433593, 0.07959942626953125, 0.07903263854980469, 0.0786313247680664, 0.0786678695678711, 0.07872019195556641, 0.078670654296875, 0.07874912261962891, 0.078697021484375, 0.08269209289550782, 0.08301696014404297, 0.08083942413330078, 0.08047001647949219, 0.08020694732666016, 0.07960259246826172, 0.07910521697998046, 0.07946092987060546, 0.07975936126708984, 0.07922847747802735, 0.07875839996337891, 0.07904188537597656, 0.08090914916992188, 0.08074854278564453, 0.0815308837890625, 0.0809566421508789, 0.0802548828125, 0.07968598175048829, 0.07968822479248047, 
0.07959897613525391, 0.07956665802001953, 0.08036025238037109, 0.07997235107421875, 0.08039833831787109, 0.08070547485351562, 0.08087251281738281, 0.08059801483154297, 0.0812564468383789, 0.08071507263183594, 0.08039814758300781, 0.07966194915771484, 0.08044863891601563, 0.07978073883056641, 0.08035855865478515, 0.07992530822753906, 0.08085155487060547, 0.08100601959228515, 0.0809070053100586, 0.08115602874755859, 0.0804598388671875, 0.08037939453125, 0.08054768371582031, 0.08013072204589844, 0.07973683166503906, 0.08090569305419922, 0.08028566741943359, 0.07979241943359375, 0.08102531433105468, 0.0809349136352539, 0.08045772552490234, 0.08069324493408203, 0.08081743621826172, 0.09166595458984375, 0.08025663757324218, 0.0802060775756836, 0.08020848083496093, 0.08024227142333984, 0.080808349609375, 0.08053907012939453, 0.080611328125, 0.08055865478515625, 0.08120428466796875, 0.08021440124511718, 0.08027193450927735, 0.08063977813720703, 0.08119062042236327, 0.08052102661132812, 0.08031276702880859, 0.07982310485839844, 0.0794603500366211, 0.08068505859375, 0.0801976318359375, 0.07999094390869141, 0.08020269012451171, 0.07988909149169922, 0.0795363540649414, 0.08062566375732422, 0.08064323425292969, 0.08037808227539063, 0.08005081939697266, 0.07944745635986328, 0.07938518524169921, 0.08027545928955078, 0.07963219451904296, 0.08037725067138672, 0.08021036529541016, 0.08061167907714843, 0.08001471710205078, 0.08043126678466797, 0.08086358642578124, 0.08021004486083984, 0.08013005065917969, 0.08029740905761719, 0.0797061767578125, 0.07975270080566406, 0.0805283203125, 0.07985366058349609, 0.08039215850830078, 0.08004402923583985, 0.08103260803222656, 0.08030063629150391, 0.08056422424316406, 0.08048435211181641, 0.08010956573486328, 0.08096562957763671, 0.08037926483154297, 0.07972518157958984, 0.07959347534179688, 0.08042086029052735, 0.08003993225097657, 0.08049868774414062, 0.08120934295654297, 0.08052915191650391, 0.08014179229736328, 0.08111286163330078, 0.09093734741210938, 0.08017635345458984, 0.07945661163330078, 0.07898774719238282, 0.078561279296875, 0.08032256317138672, 0.08016835021972656, 0.08016957092285157, 0.08013005065917969, 0.08049459075927734, 0.08056832122802735, 0.08143257904052735, 0.08266342163085938, 0.08031436920166016, 0.08050457763671875, 0.07998242950439453, 0.08026700592041015, 0.080314208984375, 0.08058144378662109, 0.08045977783203125, 0.08076406097412109, 0.08040857696533203, 0.08073712158203125, 0.08102864074707031, 0.08143510437011718, 0.08075794982910156, 0.08074694061279297, 0.08052486419677735, 0.07995475006103515, 0.08063795471191407, 0.08064364624023437, 0.08068544006347657, 0.08043936157226562, 0.0804167709350586, 0.08040345764160156, 0.08136601257324219, 0.08073216247558594, 0.08038809967041016, 0.08079974365234376, 0.08032579040527343, 0.08043401336669923, 0.0807014389038086, 0.08063142395019532, 0.08071331024169921, 0.08061417388916016, 0.08063180541992188, 0.08092876434326172, 0.08058246612548828, 0.0806627197265625, 0.08099183654785157, 0.08047798156738281, 0.0801655044555664, 0.08065638732910156, 0.08068502044677735, 0.08073567962646484, 0.08095327758789063, 0.08031298828125, 0.0808529281616211, 0.08035948944091797, 0.07989043426513671, 0.08106550598144531, 0.08086370849609376, 0.08167833709716797, 0.09099292755126953, 0.08023654174804687, 0.07946979522705078, 0.0790323486328125, 0.07886073303222656, 0.08010578918457031, 0.07946444702148438, 0.08022537231445312, 0.08014051055908203, 0.08019554901123047, 0.0805298843383789, 0.08137926483154297, 
0.08256915283203126, 0.08131743621826172, 0.07981343841552735, 0.08017510223388671, 0.0796385269165039, 0.08032784271240234, 0.0802958755493164, 0.08042534637451172, 0.08068150329589843, 0.08067481231689454, 0.08062361907958984, 0.0809881591796875, 0.08139366149902344, 0.080729248046875, 0.08067763519287109, 0.08049180603027344, 0.08034182739257813, 0.08078540802001953, 0.08044748687744141, 0.08047817230224609, 0.08079135894775391, 0.08077129364013672, 0.08054323577880859, 0.08142899322509765, 0.08106393432617187, 0.08100454711914062, 0.08062290954589844, 0.08066143798828125, 0.08060018920898437, 0.08067852783203125, 0.08048947143554687, 0.08092262268066407, 0.08043682861328125, 0.0809161605834961, 0.0805642547607422, 0.08146598052978515, 0.08076016235351563, 0.08067145538330078, 0.08069734191894531, 0.08061542510986328, 0.08083251190185547, 0.08071356964111329, 0.0807138900756836, 0.08087462615966796, 0.0809062728881836, 0.08066339111328125, 0.0806111068725586, 0.08095766448974609, 0.08075468444824219, 0.0811335678100586, 0.08069939422607422]",tokens/s,12.424930634295844,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15411.95776,9514.647552,0.0,9112.12544,9086.72256,s,1,35.67498046875,35.67498046875,0.0,35.67498046875,35.67498046875,35.67498046875,35.67498046875,[35.67498046875],,kWh,0.0007873433421000072,8.68374545541197e-05,0.0002555746489039945,0.0011297554455581213,,MB,4090.257408,9688.711168,0.0,9265.217536,9236.043264,s,10,1.2915294799804689,0.12915294799804689,0.0006083691145304167,0.1291079864501953,0.1296813262939453,0.13015472717285156,0.13053344787597657,"[0.1306281280517578, 0.1295761260986328, 0.12914515686035155, 0.12868952941894532, 0.128297607421875, 0.12930752563476564, 0.12907081604003906, 0.12885714721679686, 0.12864659118652344, 0.12931085205078124]",tokens/s,1982.1460057100003,kWh,3.7812196344550343e-06,4.170040283093415e-07,2.506957133769197e-06,6.705180796533572e-06,tokens/kWh,38179432.85471828,MB,4106.268672,9707.585536,0.0,9281.994752,9177.325568,s,10,81.2787734375,8.127877343749999,0.02324779442282911,8.121349365234375,8.157078955078125,8.171228686523438,8.182548471679688,"[8.1072783203125, 8.122548828125, 8.12473046875, 8.11179150390625, 8.11024560546875, 8.18537841796875, 8.1539345703125, 8.12014990234375, 8.1323642578125, 8.1103515625]",tokens/s,7.751101220596592,kWh,0.00023725003117846212,2.616987481797426e-05,0.00010801554795083035,0.0003714354539472667,tokens/kWh,169612.24172462604,,s,630,81.27627260589588,0.12900995651729524,0.0013314511878987395,0.12868812561035156,0.13030177917480468,0.1320559341430664,0.13410214401245116,"[0.12901580810546875, 0.1284828796386719, 0.12851040649414064, 0.12792550659179688, 0.12807350158691405, 0.12782486724853515, 0.12803890991210937, 0.12905392456054687, 0.12807817077636718, 0.12881964111328126, 0.12792012786865234, 0.12856524658203125, 0.13129641723632812, 0.1288323516845703, 
0.12878153991699218, 0.12822198486328126, 0.1277799072265625, 0.12788121795654298, 0.12792262268066407, 0.12828927612304689, 0.12795494079589845, 0.12785049438476562, 0.12844979858398436, 0.12972108459472656, 0.12852742004394532, 0.12788626861572266, 0.12772281646728514, 0.13188986206054687, 0.13115586853027345, 0.12891555786132813, 0.12910560607910157, 0.12972802734375, 0.1292808074951172, 0.12908953857421876, 0.12929571533203124, 0.12889564514160157, 0.12889497375488282, 0.12894537353515625, 0.1293934326171875, 0.12865126037597657, 0.12873727416992187, 0.1281160888671875, 0.1283236846923828, 0.12923753356933593, 0.12856524658203125, 0.12819046020507813, 0.12922607421875, 0.12933340454101563, 0.1296360626220703, 0.12884457397460938, 0.1286860809326172, 0.12898902893066405, 0.12828038024902344, 0.1279814682006836, 0.1289814453125, 0.1289234619140625, 0.12778243255615235, 0.1273125457763672, 0.12805119323730468, 0.12903628540039064, 0.12771526336669922, 0.1279201889038086, 0.12849130249023438, 0.1288826904296875, 0.1284505615234375, 0.12846847534179687, 0.1281131591796875, 0.12820480346679688, 0.12932217407226562, 0.13086598205566408, 0.12977932739257814, 0.12845706176757812, 0.12840499877929687, 0.1281868438720703, 0.12811065673828126, 0.13288038635253907, 0.12962200927734374, 0.12884687805175782, 0.1288509063720703, 0.1282826232910156, 0.12833900451660157, 0.1285293426513672, 0.1289253387451172, 0.13022242736816406, 0.13040585327148438, 0.1296790008544922, 0.12910887145996094, 0.1285980224609375, 0.12896832275390624, 0.1328040313720703, 0.12860409545898438, 0.12834431457519532, 0.12839299011230468, 0.1288733367919922, 0.1280749053955078, 0.12803977966308594, 0.12793660736083984, 0.12779110717773437, 0.12805696105957032, 0.12875106811523437, 0.1300141143798828, 0.1286614990234375, 0.128321533203125, 0.12971827697753907, 0.13115391540527344, 0.1302855682373047, 0.12829647827148438, 0.12924362182617188, 0.12994149780273437, 0.12894601440429687, 0.12970819091796876, 0.12847283935546874, 0.12819891357421875, 0.12819769287109375, 0.1284023742675781, 0.1295125732421875, 0.12899417114257813, 0.1282860107421875, 0.12784441375732422, 0.1278572769165039, 0.12824713134765625, 0.1279309768676758, 0.12790096282958985, 0.1291702117919922, 0.12871253967285157, 0.12811689758300782, 0.12808998107910155, 0.1282826232910156, 0.1283681640625, 0.12820498657226562, 0.12948069763183595, 0.12994998168945313, 0.12933718872070313, 0.1288756866455078, 0.13046464538574218, 0.1290364227294922, 0.1290067901611328, 0.12910675048828124, 0.12980633544921874, 0.13024032592773438, 0.1284773712158203, 0.12838482666015624, 0.12826438903808593, 0.12790898895263672, 0.12759334564208985, 0.12908953857421876, 0.13443458557128907, 0.12885125732421876, 0.12781046295166015, 0.12777267456054686, 0.12752889251708985, 0.1300680694580078, 0.12902447509765624, 0.1285668487548828, 0.12877459716796874, 0.12857058715820313, 0.12850051879882812, 0.12826419067382813, 0.1283031005859375, 0.12831724548339843, 0.12809376525878907, 0.1294252166748047, 0.13078816223144532, 0.1289048614501953, 0.12826249694824218, 0.12929023742675783, 0.13258956909179687, 0.12856320190429688, 0.12825926208496094, 0.12874578857421876, 0.1300485076904297, 0.12825804138183594, 0.12888473510742188, 0.12846080017089845, 0.12797132873535155, 0.12798770904541015, 0.1288123779296875, 0.12862057495117188, 0.1293850555419922, 0.1282724151611328, 0.13032188415527343, 0.12992131042480468, 0.12829312133789061, 0.12772943878173829, 0.1293068542480469, 0.1297029113769531, 
0.1296046142578125, 0.12867584228515624, 0.12854885864257812, 0.12817344665527344, 0.12830508422851564, 0.12831814575195313, 0.12858329772949217, 0.12838951110839844, 0.1278566436767578, 0.1281739501953125, 0.1281045684814453, 0.12778495788574218, 0.12787097930908203, 0.1290260467529297, 0.12917555236816405, 0.12937831115722656, 0.12916085815429687, 0.12819241333007814, 0.127574462890625, 0.12736921691894532, 0.1319342041015625, 0.1282085723876953, 0.12894586181640624, 0.12872154235839844, 0.12853602600097655, 0.1325552978515625, 0.12824166870117187, 0.1284075469970703, 0.1283108215332031, 0.128119140625, 0.12826226806640625, 0.12860354614257813, 0.12843887329101564, 0.1340449981689453, 0.12798432159423828, 0.12761087799072265, 0.12934962463378907, 0.12939637756347655, 0.12885641479492188, 0.1287799072265625, 0.12823741149902343, 0.12774765014648437, 0.1318696594238281, 0.129238525390625, 0.1290224609375, 0.12881634521484375, 0.128262939453125, 0.12865721130371094, 0.12808767700195312, 0.1279306869506836, 0.1277807388305664, 0.12836441040039062, 0.12787763214111328, 0.12936601257324218, 0.1284505615234375, 0.12883491516113282, 0.12907177734375, 0.13209805297851562, 0.12863644409179686, 0.1279943389892578, 0.1282927703857422, 0.12857308959960936, 0.12981497192382813, 0.12810035705566405, 0.12785868835449218, 0.12781568145751954, 0.12832357788085938, 0.12880021667480468, 0.12954672241210938, 0.1277031021118164, 0.1306578826904297, 0.130416259765625, 0.128131103515625, 0.12825808715820314, 0.12954634094238282, 0.129505126953125, 0.1290998077392578, 0.12836122131347658, 0.12863014221191407, 0.1288805389404297, 0.12807221984863282, 0.12854249572753906, 0.1280782470703125, 0.12883334350585937, 0.12829714965820313, 0.1281076202392578, 0.12932579040527345, 0.1288131866455078, 0.1282184295654297, 0.12846534729003906, 0.12787862396240235, 0.1285720672607422, 0.128676025390625, 0.12843212890625, 0.12909773254394533, 0.12831333923339844, 0.12780508422851564, 0.12888304138183593, 0.12849766540527344, 0.12805097961425782, 0.12834815979003905, 0.13075479125976563, 0.1288970184326172, 0.1283759002685547, 0.1283484191894531, 0.12843894958496094, 0.128691650390625, 0.12817465209960938, 0.1282305603027344, 0.1279903335571289, 0.1283382110595703, 0.1280918426513672, 0.12870025634765625, 0.12895858764648438, 0.12956092834472657, 0.12902809143066407, 0.12843417358398437, 0.1280307159423828, 0.13369754028320313, 0.12908534240722655, 0.12870664978027344, 0.12782915496826172, 0.12832435607910156, 0.12829501342773436, 0.12771942138671874, 0.12751197052001953, 0.13200445556640625, 0.12831744384765625, 0.12827853393554686, 0.1287752685546875, 0.12881190490722655, 0.1284403533935547, 0.12857958984375, 0.1381429138183594, 0.1288446044921875, 0.12861439514160156, 0.12908953857421876, 0.12856935119628907, 0.12880487060546875, 0.1284567108154297, 0.1290846710205078, 0.13380876159667968, 0.128993408203125, 0.12929798889160157, 0.12966957092285156, 0.12895436096191407, 0.13036886596679687, 0.1342421417236328, 0.128615234375, 0.13264067077636718, 0.12922479248046875, 0.12997970581054688, 0.13004396057128906, 0.13032832336425781, 0.12844122314453124, 0.12883482360839843, 0.12888291931152343, 0.13331100463867188, 0.12944589233398437, 0.12899942016601562, 0.12821299743652342, 0.12874290466308594, 0.12885452270507813, 0.12876371765136718, 0.128599365234375, 0.1336328887939453, 0.1305886688232422, 0.13251174926757814, 0.12903424072265626, 0.13011289978027343, 0.12894476318359374, 0.12841165161132811, 0.12832563781738282, 
0.13376022338867188, 0.12981890869140625, 0.1295467529296875, 0.13003570556640626, 0.13003062438964844, 0.12988070678710936, 0.129038330078125, 0.12899568176269532, 0.13409628295898438, 0.12927445983886718, 0.12898013305664063, 0.12885673522949218, 0.12896000671386718, 0.12856390380859375, 0.12867788696289062, 0.1286614990234375, 0.13525120544433594, 0.13054588317871094, 0.12912403869628905, 0.12889497375488282, 0.1288308868408203, 0.13071011352539064, 0.12906495666503906, 0.13470658874511718, 0.1285392303466797, 0.1277992935180664, 0.12839321899414063, 0.1276968994140625, 0.12769894409179688, 0.12745884704589844, 0.12909207153320312, 0.13344111633300781, 0.12920252990722655, 0.12796112060546874, 0.1289707794189453, 0.12840512084960937, 0.13099046325683594, 0.12854873657226562, 0.1283699188232422, 0.1338008270263672, 0.1283477783203125, 0.12983949279785156, 0.1283662109375, 0.12816831970214843, 0.1285482635498047, 0.12919868469238283, 0.12866748046875, 0.13410453796386718, 0.12877484130859376, 0.1302159423828125, 0.12946636962890626, 0.12849891662597657, 0.12855375671386718, 0.12930047607421874, 0.12954010009765626, 0.133607421875, 0.12891116333007813, 0.12856256103515626, 0.12865731811523437, 0.12853955078125, 0.12822294616699217, 0.12888815307617188, 0.12902192687988281, 0.13289759826660155, 0.12808822631835937, 0.128901123046875, 0.12832563781738282, 0.12817202758789062, 0.12792534637451172, 0.12824668884277343, 0.12891654968261718, 0.13296876525878906, 0.12800473022460937, 0.12904447937011718, 0.12790166473388673, 0.1276366424560547, 0.12776483154296875, 0.129227294921875, 0.12868199157714844, 0.13281280517578126, 0.12946144104003907, 0.13610887145996095, 0.12924755859375, 0.1291998748779297, 0.12829954528808593, 0.1281228790283203, 0.13272064208984374, 0.131827392578125, 0.12974111938476562, 0.12796723175048827, 0.12775772857666015, 0.12920687866210936, 0.1297469482421875, 0.12877568054199218, 0.12904293823242188, 0.12896791076660155, 0.128768798828125, 0.13257113647460939, 0.12909158325195313, 0.12865887451171876, 0.1282524871826172, 0.12817408752441406, 0.1280813446044922, 0.12812495422363282, 0.12793475341796876, 0.12780083465576172, 0.1284943389892578, 0.12879461669921874, 0.12889088439941407, 0.12818386840820312, 0.12808143615722656, 0.12925216674804688, 0.12892784118652345, 0.130813720703125, 0.1281326446533203, 0.12785529327392578, 0.12857919311523439, 0.12801881408691407, 0.12777677154541014, 0.12887600708007813, 0.12809884643554686, 0.12800204467773438, 0.12902774047851562, 0.12865367126464844, 0.12914688110351563, 0.12784639739990233, 0.13039753723144532, 0.13054150390625, 0.1282134094238281, 0.12789708709716796, 0.1295653076171875, 0.12922489929199218, 0.1288212432861328, 0.12849958801269531, 0.12919180297851562, 0.12947010803222656, 0.12880342102050782, 0.1290948791503906, 0.12907948303222655, 0.1291551055908203, 0.12806121826171876, 0.12803561401367186, 0.12929766845703125, 0.12899725341796875, 0.1280287322998047, 0.1304120330810547, 0.12965728759765624, 0.12866085815429687, 0.12875436401367188, 0.12897628784179688, 0.12897509765625, 0.12826658630371093, 0.12912640380859375, 0.1293291473388672, 0.12853443908691406, 0.1278403549194336, 0.13029954528808593, 0.12929776000976562, 0.12914381408691405, 0.12877174377441405, 0.12848162841796876, 0.12863897705078126, 0.12811878967285156, 0.12820664978027344, 0.1278732147216797, 0.1288662109375, 0.1284217987060547, 0.12844050598144532, 0.12948480224609374, 0.13002464294433594, 0.1284923553466797, 0.12831948852539063, 
0.12816998291015624, 0.13160652160644531, 0.12959539794921876, 0.12873692321777344, 0.12881546020507811, 0.12863897705078126, 0.1280921630859375, 0.12808192443847657, 0.12838822937011718, 0.12905555725097656, 0.12913369750976564, 0.12895123291015625, 0.12881100463867187, 0.1283706817626953, 0.12792012786865234, 0.12853861999511718, 0.12890521240234376, 0.1325322265625, 0.12896870422363282, 0.12852394104003906, 0.13084092712402343, 0.1284136962890625, 0.129544189453125, 0.12884991455078126, 0.12883705139160156, 0.12872966003417968, 0.12968960571289062, 0.1289871368408203, 0.1287209014892578, 0.12805325317382812, 0.13185638427734375, 0.13368853759765625, 0.12964944458007813, 0.12834310913085936, 0.12886317443847656, 0.12977766418457032, 0.12955014038085938, 0.1284035186767578, 0.12845657348632813, 0.12916761779785157, 0.1285647430419922, 0.13025946044921874, 0.12929638671875, 0.1282150421142578, 0.12800396728515626, 0.12853042602539064, 0.12904185485839845, 0.1292335968017578, 0.12871270751953126, 0.12965791320800782, 0.1286479034423828, 0.12838114929199218, 0.12836863708496093, 0.12869017028808594, 0.12842189025878906, 0.12885321044921874, 0.1286128387451172, 0.12955885314941407, 0.1288863067626953, 0.1285978240966797, 0.12768732452392578, 0.1306087646484375, 0.12900973510742186, 0.12839149475097655, 0.12826608276367188, 0.1289320068359375, 0.12879872131347656, 0.12869017028808594, 0.12829075622558594, 0.12828834533691405, 0.1285186309814453, 0.1283829803466797, 0.12869631958007813, 0.12967730712890624, 0.12870655822753907, 0.12792384338378907, 0.12859347534179688, 0.12759123229980468, 0.1321553955078125, 0.129939453125, 0.12882534790039063, 0.12940083312988282, 0.12883148193359376, 0.128606201171875, 0.12800764465332032, 0.12763507080078124, 0.12815657043457032, 0.12816773986816407, 0.12788140869140624, 0.12870559692382813, 0.12812179565429688, 0.1276211166381836, 0.12829405212402345, 0.12790624237060547, 0.1315530242919922, 0.12804981994628906, 0.12811672973632812, 0.12847718811035155, 0.12849087524414063]",tokens/s,7.751339718232822,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15429.464064,9514.647552,0.0,9112.12544,9086.72256,s,1,35.56941015625,35.56941015625,0.0,35.56941015625,35.56941015625,35.56941015625,35.56941015625,[35.56941015625],,kWh,0.0007770693025958261,8.570941138069244e-05,0.00025364936958599885,0.0011164280835625174,,MB,4097.59744,9692.905472,0.0,9269.41184,9235.912192,s,10,1.2673804244995117,0.12673804244995118,0.0007615157513761456,0.1266313591003418,0.12747468032836912,0.12800903816223144,0.12843652442932127,"[0.12661583709716798, 0.1273559341430664, 0.12601200103759766, 0.12601299285888673, 0.12661363220214844, 0.12698863983154296, 0.12684873962402343, 0.12664688110351563, 0.12854339599609374, 
0.12574237060546875]",tokens/s,2019.9144238881102,kWh,3.7015678527954106e-06,4.08217401886628e-07,2.4466264298734333e-06,6.556411684555472e-06,tokens/kWh,39045748.2410757,MB,4097.59744,9705.488384,0.0,9281.994752,9177.194496,s,10,80.09968408203125,8.009968408203125,0.015088616429008501,8.0064140625,8.03034658203125,8.035456982421875,8.039545302734375,"[7.99405322265625, 8.00393896484375, 7.99670947265625, 8.0292109375, 8.00888916015625, 8.0405673828125, 8.02219189453125, 8.009033203125, 8.00226220703125, 7.99282763671875]",tokens/s,7.86519956002333,kWh,0.00023016543946637136,2.538838629170505e-05,0.00010531362644492629,0.00036086745220300265,tokens/kWh,174579.33547456624,,s,630,80.09698155212415,0.12713806595575242,0.0011090328611455444,0.12692251205444335,0.12810054016113281,0.12901227035522458,0.1316412945556641,"[0.1273814697265625, 0.12711526489257813, 0.1261629409790039, 0.12624281311035157, 0.1273133087158203, 0.12745692443847656, 0.12808697509765626, 0.1274961929321289, 0.126814208984375, 0.12614796447753907, 0.12685171508789062, 0.12694723510742187, 0.12709487915039064, 0.1310161895751953, 0.1270271682739258, 0.1267262725830078, 0.12687506866455078, 0.1263356170654297, 0.12613632202148437, 0.1265892791748047, 0.12685679626464844, 0.1264582748413086, 0.12623468780517577, 0.12608057403564454, 0.1267019500732422, 0.1268326416015625, 0.1266339874267578, 0.12615023803710937, 0.12680438232421876, 0.1280184326171875, 0.12699852752685548, 0.1266465606689453, 0.12723990631103516, 0.12674025726318358, 0.12698758697509765, 0.12693965148925782, 0.1267798080444336, 0.12618476867675782, 0.12671865844726563, 0.1352838134765625, 0.12699890899658203, 0.12654646301269531, 0.1261506576538086, 0.12618876647949218, 0.12594419097900392, 0.12712796783447267, 0.1264733123779297, 0.12634819030761718, 0.1262936019897461, 0.12594322967529298, 0.1258710708618164, 0.12653568267822266, 0.1264742431640625, 0.1264168930053711, 0.1258506851196289, 0.12605449676513672, 0.12718985748291015, 0.12636502075195313, 0.12666496276855468, 0.12649132537841798, 0.12616204833984376, 0.12706403350830078, 0.12749446105957032, 0.12640326690673828, 0.12700611114501953, 0.12632313537597656, 0.12748770904541015, 0.12654771423339845, 0.12648118591308594, 0.12664358520507812, 0.126349853515625, 0.12627779388427735, 0.12703024291992188, 0.12938447570800782, 0.12998332214355468, 0.12864874267578125, 0.12724169921875, 0.1269213409423828, 0.1269327697753906, 0.12715427398681642, 0.12729583740234374, 0.12732367706298828, 0.1271220474243164, 0.12775558471679688, 0.12700080108642578, 0.12687203216552734, 0.1261702117919922, 0.12628438568115236, 0.12709101104736328, 0.1269452819824219, 0.12813926696777345, 0.12906495666503906, 0.1270571823120117, 0.1275749740600586, 0.12791343688964843, 0.12690054321289063, 0.12662783813476564, 0.12702044677734375, 0.1265260467529297, 0.1267445755004883, 0.12668723297119142, 0.12678758239746094, 0.1261075210571289, 0.12602175903320312, 0.1260216293334961, 0.12684031677246094, 0.1264378890991211, 0.1272279052734375, 0.12686080169677735, 0.12639193725585937, 0.12700057220458985, 0.12593641662597657, 0.1264389419555664, 0.12700319671630858, 0.12679296112060548, 0.12656307220458984, 0.12823960876464843, 0.12866307067871094, 0.12802096557617187, 0.1261665267944336, 0.12687615966796875, 0.12675286102294922, 0.12657584381103515, 0.12712825775146486, 0.12708863830566405, 0.12681011199951173, 0.12707244873046875, 0.1271492462158203, 0.12799024200439454, 0.1267490234375, 0.1264250259399414, 0.1266395492553711, 
0.1264951705932617, 0.12751609802246094, 0.1269744338989258, 0.12686342620849608, 0.12658649444580078, 0.1278221435546875, 0.12624310302734376, 0.1288700408935547, 0.1267285461425781, 0.12671590423583984, 0.1265459213256836, 0.1264085464477539, 0.12683074951171874, 0.1267056655883789, 0.12623439788818358, 0.12649212646484376, 0.12774272155761718, 0.12605644989013673, 0.1265826873779297, 0.12642063903808592, 0.12969619750976563, 0.12724018859863281, 0.12664832305908202, 0.12700569915771484, 0.12670873260498047, 0.12698419189453125, 0.1265070114135742, 0.12765980529785156, 0.12750460815429687, 0.1276538848876953, 0.12685107421875, 0.1268695068359375, 0.12651315307617186, 0.12677471923828126, 0.1268618850708008, 0.12659209442138672, 0.12822537231445313, 0.1285636444091797, 0.12741398620605468, 0.12682102203369142, 0.12649398040771484, 0.12637462615966796, 0.12695756530761718, 0.12700262451171876, 0.12661145782470704, 0.12666675567626953, 0.12627744293212892, 0.1262511978149414, 0.12645986938476564, 0.12634646606445313, 0.12670588684082032, 0.1264359359741211, 0.12680806732177735, 0.12740013122558594, 0.126440673828125, 0.12661558532714845, 0.12665315246582032, 0.1267520980834961, 0.12740704345703124, 0.13073277282714843, 0.12756992340087892, 0.13391667175292968, 0.12828044128417967, 0.12852032470703126, 0.1266912612915039, 0.12696784210205078, 0.12765734100341797, 0.12700943756103517, 0.127283203125, 0.12748390197753906, 0.1264285125732422, 0.1269111328125, 0.12699836730957031, 0.12792642974853516, 0.12743885040283204, 0.127, 0.12741484832763672, 0.12752896118164062, 0.12804710388183593, 0.1273526077270508, 0.12685862731933595, 0.12714275360107422, 0.12720457458496093, 0.12767878723144532, 0.12668720245361328, 0.1283621826171875, 0.12620832061767578, 0.126329345703125, 0.12626739501953124, 0.12688540649414062, 0.12669503784179686, 0.1267311019897461, 0.12713164520263673, 0.12779052734375, 0.12744351959228514, 0.12686109161376954, 0.1269475555419922, 0.1269145278930664, 0.12996607971191407, 0.12825808715820314, 0.12778272247314454, 0.12801394653320314, 0.1270789794921875, 0.12720499420166015, 0.12665689849853515, 0.12712860870361328, 0.12711011505126954, 0.12781362915039063, 0.1274838104248047, 0.12807177734375, 0.12647014617919922, 0.1269944305419922, 0.12697599792480468, 0.1271214065551758, 0.1297407684326172, 0.1270200653076172, 0.1270626907348633, 0.12695177459716797, 0.12647014617919922, 0.12610355377197266, 0.12635533142089844, 0.1268289260864258, 0.1266527328491211, 0.12646310424804688, 0.1270280990600586, 0.12668313598632813, 0.12647014617919922, 0.1261052780151367, 0.12620143890380858, 0.1269533462524414, 0.12745814514160156, 0.12659264373779297, 0.12695696258544922, 0.1267721939086914, 0.12588572692871094, 0.12657123565673828, 0.1266135025024414, 0.12603187561035156, 0.12651519775390624, 0.12962406921386718, 0.129765380859375, 0.12626124572753905, 0.12599874877929687, 0.12643302154541017, 0.12639087677001953, 0.1260681610107422, 0.1260524444580078, 0.1273285446166992, 0.12689017486572265, 0.12656163024902345, 0.1265582046508789, 0.12714803314208983, 0.12712143707275392, 0.12653017425537108, 0.12633302307128907, 0.12646924591064454, 0.1268088607788086, 0.12698828887939453, 0.12694732666015626, 0.1275269088745117, 0.12708191680908204, 0.12681295776367188, 0.12670953369140625, 0.12867584228515624, 0.12706985473632812, 0.12705599975585938, 0.12707247924804688, 0.12750364685058593, 0.12734742736816407, 0.1289478759765625, 0.12710934448242187, 0.12727875518798829, 0.1271853790283203, 
0.12824575805664062, 0.131968994140625, 0.12917132568359374, 0.12873539733886719, 0.12653791809082032, 0.1272677764892578, 0.1270976028442383, 0.1276041259765625, 0.1277836456298828, 0.1273990707397461, 0.12818946838378906, 0.12787680053710937, 0.12736051177978516, 0.1272440948486328, 0.12684563446044922, 0.12763340759277345, 0.12793011474609375, 0.13045928955078126, 0.12786959838867187, 0.12727494049072266, 0.1292054443359375, 0.1276219482421875, 0.12700057220458985, 0.1275166702270508, 0.12822457885742186, 0.12743727874755859, 0.1272402877807617, 0.12652352142333984, 0.12743651580810547, 0.1265869140625, 0.12716451263427733, 0.12733865356445312, 0.1272661437988281, 0.12703196716308593, 0.12788121795654298, 0.12698995208740235, 0.12779344177246094, 0.1276601257324219, 0.12728656005859376, 0.12784713745117188, 0.12753011322021485, 0.12685116577148436, 0.12667088317871095, 0.13176089477539063, 0.12839730834960938, 0.13793894958496095, 0.12670771026611327, 0.12704358673095703, 0.12802047729492189, 0.12769023895263673, 0.12676457977294922, 0.12667161560058593, 0.12675852966308593, 0.12651529693603517, 0.12631059265136718, 0.12735651397705078, 0.127429443359375, 0.12791744232177735, 0.12681475067138673, 0.12658892822265624, 0.12710047912597655, 0.12707679748535156, 0.12688489532470704, 0.12679676818847657, 0.12799180603027344, 0.12716255950927735, 0.12783926391601563, 0.12894288635253906, 0.12690179443359376, 0.1264722900390625, 0.1268823013305664, 0.12772953796386718, 0.12704073333740235, 0.12659420776367186, 0.12650787353515625, 0.12695142364501952, 0.12605193328857422, 0.12606710052490233, 0.12601139068603515, 0.130050048828125, 0.12721942138671874, 0.12803631591796874, 0.12702598571777343, 0.12678963470458984, 0.1276165771484375, 0.12728729248046874, 0.1273548812866211, 0.12733625793457032, 0.12933798217773437, 0.1268326416015625, 0.12980767822265624, 0.12662239837646486, 0.12772351837158202, 0.12590080261230469, 0.1266087646484375, 0.13063845825195314, 0.12823551940917968, 0.12693708801269532, 0.12694927978515624, 0.12649072265625, 0.12613651275634766, 0.12723900604248048, 0.12646905517578125, 0.12733443450927734, 0.1269835205078125, 0.12676729583740234, 0.12685155487060548, 0.1273031692504883, 0.12651119995117188, 0.12697026824951171, 0.12723814392089844, 0.12745113372802735, 0.12757401275634767, 0.12670361328125, 0.1275719680786133, 0.12673433685302735, 0.1263472671508789, 0.13134848022460938, 0.12862864685058595, 0.12798780822753905, 0.12734873962402343, 0.12977766418457032, 0.12712754821777345, 0.13095936584472656, 0.1274245147705078, 0.1265806427001953, 0.12655977630615234, 0.12706857299804689, 0.12661980438232423, 0.126814208984375, 0.12678876495361327, 0.1263600616455078, 0.12606499481201172, 0.1276395492553711, 0.1270456314086914, 0.12662518310546875, 0.12664681243896483, 0.1281210174560547, 0.12693472290039062, 0.12677561950683594, 0.1273999328613281, 0.12654313659667968, 0.12720611572265625, 0.12687769317626954, 0.1276436462402344, 0.1269061737060547, 0.1270478057861328, 0.12651634979248047, 0.12751299285888673, 0.1265014114379883, 0.12649676513671876, 0.12600857543945312, 0.12694156646728516, 0.12642092895507812, 0.12624940490722655, 0.12598252868652343, 0.13017266845703124, 0.12819500732421876, 0.12683452606201173, 0.12673600006103516, 0.12765853118896484, 0.12688361358642578, 0.12700857543945313, 0.1269764175415039, 0.12707241821289061, 0.12794361877441407, 0.1278450241088867, 0.12795315551757813, 0.12631756591796875, 0.1269401626586914, 0.12640828704833984, 
0.12739762878417968, 0.12921055603027343, 0.1286619873046875, 0.12707568359375, 0.1274985885620117, 0.1269516143798828, 0.12673856353759766, 0.1263636474609375, 0.12680397033691407, 0.12656175994873048, 0.12671148681640626, 0.1266778564453125, 0.12635065460205078, 0.1270730209350586, 0.1265373764038086, 0.12705939483642578, 0.12638499450683593, 0.12718899536132813, 0.12811468505859375, 0.12698985290527343, 0.12707273864746094, 0.12867543029785156, 0.12768624114990235, 0.12744310760498048, 0.1268740768432617, 0.12679087829589844, 0.12648729705810546, 0.1276677474975586, 0.12744393920898436, 0.13217791748046875, 0.12668457794189453, 0.12649078369140626, 0.1277456283569336, 0.12681507110595702, 0.12650905609130858, 0.12602982330322265, 0.12705702209472655, 0.1268581085205078, 0.12712246704101562, 0.1271981735229492, 0.12643497467041015, 0.12620035552978515, 0.12641168212890624, 0.1269236831665039, 0.12746342468261718, 0.12788265228271484, 0.12654847717285156, 0.12697814178466796, 0.12760617828369142, 0.1269540786743164, 0.12835635375976562, 0.12684083557128906, 0.1268338851928711, 0.12726351928710938, 0.12733859252929688, 0.12717206573486328, 0.12684537506103516, 0.12611174774169923, 0.1271072998046875, 0.12628377532958984, 0.12617501068115233, 0.1264324188232422, 0.12656111907958983, 0.13249740600585938, 0.12731609344482422, 0.12731788635253907, 0.12666060638427734, 0.12700998687744142, 0.1271222381591797, 0.12770655822753907, 0.12710150146484375, 0.12654736328125, 0.1273099822998047, 0.12616108703613282, 0.12679177856445312, 0.12601769256591797, 0.1268260498046875, 0.1265668487548828, 0.12691260528564452, 0.13004205322265625, 0.1265247344970703, 0.12616336059570313, 0.12633245086669923, 0.12598524475097655, 0.1269425277709961, 0.12656861114501952, 0.12615116882324218, 0.1264534683227539, 0.1260865249633789, 0.12591417694091797, 0.12611980438232423, 0.12613510131835937, 0.12611993408203126, 0.1265885467529297, 0.1263947219848633, 0.12694671630859375, 0.12636224365234375, 0.12589859008789062, 0.12767826843261718, 0.1268411865234375, 0.1263984603881836, 0.12703334045410156, 0.12644710540771484, 0.12715853118896484, 0.12813087463378905, 0.12670816040039062, 0.12616246032714845, 0.1298068084716797, 0.1289415740966797, 0.1271701431274414, 0.12720630645751954, 0.1264394226074219, 0.1261670379638672, 0.12676710510253905, 0.12669747161865236, 0.12608102416992187, 0.12719213104248048, 0.12749420928955077, 0.12662834930419922, 0.12629811096191407, 0.1261992950439453, 0.12643827056884765, 0.1270576629638672, 0.12663629150390626, 0.12693504333496095, 0.12643023681640625, 0.1272432327270508, 0.12605782318115236, 0.12809896850585936, 0.12623872375488282, 0.12566307067871094, 0.12626959991455078, 0.12677529907226562, 0.1264680938720703, 0.12751193237304687, 0.12634515380859376, 0.12621839904785156, 0.1272816925048828, 0.12634521484375, 0.12617276763916016, 0.12625142669677733, 0.12934089660644532, 0.12840194702148439, 0.12667494201660157, 0.12659302520751953, 0.12694086456298828, 0.1265994873046875, 0.12710912322998047, 0.12750643157958985, 0.1268770217895508, 0.12827101135253907, 0.12608102416992187, 0.12613001251220704, 0.12749430084228516]",tokens/s,7.865464937527269,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, 
rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15403.491328,9514.647552,0.0,9112.12544,9086.72256,s,1,34.9668671875,34.9668671875,0.0,34.9668671875,34.9668671875,34.9668671875,34.9668671875,[34.9668671875],,kWh,0.0007718459986416558,8.513310138533588e-05,0.0002498051998439943,0.0011067842998709859,,MB,4071.497728,9692.905472,0.0,9269.41184,9235.912192,s,10,1.2458959655761719,0.12458959655761719,0.00033897061058341067,0.12463086318969727,0.12497428665161132,0.12504403953552246,0.12509984184265138,"[0.1251137924194336, 0.1246322250366211, 0.12421228790283204, 0.1248854751586914, 0.12401967620849609, 0.12422998046875, 0.12462950134277344, 0.12495878601074219, 0.12444921875, 0.12476502227783202]",tokens/s,2054.74619930735,kWh,3.6564206590624805e-06,4.0308998931129827e-07,2.4285123594749646e-06,6.488023007848744e-06,tokens/kWh,39457320.0018418,MB,4071.497728,9705.488384,0.0,9281.994752,9177.194496,s,10,78.62046923828125,7.8620469238281245,0.014088319696460561,7.860199462890625,7.876029443359375,7.8816880615234375,7.886214956054688,"[7.8873466796875, 7.8588828125, 7.85870458984375, 7.8317080078125, 7.86151611328125, 7.87477197265625, 7.86459765625, 7.8569609375, 7.8731572265625, 7.8528232421875]",tokens/s,8.013180360074033,kWh,0.0002296176407634357,2.5328101414365802e-05,0.00010366132251232538,0.0003586070646901269,tokens/kWh,175679.7514695881,,s,630,78.61785769653318,0.12479025031195747,0.0012856297746681017,0.12447553634643554,0.12574313659667968,0.12712816505432128,0.12962261932373048,"[0.12879667663574218, 0.12507945251464844, 0.12413689422607421, 0.12488566589355468, 0.12421308898925781, 0.12554051208496095, 0.12512870025634765, 0.12436479949951172, 0.12848947143554687, 0.12372940826416015, 0.12805087280273436, 0.12529529571533204, 0.12560540771484374, 0.12456816101074218, 0.12498534393310547, 0.12429312133789062, 0.1293516845703125, 0.12501593780517578, 0.12488419342041016, 0.12815782165527342, 0.12364669036865235, 0.123648193359375, 0.12519615936279296, 0.1277480926513672, 0.12460646057128906, 0.12968960571289062, 0.12832925415039062, 0.1255503692626953, 0.12420777893066406, 0.12434614562988282, 0.12468147277832031, 0.12418764495849609, 0.12489273834228516, 0.1294586181640625, 0.12414361572265625, 0.12511231994628907, 0.12458751678466796, 0.12434620666503907, 0.12434896087646484, 0.12404246520996094, 0.12398223876953125, 0.12463520050048828, 0.12523145294189453, 0.12783395385742188, 0.12424832153320313, 0.12400595092773438, 0.12405599975585938, 
0.12373606109619141, 0.12638406372070313, 0.12431356811523438, 0.12430944061279296, 0.12416630554199219, 0.12793856048583985, 0.1260154266357422, 0.12420841979980468, 0.12401538848876953, 0.12358560180664062, 0.12353327941894532, 0.12407087707519532, 0.12406578826904296, 0.12415385437011718, 0.1236336669921875, 0.12361933135986328, 0.12448767852783203, 0.12550064086914062, 0.12406658935546876, 0.12414745330810546, 0.1242892837524414, 0.123846435546875, 0.12506134033203126, 0.12468329620361328, 0.12421731567382813, 0.12415219116210938, 0.1243367691040039, 0.12539033508300781, 0.12501181030273437, 0.12449372863769531, 0.12434828948974609, 0.1254834213256836, 0.12380524444580078, 0.12429312133789062, 0.1240555191040039, 0.1251401596069336, 0.12471065521240235, 0.1243354263305664, 0.1259109115600586, 0.12431852722167969, 0.12435171508789063, 0.12441849517822266, 0.12717673492431641, 0.1252291488647461, 0.12470294189453125, 0.1251318054199219, 0.12537340545654296, 0.12450736236572266, 0.123878173828125, 0.1246920623779297, 0.12428726196289062, 0.1257555236816406, 0.12493209838867188, 0.12448358154296875, 0.12445426940917968, 0.12470467376708984, 0.12495487976074218, 0.12432867431640625, 0.12843597412109375, 0.12502627563476562, 0.12540850830078126, 0.12444547271728515, 0.1242787857055664, 0.12403839874267578, 0.12375276947021484, 0.12445455932617187, 0.12365699005126952, 0.12323635101318359, 0.12445503997802734, 0.12403699493408203, 0.12379312133789062, 0.12354383850097657, 0.12399616241455078, 0.12389753723144531, 0.1240331802368164, 0.12443177795410157, 0.1330531768798828, 0.12496214294433594, 0.12423849487304688, 0.12429004669189453, 0.1251788787841797, 0.12439756774902344, 0.12470681762695313, 0.12583731079101562, 0.12648038482666016, 0.12520448303222656, 0.12473139190673828, 0.12459145355224609, 0.12498601531982421, 0.12427414703369141, 0.12455171203613281, 0.124498046875, 0.12487872314453125, 0.12457369232177734, 0.12400415802001953, 0.1244665298461914, 0.12487721252441407, 0.12439596557617187, 0.12470272064208984, 0.12471705627441407, 0.124736572265625, 0.1260568313598633, 0.1251006088256836, 0.12443395233154297, 0.12441846466064453, 0.1243005142211914, 0.12419942474365234, 0.12620166778564454, 0.12475247955322266, 0.12411885070800781, 0.12552979278564452, 0.12463113403320313, 0.1245291519165039, 0.12424809265136719, 0.12380960083007812, 0.12461666870117187, 0.12436892700195312, 0.12459417724609376, 0.125517822265625, 0.12497920227050781, 0.12759040069580077, 0.12673433685302735, 0.12458393859863282, 0.12409561920166015, 0.1246319351196289, 0.12464742279052735, 0.1240821762084961, 0.12436479949951172, 0.12475801849365234, 0.12461491394042969, 0.12481100463867187, 0.12440975952148438, 0.12393206024169921, 0.12405644989013671, 0.12387081909179687, 0.12432959747314454, 0.1235769271850586, 0.12638931274414061, 0.124689697265625, 0.12426729583740234, 0.12463187408447265, 0.1238919677734375, 0.12473388671875, 0.1250426254272461, 0.12461881256103516, 0.12420851135253906, 0.12388825225830079, 0.12478582763671875, 0.12400316619873047, 0.12457369232177734, 0.12417584228515625, 0.12380140686035156, 0.12404956817626953, 0.12460499572753907, 0.12893910217285157, 0.1245798110961914, 0.12444767761230469, 0.12551964569091797, 0.12471625518798828, 0.12448025512695313, 0.1242708511352539, 0.12397920227050781, 0.12409299468994141, 0.12674867248535157, 0.1247266845703125, 0.1241871337890625, 0.12491280364990234, 0.12409935760498046, 0.12419497680664063, 0.12431123352050781, 0.12363187408447265, 
0.12402227020263672, 0.12391804504394531, 0.1237287368774414, 0.12428288269042968, 0.12441126251220704, 0.12414220428466796, 0.12375984191894532, 0.12324098968505859, 0.12347622680664062, 0.12418252563476563, 0.12453683471679687, 0.12393881225585937, 0.12403276824951172, 0.12368287658691406, 0.12367424011230468, 0.12366194915771485, 0.12403193664550781, 0.1242597427368164, 0.12403977966308594, 0.12536748504638673, 0.12419519805908204, 0.12348461151123047, 0.12394294738769532, 0.12303766632080078, 0.1234493408203125, 0.12503600311279298, 0.12359661102294922, 0.12378505706787109, 0.12407465362548828, 0.12543587493896485, 0.12440137481689453, 0.12402857971191407, 0.12395404815673829, 0.12431900787353516, 0.12437484741210937, 0.12452470397949218, 0.12384259033203125, 0.12454643249511718, 0.12435724639892579, 0.12371353912353515, 0.12408627319335938, 0.12331622314453125, 0.1245608673095703, 0.12408796691894532, 0.12454150390625, 0.12697222137451172, 0.12425625610351562, 0.1243556137084961, 0.12419785308837891, 0.123615234375, 0.12408140563964844, 0.12423155212402344, 0.12450316619873047, 0.12424166107177734, 0.12412306976318359, 0.12393199920654296, 0.12381053161621093, 0.12378521728515625, 0.12388931274414063, 0.1238421401977539, 0.12411942291259766, 0.12570662689208983, 0.1250160675048828, 0.13485670471191405, 0.12438511657714844, 0.12427251434326173, 0.1241337890625, 0.12742374420166017, 0.12491225433349609, 0.12443186950683593, 0.12371814727783204, 0.13392063903808593, 0.12449394989013672, 0.12532553863525392, 0.12408604431152344, 0.12425177764892578, 0.123768798828125, 0.12488668823242187, 0.12424063873291015, 0.12382617950439453, 0.12402243041992188, 0.1244653091430664, 0.12418463897705079, 0.12469036865234374, 0.12415926361083984, 0.12524022674560548, 0.12486835479736329, 0.12442662048339843, 0.12479475402832031, 0.12514825439453126, 0.1256181411743164, 0.12515618896484376, 0.1248210220336914, 0.12494905853271485, 0.1242880630493164, 0.12447840118408203, 0.12437503814697265, 0.12572672271728516, 0.1250774383544922, 0.12416620635986328, 0.12435785675048829, 0.12518627166748048, 0.12423225402832032, 0.12424940490722657, 0.12555129241943358, 0.12432556915283204, 0.1245514907836914, 0.1264906234741211, 0.12600067138671875, 0.12510646057128907, 0.12484627532958985, 0.12621209716796875, 0.12585164642333985, 0.12496492767333985, 0.12528428649902343, 0.12463062286376952, 0.12574176025390624, 0.12493520355224609, 0.12419961547851563, 0.12391014099121093, 0.12478851318359375, 0.12486268615722657, 0.12497090911865234, 0.12490966033935547, 0.1281433563232422, 0.1250140151977539, 0.12568540954589844, 0.12485791778564453, 0.12499404907226562, 0.12472704315185547, 0.12533404541015625, 0.12454297637939453, 0.12441600036621094, 0.12399616241455078, 0.12499683380126952, 0.12407004547119141, 0.12478463745117188, 0.12433881378173828, 0.12399411010742188, 0.12388761901855469, 0.12505619049072267, 0.12492473602294922, 0.1276272659301758, 0.1247677459716797, 0.12487264251708985, 0.12452102661132812, 0.124653564453125, 0.12456550598144531, 0.12436998748779297, 0.124607421875, 0.12831861877441406, 0.12502095794677734, 0.12651679992675782, 0.1248749771118164, 0.12445680236816406, 0.12406009674072266, 0.12446514892578125, 0.12510352325439453, 0.12443504333496094, 0.1243872299194336, 0.1243842544555664, 0.1247023696899414, 0.12443647766113282, 0.123695068359375, 0.12397980499267579, 0.12429337310791015, 0.1251499557495117, 0.12515430450439452, 0.12564179229736328, 0.12478870391845703, 0.12469862365722656, 
0.12463407897949219, 0.1246712646484375, 0.12458882904052734, 0.1249873275756836, 0.12425984191894532, 0.12619622039794923, 0.12418252563476563, 0.12482355499267578, 0.12438323211669922, 0.12409980773925781, 0.12388227081298828, 0.12454297637939453, 0.12464332580566406, 0.124087646484375, 0.12342134094238282, 0.1312665557861328, 0.12515122985839844, 0.12530687713623048, 0.12389170837402344, 0.12476620483398437, 0.1253232650756836, 0.12557437133789062, 0.12536911773681642, 0.12494012451171875, 0.1248953628540039, 0.12456524658203125, 0.1238612823486328, 0.12397763061523437, 0.12424185943603516, 0.12483190155029297, 0.1245157470703125, 0.12462271881103516, 0.12867027282714844, 0.12572483062744141, 0.12549324798583986, 0.12559503936767577, 0.1252044448852539, 0.12445760345458984, 0.12461183929443359, 0.12467890930175782, 0.12449382019042969, 0.12357360076904297, 0.12346230316162109, 0.1239486083984375, 0.12372831726074218, 0.12429910278320312, 0.12394921875, 0.12408729553222657, 0.12464633941650391, 0.12482975769042969, 0.1242234878540039, 0.12923423767089845, 0.12500582122802734, 0.12469664001464843, 0.1242131805419922, 0.12394086456298828, 0.12331008148193359, 0.12633888244628907, 0.12411737823486328, 0.12385424041748047, 0.12549571228027342, 0.12407001495361328, 0.1236806411743164, 0.12463887786865234, 0.12490991973876953, 0.12420230102539062, 0.12370710754394532, 0.12359164428710938, 0.12395471954345703, 0.12371571350097656, 0.12490697479248047, 0.12417036437988281, 0.12396230316162109, 0.12416326141357421, 0.12361737823486328, 0.12487529754638672, 0.12515296173095702, 0.12552198028564454, 0.12443878173828125, 0.12431900787353516, 0.12542230224609374, 0.12530073547363282, 0.12504268646240235, 0.12441712188720704, 0.12575856018066406, 0.1241884765625, 0.12443798065185546, 0.1237591323852539, 0.12463104248046875, 0.12359833526611329, 0.12418889617919922, 0.12339807891845703, 0.12345353698730469, 0.12418278503417969, 0.12379545593261719, 0.1279078369140625, 0.1252162551879883, 0.12386547088623047, 0.12414988708496094, 0.12384255981445312, 0.12457984161376953, 0.12518195343017577, 0.125242431640625, 0.12471501159667969, 0.13083949279785156, 0.12438527679443359, 0.12377088165283204, 0.12425730895996094, 0.12737635040283204, 0.12526697540283202, 0.12439036560058593, 0.12863449096679688, 0.12578854370117187, 0.12470272064208984, 0.1244241943359375, 0.12406342315673828, 0.12529264068603516, 0.12463116455078126, 0.12464070129394532, 0.12397583770751953, 0.12402092742919922, 0.12389612579345703, 0.12394108581542969, 0.12435842895507812, 0.12445491027832031, 0.12426239776611328, 0.1251368942260742, 0.12553158569335937, 0.12536678314208985, 0.13443638610839845, 0.12442998504638672, 0.12413836669921875, 0.12527001953125, 0.12656025695800782, 0.12489933013916016, 0.12375421142578125, 0.12669776153564452, 0.1254374084472656, 0.12386787414550782, 0.12485718536376954, 0.12497602844238281, 0.12431993865966796, 0.12435545349121094, 0.12572752380371094, 0.12392675018310546, 0.12382527923583984, 0.12417932891845704, 0.12403689575195312, 0.12450838470458984, 0.12481241607666016, 0.1247300796508789, 0.1252947540283203, 0.12420272064208984, 0.12455474853515625, 0.12493065643310547, 0.12466518402099609, 0.12437181091308594, 0.1247927017211914, 0.12413350677490234, 0.1266851806640625, 0.12499967956542969, 0.1245880355834961, 0.12580812835693359, 0.12622694396972656, 0.12441766357421875, 0.12550182342529298, 0.12427263641357422, 0.12563795471191405, 0.12449046325683594, 0.1237176284790039, 0.12393039703369141, 
0.12444416046142578, 0.12419881439208984, 0.12432803344726563, 0.12792902374267578, 0.12724018859863281, 0.12467190551757812, 0.12554045104980469, 0.12527206420898437, 0.12443817901611329, 0.12503695678710938, 0.12475794982910156, 0.12592332458496094, 0.12499353790283203, 0.1241374740600586, 0.12422080230712891, 0.12452492523193359, 0.12443673706054688, 0.12472665405273438, 0.12706880187988281, 0.12662374114990235, 0.12473958587646485, 0.12438665771484375, 0.12414224243164063, 0.12416204833984375, 0.12433353424072266, 0.12423977661132812, 0.12398457336425782, 0.12459190368652344, 0.12421113586425782, 0.12426838684082031, 0.1243078384399414, 0.1252147216796875, 0.12390809631347656, 0.12417987060546876, 0.12455587005615235, 0.1252430419921875, 0.12434671783447265, 0.12670105743408203, 0.1253606414794922, 0.12425830078125, 0.12401609802246094, 0.12511293029785156, 0.12420816040039062, 0.12490624237060546, 0.12534595489501954, 0.1265208282470703, 0.12407366180419922, 0.12400109100341797, 0.12451171112060547, 0.12464342498779298, 0.12458029174804687, 0.1256468505859375, 0.12447267150878906, 0.12461122894287109, 0.12433385467529297, 0.12364412689208984, 0.12376019287109374, 0.12463763427734376, 0.12384432220458984, 0.12404560089111329, 0.12373804473876954, 0.12436863708496093, 0.12476242828369141, 0.12541951751708985, 0.1243135986328125, 0.12468838500976563, 0.12428492736816406, 0.12479878234863281, 0.12393695831298829, 0.12402278137207032]",tokens/s,8.013446543300823,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15411.892224,9514.647552,0.0,9112.12544,9086.72256,s,1,35.11408984375,35.11408984375,0.0,35.11408984375,35.11408984375,35.11408984375,35.11408984375,[35.11408984375],,kWh,0.0007721185902499845,8.516338912401175e-05,0.0002498838110180021,0.0011071657903919984,,MB,4078.239744,9688.711168,0.0,9265.217536,9236.043264,s,10,1.299132049560547,0.12991320495605468,0.00082887488257735,0.12989806365966797,0.13085565795898438,0.13126568145751952,0.13159370025634765,"[0.12925631713867186, 0.13167570495605468, 0.1303369598388672, 0.1290718994140625, 0.13076454162597656, 0.13017942810058594, 0.13008493041992186, 0.12906556701660157, 0.12898550415039062, 0.12971119689941407]",tokens/s,1970.5464127884175,kWh,3.791354145833435e-06,4.17987436521566e-07,2.505817300467612e-06,6.715158882822612e-06,tokens/kWh,38122701.855178505,MB,4082.352128,9707.585536,0.0,9281.994752,9177.325568,s,10,81.53667626953124,8.153667626953125,0.020023741285182937,8.157866455078125,8.180196728515625,8.182497045898437,8.184337299804687,"[8.1587001953125, 8.179685546875, 8.15703271484375, 8.160697265625, 8.1268857421875, 8.18479736328125, 8.16862939453125, 8.1358505859375, 8.12523291015625, 
8.13916455078125]",tokens/s,7.726584266415816,kWh,0.00023689008548333332,2.6129250515917818e-05,0.0001069053535617327,0.0003699246895609838,tokens/kWh,170304.93443075297,,s,630,81.53393526458734,0.12941894486442443,0.0009580716703053776,0.12923592376708987,0.13032338104248048,0.1312029899597168,0.1332917366027832,"[0.12888406372070313, 0.12890377807617187, 0.1303033905029297, 0.12878335571289062, 0.1281596221923828, 0.1288272247314453, 0.12867942810058594, 0.12893058776855468, 0.1289871368408203, 0.128505859375, 0.12942530822753906, 0.12870460510253906, 0.12893696594238283, 0.12888986206054687, 0.12857772827148437, 0.12917721557617187, 0.128972900390625, 0.13066044616699218, 0.13003605651855468, 0.12894578552246094, 0.1299661102294922, 0.1294581756591797, 0.13380198669433593, 0.1295791015625, 0.12960552978515624, 0.12904039001464843, 0.12945318603515624, 0.12947135925292969, 0.129783203125, 0.12928880310058594, 0.12960357666015626, 0.12980633544921874, 0.1295585021972656, 0.13058822631835937, 0.13030857849121094, 0.1298534393310547, 0.12976649475097657, 0.12990547180175782, 0.13045974731445312, 0.12996607971191407, 0.13031015014648437, 0.12943565368652343, 0.12966502380371095, 0.13152857971191406, 0.12916748046875, 0.12920626831054688, 0.1295311737060547, 0.12880551147460936, 0.1291961212158203, 0.13029093933105468, 0.12885069274902344, 0.1292103729248047, 0.12914183044433594, 0.12925843811035156, 0.13019247436523437, 0.12957603454589844, 0.1291589813232422, 0.12927590942382813, 0.12871827697753907, 0.12910649108886718, 0.12949293518066407, 0.12946847534179687, 0.12926780700683593, 0.12996383666992187, 0.12972189331054687, 0.12967575073242188, 0.12886636352539063, 0.1302357177734375, 0.12902879333496095, 0.1299578857421875, 0.12936192321777343, 0.12922061157226564, 0.12941212463378907, 0.12913717651367188, 0.1291751708984375, 0.12917837524414064, 0.12936402893066407, 0.13174954223632812, 0.13179324340820311, 0.13002957153320313, 0.12927999877929688, 0.1298157501220703, 0.13016761779785158, 0.13001930236816406, 0.1298116455078125, 0.130103515625, 0.12945455932617186, 0.1302550048828125, 0.12933529663085938, 0.12924505615234375, 0.1292842254638672, 0.12917266845703124, 0.12947100830078126, 0.13125059509277343, 0.13103091430664063, 0.12915696716308595, 0.12972047424316407, 0.12891258239746095, 0.1322217559814453, 0.12986778259277343, 0.13002455139160157, 0.1296446990966797, 0.12934156799316407, 0.12929437255859375, 0.12989500427246095, 0.12926771545410157, 0.1293741760253906, 0.1294418182373047, 0.12937330627441407, 0.1292991027832031, 0.13034518432617187, 0.12996595764160157, 0.12958070373535155, 0.13036166381835937, 0.1299986572265625, 0.12988259887695314, 0.1306315460205078, 0.13009100341796875, 0.12922998046875, 0.13303439331054687, 0.12974330139160156, 0.12950119018554687, 0.12961920166015625, 0.12948147583007813, 0.1297533721923828, 0.12974604797363282, 0.12964630126953125, 0.129756103515625, 0.12885711669921876, 0.12898812866210937, 0.1304310760498047, 0.12990249633789064, 0.12972061157226564, 0.12882899475097656, 0.12941941833496093, 0.1298096923828125, 0.12991970825195312, 0.13016473388671876, 0.12890931701660155, 0.13029493713378906, 0.12930560302734376, 0.12895587158203126, 0.1290960693359375, 0.12968333435058593, 0.1287225341796875, 0.1290997772216797, 0.12929849243164063, 0.12891587829589843, 0.12897491455078125, 0.12893746948242188, 0.12945254516601562, 0.12841574096679687, 0.12863693237304688, 0.12838613891601564, 0.1310235137939453, 0.12990223693847655, 0.12868377685546875, 
0.12967948913574218, 0.13023715209960937, 0.13097573852539063, 0.12926681518554686, 0.12948159790039063, 0.12922674560546876, 0.13014944458007813, 0.12889117431640626, 0.12882191467285156, 0.12883763122558595, 0.12894412231445312, 0.129544189453125, 0.13027122497558594, 0.1322782745361328, 0.1294192657470703, 0.12959266662597657, 0.12948895263671875, 0.1287563171386719, 0.1329213409423828, 0.1299087371826172, 0.12911541748046876, 0.12922979736328125, 0.12905856323242187, 0.12920013427734375, 0.12939468383789063, 0.12874470520019532, 0.12920895385742187, 0.12888441467285155, 0.12900601196289063, 0.12985279846191405, 0.12889497375488282, 0.12935055541992188, 0.1291117401123047, 0.12875808715820314, 0.13284739685058594, 0.12861648559570313, 0.12899880981445314, 0.13288278198242187, 0.13022026062011718, 0.1293844451904297, 0.12981494140625, 0.1288702087402344, 0.1286611785888672, 0.12923846435546876, 0.12861231994628905, 0.12889181518554688, 0.12916281127929688, 0.12875721740722657, 0.12915936279296875, 0.12925357055664063, 0.12846549987792968, 0.12923216247558594, 0.12897084045410157, 0.1291331787109375, 0.12936805725097655, 0.12911514282226563, 0.12923338317871094, 0.129638427734375, 0.12843673706054687, 0.12928518676757814, 0.12895327758789063, 0.12889907836914063, 0.12903593444824218, 0.12879046630859375, 0.12936181640625, 0.12852479553222657, 0.12833381652832032, 0.1285281524658203, 0.12865869140625, 0.12867587280273438, 0.1285877685546875, 0.13024111938476562, 0.12876118469238282, 0.13186512756347657, 0.13013221740722655, 0.12922003173828125, 0.12963714599609374, 0.12942437744140625, 0.1298319396972656, 0.12976332092285156, 0.13014630126953125, 0.12951756286621094, 0.13154713439941407, 0.1297838134765625, 0.1294766082763672, 0.12960357666015626, 0.1292779541015625, 0.12935110473632813, 0.12872966003417968, 0.1331825866699219, 0.1296285400390625, 0.12981094360351564, 0.12987596130371093, 0.12928367614746095, 0.1338681640625, 0.12875091552734375, 0.12877690124511718, 0.1284648895263672, 0.12861839294433594, 0.12890867614746093, 0.12909642028808593, 0.12935110473632813, 0.12946080017089845, 0.1299264373779297, 0.12998310852050782, 0.12998358154296874, 0.12954640197753906, 0.12977171325683592, 0.1286941375732422, 0.1300279083251953, 0.12886802673339845, 0.1286273651123047, 0.129119873046875, 0.1299767303466797, 0.12953004455566405, 0.1284770202636719, 0.12887799072265624, 0.12950997924804689, 0.12866297912597657, 0.12863459777832031, 0.12853488159179688, 0.12897488403320312, 0.12875733947753906, 0.12832853698730468, 0.12831695556640624, 0.12848141479492187, 0.12758191680908204, 0.127814208984375, 0.12802467346191407, 0.12877619934082032, 0.12887040710449219, 0.1286307830810547, 0.1294416961669922, 0.12934153747558594, 0.1290296630859375, 0.13008534240722655, 0.12911820983886718, 0.12853453063964843, 0.12846284484863282, 0.12959738159179687, 0.1288192596435547, 0.12941107177734376, 0.12862258911132812, 0.12799795532226563, 0.12844236755371094, 0.12830105590820312, 0.12860415649414061, 0.12853248596191405, 0.1307298889160156, 0.13045152282714845, 0.13073330688476562, 0.12837887573242188, 0.1289346923828125, 0.12902809143066407, 0.12866355895996093, 0.12879852294921876, 0.12916342163085937, 0.12865968322753907, 0.13092803955078125, 0.1302841339111328, 0.12972349548339843, 0.12927888488769532, 0.13048147583007813, 0.1298009033203125, 0.13152207946777345, 0.13238525390625, 0.13005413818359374, 0.13036936950683595, 0.13015238952636718, 0.12914688110351563, 0.1336539764404297, 0.12969232177734374, 
0.12908554077148438, 0.1297100830078125, 0.12946432495117188, 0.12981219482421874, 0.1291776580810547, 0.12914285278320312, 0.12936323547363282, 0.1294652099609375, 0.13043507385253905, 0.13006137084960936, 0.12955743408203124, 0.1291177215576172, 0.12994134521484374, 0.12925567626953124, 0.13012351989746093, 0.13005657958984376, 0.1296468505859375, 0.12975050354003906, 0.13276339721679686, 0.12915586853027344, 0.12866764831542968, 0.1291304931640625, 0.12850994873046875, 0.12875088500976561, 0.12897354125976562, 0.12895027160644532, 0.12969778442382812, 0.12956636047363282, 0.129427490234375, 0.1296387176513672, 0.12970095825195313, 0.12941200256347657, 0.1292529296875, 0.12928044128417968, 0.1297939910888672, 0.13032249450683595, 0.12998377990722657, 0.12972515869140624, 0.12952528381347655, 0.13006690979003907, 0.1305927734375, 0.13077615356445313, 0.13084544372558593, 0.1307503662109375, 0.13006211853027344, 0.13077757263183593, 0.13014016723632812, 0.13002546691894531, 0.12962570190429687, 0.12997398376464844, 0.1291840362548828, 0.1294581756591797, 0.12960563659667967, 0.1326868438720703, 0.1298032684326172, 0.12971212768554688, 0.12946022033691407, 0.12905882263183593, 0.12967526245117186, 0.1304122314453125, 0.1295506591796875, 0.13383021545410156, 0.1296588134765625, 0.12947686767578126, 0.1292147216796875, 0.12914700317382813, 0.12938841247558594, 0.12943359375, 0.13233766174316405, 0.13036326599121092, 0.12957708740234375, 0.13016073608398437, 0.1288068084716797, 0.12861967468261717, 0.13350384521484376, 0.129611328125, 0.129229248046875, 0.12898329162597658, 0.12951321411132813, 0.12953395080566407, 0.12954214477539064, 0.12865650939941406, 0.12859481811523438, 0.12897689819335936, 0.12953599548339845, 0.12896051025390626, 0.1293937225341797, 0.130546630859375, 0.13015040588378907, 0.12891545104980467, 0.13333631896972656, 0.12929714965820313, 0.12871055603027343, 0.12908758544921875, 0.13280870056152344, 0.12921189880371095, 0.12878285217285157, 0.12900537109375, 0.12968365478515625, 0.12884335327148438, 0.12917535400390626, 0.1287174072265625, 0.1290742645263672, 0.12917625427246093, 0.12835658264160157, 0.1290127716064453, 0.12904547119140625, 0.12863693237304688, 0.12880021667480468, 0.12877020263671876, 0.12946675109863282, 0.12912364196777343, 0.1296627197265625, 0.1288297882080078, 0.12999580383300782, 0.12905101013183592, 0.1288197479248047, 0.12943775939941407, 0.13114480590820313, 0.12914781188964844, 0.12958457946777344, 0.12848796081542968, 0.1289400634765625, 0.12848477172851563, 0.12834243774414061, 0.1289156494140625, 0.12923196411132812, 0.12827127075195313, 0.12916943359375, 0.13173143005371094, 0.12949095153808593, 0.12870246887207032, 0.12864285278320312, 0.1301588134765625, 0.12867088317871095, 0.12887274169921875, 0.1284791717529297, 0.12865571594238281, 0.12826448059082032, 0.12790169525146483, 0.12868199157714844, 0.12867379760742187, 0.12922796630859376, 0.1288671417236328, 0.13065420532226563, 0.13014834594726563, 0.1289523162841797, 0.130050048828125, 0.12853042602539064, 0.12875149536132813, 0.12834169006347657, 0.1336365509033203, 0.1285201873779297, 0.12860415649414061, 0.12788735961914063, 0.12807577514648438, 0.12804441833496094, 0.12866934204101563, 0.12842658996582032, 0.12816423034667968, 0.1284517822265625, 0.1295137939453125, 0.1288089599609375, 0.12872489929199218, 0.129104736328125, 0.1280796203613281, 0.12936338806152345, 0.1289815673828125, 0.12872726440429688, 0.12931980895996092, 0.1331180114746094, 0.1292480926513672, 
0.12936338806152345, 0.12933177185058595, 0.1294615936279297, 0.12870416259765624, 0.12830157470703124, 0.1284752960205078, 0.1284947509765625, 0.12905375671386718, 0.12916610717773438, 0.12967832946777344, 0.1286614990234375, 0.12850994873046875, 0.12827186584472655, 0.12824627685546874, 0.1289758758544922, 0.12944892883300782, 0.128884765625, 0.1289824981689453, 0.1281295623779297, 0.12892115783691407, 0.12835256958007812, 0.12862229919433593, 0.1288011474609375, 0.1313028106689453, 0.12862850952148439, 0.12896940612792968, 0.12846441650390625, 0.12876249694824218, 0.12856935119628907, 0.12829490661621093, 0.12861209106445312, 0.13085516357421875, 0.12899693298339843, 0.13041719055175782, 0.12953794860839843, 0.12879161071777342, 0.12892051696777343, 0.1290260467529297, 0.12950119018554687, 0.1292736358642578, 0.12978346252441406, 0.12899952697753905, 0.12959925842285155, 0.12810096740722657, 0.1281393585205078, 0.12829270935058593, 0.1285858917236328, 0.12875929260253907, 0.12842445373535155, 0.13177987670898436, 0.13015756225585937, 0.12885282897949218, 0.12856410217285155, 0.12827395629882812, 0.12876783752441406, 0.12937484741210936, 0.13011148071289064, 0.12909158325195313, 0.1285609893798828, 0.12844253540039063, 0.12833792114257814, 0.1285220489501953, 0.1287313232421875, 0.12873507690429686, 0.12846710205078124, 0.12987965393066406, 0.12926361083984375, 0.12898054504394532, 0.1289282531738281, 0.1288007049560547, 0.1300675506591797, 0.12906179809570312, 0.12933631896972655, 0.12987103271484374, 0.13179991149902343, 0.1293956756591797, 0.12980653381347657, 0.1290832061767578, 0.1290520935058594, 0.1287433319091797, 0.12987596130371093, 0.12865327453613282, 0.12824057006835937, 0.12775603485107423, 0.12967018127441407, 0.12926870727539064, 0.12843148803710938, 0.128483642578125, 0.1286188201904297, 0.12843113708496093, 0.12825289916992189, 0.128736572265625, 0.12914352416992186, 0.12903421020507813, 0.12910496520996093, 0.12959187316894533, 0.12910231018066406, 0.1287795867919922, 0.12903689575195312, 0.13023846435546876, 0.12952787780761718, 0.1296096649169922, 0.12891751098632812, 0.12866511535644531, 0.12901628112792968, 0.12857151794433594, 0.12983692932128907, 0.12802458190917967, 0.12881011962890626, 0.12889176940917968, 0.13186457824707032, 0.1287863311767578, 0.12867388916015626, 0.1290997772216797, 0.12868998718261718, 0.12905416870117187, 0.13033135986328126, 0.12897689819335936, 0.12939260864257812, 0.1298043212890625, 0.12918553161621094, 0.12824794006347656, 0.1289239959716797, 0.12894390869140626, 0.12935577392578124, 0.13005413818359374, 0.13060505676269532, 0.12906700134277344, 0.12931919860839844]",tokens/s,7.7268440184516365,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15437.4144,9514.647552,0.0,9112.12544,9086.72256,s,1,34.84575390625,34.84575390625,0.0,34.84575390625,34.84575390625,34.84575390625,34.84575390625,[34.84575390625],,kWh,0.0007661105763791709,8.450050286630656e-05,0.0002490490881279974,0.001099660167373475,,MB,4097.265664,9688.711168,0.0,9265.217536,9236.043264,s,10,1.3085373077392577,0.13085373077392579,0.0005722204608170864,0.1308676986694336,0.1314777099609375,0.13171550903320312,0.13190574829101562,"[0.13097459411621093, 0.1308837432861328, 0.13085165405273438, 0.13066514587402345, 0.13142486572265624, 0.13112037658691406, 0.13026176452636717, 0.129745849609375, 0.13195330810546874, 0.130656005859375]",tokens/s,1956.382890162205,kWh,3.838915568181795e-06,4.2325638622786373e-07,2.5308461805195046e-06,6.793018134929163e-06,tokens/kWh,37685752.47630625,MB,4097.265664,9707.585536,0.0,9281.994752,9177.325568,s,10,82.3491591796875,8.23491591796875,0.015494935429301,8.234020507812499,8.25600087890625,8.258122998046876,8.259820693359375,"[8.2279638671875, 8.255529296875, 8.2443515625, 8.2320634765625, 8.2116123046875, 8.223681640625, 8.2139443359375, 8.2359775390625, 8.2602451171875, 8.2437900390625]",tokens/s,7.650351336621755,kWh,0.00023951180220223482,2.641945243429335e-05,0.00010854332601208051,0.0003744745806486087,tokens/kWh,168235.7181384137,,s,630,82.34661648559566,0.13070891505650112,0.0011172083733076638,0.13047962951660158,0.13166869354248048,0.1324348815917969,0.1342525340270996,"[0.13100790405273438, 0.13064358520507813, 0.1306422119140625, 0.13075840759277343, 0.13082310485839843, 0.13112428283691407, 0.13014521789550781, 0.130993408203125, 0.13055462646484375, 0.1300930633544922, 0.13069430541992189, 0.13033497619628906, 0.13080335998535156, 0.13073651123046875, 0.13050630187988282, 0.13025286865234376, 0.13002029418945313, 0.13149343872070313, 0.13042445373535155, 0.1305424041748047, 0.13077023315429687, 0.1308800048828125, 0.1308214111328125, 0.13025680541992188, 0.12993951416015626, 0.13428933715820313, 0.13033369445800783, 0.1313095703125, 0.1316860809326172, 0.1304149169921875, 0.13066831970214843, 0.13030621337890624, 0.13241561889648437, 0.12987794494628907, 0.12992716979980468, 0.13047193908691407, 0.13095936584472656, 0.13345791625976564, 0.13080380249023438, 0.13006838989257813, 0.12986572265625, 0.12970716857910156, 0.13242658996582032, 0.13156112670898437, 0.12993367004394532, 0.12986572265625, 0.13011065673828126, 0.12987680053710937, 0.12944898986816405, 0.12981961059570313, 0.1297305603027344, 0.13146112060546875, 0.12997392272949218, 0.1303504638671875, 0.12999974060058594, 0.13023785400390625, 0.13025267028808593, 0.1304625244140625, 0.1299693145751953, 0.13012371826171876, 0.12988099670410155, 0.13014425659179688, 0.13025074768066405, 0.13016940307617186, 0.13001266479492188, 0.13034918212890625, 0.13067916870117188, 0.13121229553222657, 0.130731201171875, 0.13107781982421876, 0.1318667449951172, 0.1301927947998047, 0.1304950714111328, 0.13133999633789062, 0.1313152313232422, 0.13100927734375, 0.13047932434082032, 0.13065501403808594, 0.13020176696777344, 0.13038946533203125, 0.13091500854492188, 0.1316246337890625, 0.13160150146484376, 0.1307882537841797, 0.13048626708984376, 0.13066854858398438, 0.1307455291748047, 0.13064381408691406, 0.13098287963867186, 0.13157785034179686, 0.13120716857910156, 0.13034495544433594, 0.13049417114257814, 0.13118223571777343, 0.13027186584472655, 0.13018089294433594, 0.13166348266601563, 0.13099583435058593, 
0.1313817596435547, 0.13054371643066406, 0.13079530334472655, 0.1314779815673828, 0.1303175048828125, 0.13049932861328126, 0.13088371276855468, 0.13088365173339844, 0.13052642822265625, 0.13049116516113282, 0.14290687561035156, 0.1309434814453125, 0.13042796325683595, 0.13068588256835936, 0.13202572631835938, 0.1324031982421875, 0.1315416259765625, 0.13057228088378905, 0.13025619506835937, 0.13312889099121095, 0.13079452514648438, 0.13067263793945313, 0.13124432373046874, 0.1304931182861328, 0.13002546691894531, 0.1299884490966797, 0.13035536193847655, 0.13045053100585938, 0.13091981506347655, 0.13124252319335938, 0.12960981750488282, 0.12942335510253905, 0.13005722045898438, 0.13049481201171875, 0.13032106018066406, 0.1318289337158203, 0.1316339874267578, 0.13159622192382814, 0.1314091796875, 0.13006700134277344, 0.13008099365234374, 0.1305128936767578, 0.13085638427734375, 0.13073670959472655, 0.13273631286621093, 0.1301613464355469, 0.13026713562011719, 0.13170626831054688, 0.13093539428710937, 0.13060096740722657, 0.1305866241455078, 0.13160163879394532, 0.13130735778808594, 0.13078378295898438, 0.13030851745605468, 0.1304799346923828, 0.1300145263671875, 0.13066329956054687, 0.1321246795654297, 0.13046914672851562, 0.13070521545410158, 0.1302611541748047, 0.13001190185546874, 0.13168435668945314, 0.13019544982910156, 0.1303133087158203, 0.1308446044921875, 0.13083541870117188, 0.13009861755371094, 0.13039628601074219, 0.12988844299316407, 0.12967327880859375, 0.12939231872558593, 0.13030245971679688, 0.13036134338378907, 0.13145497131347655, 0.1300614013671875, 0.13008192443847658, 0.13262188720703125, 0.13000111389160157, 0.13037187194824218, 0.132746337890625, 0.1307142333984375, 0.13056614685058593, 0.12996975708007813, 0.1296859130859375, 0.14013031005859375, 0.1303973388671875, 0.13040463256835938, 0.1323607635498047, 0.13201402282714844, 0.1302364501953125, 0.1302855682373047, 0.1297100830078125, 0.1350287322998047, 0.1323802490234375, 0.13041017150878906, 0.13094505310058593, 0.12980499267578124, 0.12962818908691406, 0.1295067901611328, 0.1296474609375, 0.1294803466796875, 0.1306600341796875, 0.1314163818359375, 0.13054473876953124, 0.12954226684570314, 0.1300878448486328, 0.13001100158691406, 0.12941859436035155, 0.12940960693359374, 0.13038397216796874, 0.1312640380859375, 0.130261474609375, 0.13034214782714842, 0.12951817321777342, 0.1293521270751953, 0.12915274047851563, 0.12955033874511718, 0.13183795166015624, 0.1334087677001953, 0.1300070343017578, 0.13155043029785157, 0.13101341247558593, 0.1341624298095703, 0.1301331481933594, 0.12972732543945312, 0.13143186950683594, 0.13116473388671876, 0.13129522705078125, 0.12987184143066408, 0.1298411865234375, 0.1300746307373047, 0.13055999755859374, 0.1312788543701172, 0.13113548278808593, 0.13092658996582032, 0.1302029113769531, 0.13019004821777344, 0.1303709716796875, 0.12998512268066406, 0.1304022979736328, 0.1316741180419922, 0.13364364624023437, 0.13088832092285158, 0.13142425537109376, 0.13047744750976562, 0.12943218994140626, 0.1296847381591797, 0.13072808837890626, 0.1318651885986328, 0.13136058044433593, 0.1304265594482422, 0.13165193176269532, 0.1301422119140625, 0.13071565246582031, 0.13125357055664064, 0.13014697265625, 0.13074607849121095, 0.12963226318359375, 0.13268934631347656, 0.1298294677734375, 0.1295214385986328, 0.1319224395751953, 0.13216563415527344, 0.13160601806640626, 0.13058489990234376, 0.12982240295410155, 0.12952761840820312, 0.13222572326660156, 0.13047193908691407, 0.1304144287109375, 
0.12979557800292968, 0.1301995849609375, 0.12991346740722656, 0.12933891296386718, 0.12945864868164061, 0.129652099609375, 0.12928472900390625, 0.13139149475097656, 0.13018931579589843, 0.13037942504882813, 0.13029017639160156, 0.13043206787109374, 0.12965309143066406, 0.12925177001953125, 0.12932710266113281, 0.12972377014160155, 0.129376708984375, 0.13013215637207032, 0.13011875915527343, 0.12936015319824218, 0.1292023620605469, 0.12924124145507812, 0.13107391357421874, 0.1308240966796875, 0.1301705322265625, 0.13035104370117187, 0.13258805847167968, 0.13020402526855468, 0.13026223754882812, 0.12960569763183594, 0.1311374053955078, 0.13140377807617187, 0.13154185485839845, 0.13057154846191407, 0.1304334716796875, 0.1297364196777344, 0.1299134063720703, 0.13066648864746094, 0.12980429077148437, 0.13003570556640626, 0.130197509765625, 0.13021133422851563, 0.13142658996582032, 0.130214111328125, 0.12985958862304686, 0.13031015014648437, 0.13023846435546876, 0.1304078369140625, 0.13039266967773439, 0.13064601135253906, 0.1294192657470703, 0.1294471435546875, 0.1306099853515625, 0.130804931640625, 0.1305912628173828, 0.13036569213867188, 0.13160755920410155, 0.13068304443359374, 0.1299648895263672, 0.13018931579589843, 0.13038975524902344, 0.12984962463378907, 0.13021385192871093, 0.13016595458984376, 0.13312403869628905, 0.13370048522949218, 0.13034495544433594, 0.1308221435546875, 0.13134230041503905, 0.1307906494140625, 0.13263133239746094, 0.13061251831054688, 0.13028038024902344, 0.13244166564941406, 0.130719970703125, 0.13090605163574218, 0.12996818542480468, 0.13012786865234374, 0.13047193908691407, 0.12969778442382812, 0.1302298889160156, 0.1302142791748047, 0.13171098327636718, 0.1297869110107422, 0.13020054626464844, 0.13021388244628906, 0.1304289245605469, 0.13014019775390626, 0.13017698669433594, 0.12992684936523438, 0.13042515563964843, 0.13004106140136718, 0.13031094360351564, 0.13050604248046874, 0.1300702667236328, 0.12960572814941407, 0.130072509765625, 0.1301527099609375, 0.13045571899414063, 0.13194700622558594, 0.12970947265625, 0.13085757446289062, 0.1313116149902344, 0.1293701171875, 0.13002156066894532, 0.12961293029785156, 0.13087100219726563, 0.1307670135498047, 0.1312049560546875, 0.13012783813476564, 0.13024867248535157, 0.13125311279296875, 0.13107171630859374, 0.12992947387695314, 0.12976034545898438, 0.13020620727539062, 0.13045599365234375, 0.13027325439453125, 0.12996199035644532, 0.13002064514160155, 0.13124822998046876, 0.13096975708007813, 0.13038803100585938, 0.13060687255859374, 0.1303700408935547, 0.12972047424316407, 0.12984300231933593, 0.13059706115722655, 0.13017283630371093, 0.13073622131347656, 0.13038182067871093, 0.1300574035644531, 0.13256172180175782, 0.12943154907226562, 0.13000090026855468, 0.13051699829101562, 0.1302855682373047, 0.1299578857421875, 0.1298303680419922, 0.130449951171875, 0.13406822204589844, 0.129451904296875, 0.13097996520996094, 0.13136691284179688, 0.13113343811035155, 0.13036543273925782, 0.130334716796875, 0.12986309814453126, 0.12976176452636717, 0.1318278045654297, 0.13087440490722657, 0.13050364685058594, 0.12997222900390626, 0.12971749877929686, 0.1302678680419922, 0.1299331817626953, 0.12955255126953125, 0.12974441528320313, 0.13038230895996095, 0.13067878723144533, 0.13008895874023438, 0.12952371215820313, 0.13087907409667968, 0.12999516296386718, 0.12972265625, 0.12937802124023437, 0.13009100341796875, 0.13040559387207032, 0.12979624938964843, 0.12977568054199218, 0.13062200927734374, 0.1295149688720703, 
0.12966709899902343, 0.13005363464355468, 0.13193923950195313, 0.1302159423828125, 0.13365863037109374, 0.13476454162597656, 0.13017018127441407, 0.13007087707519532, 0.1309371795654297, 0.13120838928222656, 0.1309226531982422, 0.13135859680175782, 0.1308065643310547, 0.13023426818847655, 0.13007061767578126, 0.1300070343017578, 0.13109616088867188, 0.1308041229248047, 0.13001455688476563, 0.12996675109863282, 0.1315669708251953, 0.13036813354492188, 0.13051043701171874, 0.13193257141113282, 0.12988185119628906, 0.12994943237304687, 0.13017254638671874, 0.13007452392578125, 0.1314436798095703, 0.130182373046875, 0.1301900177001953, 0.1309266815185547, 0.13070745849609375, 0.13033882141113282, 0.12994880676269532, 0.13379017639160157, 0.13107241821289062, 0.12987802124023437, 0.13025485229492187, 0.13060505676269532, 0.1299016571044922, 0.13027133178710937, 0.12979216003417968, 0.1300773468017578, 0.13343539428710938, 0.12973776245117188, 0.12960581970214843, 0.1305380859375, 0.13106930541992187, 0.130306884765625, 0.1302810516357422, 0.13093478393554686, 0.13241590881347656, 0.13135871887207032, 0.13097164916992188, 0.13006658935546875, 0.13004783630371095, 0.13041615295410156, 0.13068540954589844, 0.13113690185546875, 0.13089447021484374, 0.13050416564941406, 0.13141874694824218, 0.13038946533203125, 0.13072848510742188, 0.13134197998046876, 0.13089418029785158, 0.13025856018066406, 0.130474365234375, 0.13043096923828126, 0.13099203491210937, 0.1304617919921875, 0.13101779174804687, 0.13138978576660157, 0.1303721923828125, 0.13038540649414063, 0.13242214965820312, 0.13229200744628905, 0.12992515563964843, 0.13018888854980468, 0.13400982666015626, 0.13114703369140626, 0.13104348754882814, 0.13119340515136718, 0.13210543823242188, 0.1312255401611328, 0.13066940307617186, 0.13086105346679688, 0.131093505859375, 0.1316680908203125, 0.13088365173339844, 0.13083116149902344, 0.13054156494140626, 0.13050674438476562, 0.13009443664550782, 0.13075885009765625, 0.13158181762695312, 0.13082035827636718, 0.13050422668457032, 0.13159829711914062, 0.13161946105957031, 0.1311144714355469, 0.13095193481445314, 0.13124403381347657, 0.13085209655761718, 0.13024128723144532, 0.13067205810546875, 0.13076947021484375, 0.13140089416503906, 0.13101139831542968, 0.13071881103515626, 0.13279119873046874, 0.13113139343261718, 0.13068464660644533, 0.13098422241210939, 0.13101260375976562, 0.13059481811523438, 0.1301071014404297, 0.13024620056152345, 0.13457481384277345, 0.1337855987548828, 0.13059686279296875, 0.13177232360839844, 0.1317064971923828, 0.1308939208984375, 0.13029466247558594, 0.13296141052246094, 0.13298471069335938, 0.13061212158203125, 0.13157933044433595, 0.13060479736328126, 0.13064285278320312, 0.13013772583007813, 0.130127685546875, 0.13164553833007814, 0.13115560913085939, 0.13085321044921874, 0.13074652099609374, 0.13071728515625, 0.13033135986328126, 0.1309751739501953, 0.13080429077148437, 0.13063487243652344, 0.12995989990234375, 0.1297744903564453, 0.13007872009277344, 0.13004185485839845, 0.13000909423828125, 0.12987152099609375, 0.1305214385986328, 0.13055938720703125, 0.13023193359375, 0.13131671142578125, 0.13180706787109375, 0.13030006408691405, 0.13323609924316407, 0.13134422302246093, 0.13136271667480467, 0.13061209106445312, 0.1312063751220703, 0.13107644653320313, 0.13058688354492187, 0.13056838989257813, 0.13047602844238282, 0.13068067932128907, 0.13138755798339843, 0.1309696044921875, 0.13049037170410155, 0.13068492126464842, 0.1301749725341797, 0.13007872009277344, 
0.1311272888183594, 0.13115171813964843, 0.13090147399902344, 0.13097235107421876, 0.13031015014648437, 0.13033013916015626, 0.1303659210205078, 0.13041868591308595, 0.13030184936523437, 0.1305293731689453, 0.13039552307128907, 0.1311463623046875, 0.13063536071777343, 0.12970025634765625, 0.12977766418457032, 0.1380331573486328, 0.13022618103027345]",tokens/s,7.650587563730701,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,878.137344,6477.971456,0.0,6075.449344,6044.13184,s,1,16.3768125,16.3768125,0.0,16.3768125,16.3768125,16.3768125,16.3768125,[16.3768125],,kWh,0.0002280931858041754,2.5153309891967757e-05,7.434755947800797e-05,0.0003275940551741511,,MB,1373.32736,7002.259456,0.0,6587.154432,6469.997056,s,10,10.50996923828125,1.050996923828125,0.007943041603667518,1.0509966430664062,1.0551911743164062,1.062996466064453,1.0692406994628907,"[1.0386219482421875, 1.04500244140625, 1.0450501708984374, 1.049676025390625, 1.0708017578125, 1.0515601806640624, 1.05043310546875, 1.052427734375, 1.052939208984375, 1.0534566650390624]",tokens/s,243.57825812425025,kWh,3.0571760802915075e-05,3.369707081784789e-06,2.0283571782399313e-05,5.422503966709917e-05,tokens/kWh,4721066.163743666,MB,1398.034432,7014.842368,0.0,6599.737344,6469.999616,s,10,50.304998046875006,5.0304998046875,0.010653448843516765,5.03640478515625,5.040350244140625,5.042222973632812,5.043721157226563,"[5.01510791015625, 5.0147421875, 5.0174560546875, 5.02524267578125, 5.03574462890625, 5.03706494140625, 5.03993408203125, 5.03842822265625, 5.037181640625, 5.044095703125]",tokens/s,12.523606489616718,kWh,0.00014734542814250252,1.6254074696315733e-05,9.769318926560189e-05,0.00026129269210442015,tokens/kWh,241108.92460330797,,s,630,50.30246337890622,0.07984517996651787,0.0016071631841528223,0.07971578979492189,0.08047777023315429,0.08089388656616212,0.09079604545593263,"[0.09136054229736328, 0.0797390365600586, 0.07890108489990234, 0.07852310180664063, 0.07816191864013672, 0.078210205078125, 0.0796107177734375, 0.07925254058837891, 0.08033548736572266, 0.08003616333007812, 0.07955865478515625, 0.07885395050048828, 0.07851561737060547, 0.07967619323730468, 0.07901593780517578, 0.07854064178466796, 0.07823171234130859, 0.07823353576660157, 0.07935801696777343, 0.07932486724853516, 0.07904902648925781, 0.07964262390136718, 0.07919805145263673, 0.08022598266601562, 0.08050326538085938, 0.08015574645996094, 0.08019241333007812, 0.08024432373046875, 0.07964246368408204, 0.07921311950683593, 0.0789865951538086, 0.07866019439697265, 0.07834835052490234, 0.07834828948974609, 0.07875520324707032, 0.07977638244628907, 0.0791695327758789, 0.07975526428222657, 0.07935951995849609, 0.07941986846923828, 0.07944131469726562, 0.0793892822265625, 0.07998464202880859, 0.07971839904785157, 0.0804324493408203, 0.0797886734008789, 0.08049056243896484, 0.080146240234375, 0.08047634887695312, 0.08039218902587891, 0.08002355194091797, 0.07977983856201172, 0.07911746978759765, 0.07877894592285156, 0.07850978851318359, 0.07945401763916016, 0.07965773010253906, 0.07929212951660156, 0.07938690948486328, 0.07984886169433594, 0.07931759643554688, 0.07989247894287109, 0.07943987274169922, 0.09172579193115235, 
0.08001741027832031, 0.07924703979492187, 0.07875820922851562, 0.07823155212402344, 0.07821234893798829, 0.07820365142822265, 0.0797589111328125, 0.0797188491821289, 0.08006553649902344, 0.07949619293212891, 0.07911373138427734, 0.0791390380859375, 0.07925113677978515, 0.0792254409790039, 0.07924940490722657, 0.07922892761230468, 0.07893401336669922, 0.07839539337158204, 0.07863231658935547, 0.07949945831298828, 0.07891193389892578, 0.07934563446044922, 0.07947881317138672, 0.07981600189208984, 0.07983708953857421, 0.08072003173828125, 0.08024928283691406, 0.08021609497070313, 0.07968521881103516, 0.07925997161865235, 0.07912815856933594, 0.07867241668701172, 0.07852044677734375, 0.0791695327758789, 0.07955590057373046, 0.07898377227783203, 0.07950141143798828, 0.0795728988647461, 0.07939081573486328, 0.07964057922363281, 0.07942963409423828, 0.07940614318847657, 0.07947347259521484, 0.07929849243164062, 0.07986399841308593, 0.08005961608886719, 0.08010832214355469, 0.08033280181884765, 0.07981670379638672, 0.07996598052978515, 0.0794319076538086, 0.07884156799316407, 0.07853196716308594, 0.0799097900390625, 0.07937814331054688, 0.07884419250488281, 0.0799210205078125, 0.07932447814941407, 0.0797519989013672, 0.07976345825195312, 0.07932710266113281, 0.07996121978759765, 0.09208879852294923, 0.07983952331542969, 0.07904665374755859, 0.0786239013671875, 0.07809519958496093, 0.07808409881591796, 0.07948492431640625, 0.07930879974365235, 0.07962419128417969, 0.07969734191894531, 0.0790545883178711, 0.07888313293457032, 0.07956070709228516, 0.07918569946289063, 0.07914566040039063, 0.07915702056884766, 0.07908377838134766, 0.07910604858398437, 0.07845021057128906, 0.07903206634521484, 0.0795101776123047, 0.07891974639892578, 0.07976099395751954, 0.07943981170654296, 0.08015264129638672, 0.08020009613037109, 0.08040447998046875, 0.08050892639160157, 0.08035906982421875, 0.07995426940917968, 0.07961161804199218, 0.07908966064453125, 0.07860662078857422, 0.07832332611083985, 0.07912387084960938, 0.0796374740600586, 0.07917772674560547, 0.07980032348632812, 0.07930470275878906, 0.07940243530273437, 0.07935852813720704, 0.0799109115600586, 0.08009903717041016, 0.07966134643554687, 0.07902617645263672, 0.07989247894287109, 0.07957202911376954, 0.08003228759765625, 0.08006902313232422, 0.08001471710205078, 0.07963916778564453, 0.07949673461914063, 0.07910652923583984, 0.0792811508178711, 0.07984025573730469, 0.07953612518310547, 0.07911148834228515, 0.07967609405517578, 0.07970355224609375, 0.0795898208618164, 0.07956076812744141, 0.07959347534179688, 0.07959142303466797, 0.0905564193725586, 0.07987200164794922, 0.07913267517089843, 0.07863910675048828, 0.07819878387451172, 0.07845683288574219, 0.07825202941894531, 0.07825398254394532, 0.07812105560302735, 0.0797306900024414, 0.08004198455810548, 0.08031641387939453, 0.08276300811767578, 0.08142908477783203, 0.08021161651611328, 0.08019789123535156, 0.0801180191040039, 0.0797675552368164, 0.07917362976074219, 0.07901789093017578, 0.07851837158203125, 0.0782376937866211, 0.0782720947265625, 0.07867801666259766, 0.07966697692871094, 0.08003756713867187, 0.07994873809814453, 0.07966047668457031, 0.07925408172607422, 0.07932927703857422, 0.07938038635253906, 0.08013423919677734, 0.079710205078125, 0.07926988983154297, 0.08020582580566406, 0.08036761474609375, 0.08046380615234375, 0.08001132965087891, 0.07959552001953125, 0.07909564971923828, 0.07861673736572265, 0.079710205078125, 0.07916275024414063, 0.07991769409179687, 0.0794351043701172, 
0.07934130859375, 0.07934009552001953, 0.07947289276123047, 0.07942153930664063, 0.07940013122558594, 0.07948371124267578, 0.07948480224609375, 0.0793785629272461, 0.07942144012451172, 0.07948812866210937, 0.08108121490478516, 0.07987158203125, 0.08002349090576172, 0.08029436492919922, 0.08047615814208985, 0.08041267395019532, 0.0800947494506836, 0.0795508804321289, 0.09036771392822265, 0.07970809936523438, 0.07924384307861328, 0.07999577331542969, 0.0800788803100586, 0.08003670501708984, 0.07966665649414062, 0.07902057647705078, 0.07852345275878907, 0.07819699096679687, 0.0781646728515625, 0.079351806640625, 0.08009478759765624, 0.0798089599609375, 0.07990003204345703, 0.07979891204833985, 0.07948902130126953, 0.07892991638183594, 0.08011366271972656, 0.07942931365966797, 0.07880531311035156, 0.07963561248779297, 0.08016694641113281, 0.08020665740966797, 0.08031148529052734, 0.08080467224121093, 0.08069449615478516, 0.07988684844970703, 0.07985151672363282, 0.07949545288085938, 0.07898521423339844, 0.0790487060546875, 0.07897702026367187, 0.07977321624755859, 0.07975897979736328, 0.0794583969116211, 0.07980425262451171, 0.08014326477050782, 0.0798023681640625, 0.0792117462158203, 0.07981449890136719, 0.0798662109375, 0.0791107177734375, 0.07894172668457031, 0.080052734375, 0.08008499145507812, 0.08119033813476563, 0.08008089447021484, 0.07997062683105469, 0.08090156555175781, 0.08088658905029297, 0.08042848205566407, 0.08088569641113282, 0.08046006774902344, 0.08012348937988281, 0.07984409332275391, 0.07915715026855469, 0.07926153564453126, 0.08045769500732422, 0.07924559783935547, 0.0798306884765625, 0.08023222351074219, 0.07989715576171875, 0.0913631362915039, 0.07980595397949218, 0.07906937408447266, 0.07856380462646484, 0.0796917724609375, 0.07968972778320313, 0.08003788757324219, 0.08006585693359375, 0.07952249908447266, 0.07888451385498046, 0.07845718383789063, 0.07924326324462891, 0.08009728240966797, 0.07980032348632812, 0.07989981079101563, 0.07963062286376953, 0.078946044921875, 0.07915100860595703, 0.0794202880859375, 0.07892758178710937, 0.07954621124267579, 0.07962057495117188, 0.07896015930175782, 0.07963053131103516, 0.08057475280761718, 0.08045362854003907, 0.0807767333984375, 0.08015510559082031, 0.080223388671875, 0.08028246307373046, 0.08027750396728515, 0.07996771240234375, 0.07950911712646484, 0.0796968002319336, 0.07910562896728515, 0.08006697845458985, 0.07977152252197266, 0.07994163513183594, 0.08023430633544922, 0.07985593414306641, 0.07965491485595703, 0.08010956573486328, 0.07949622344970703, 0.07915372467041015, 0.07979459381103515, 0.07989218902587891, 0.07920054626464844, 0.07920162963867187, 0.07985810852050781, 0.07986198425292969, 0.08030786895751953, 0.07994198608398438, 0.07981187438964844, 0.08022233581542969, 0.07966687774658203, 0.07918685150146484, 0.08043011474609375, 0.07998563385009766, 0.08003366088867188, 0.08022438049316406, 0.08038092803955078, 0.08096665954589843, 0.08052105712890625, 0.09227327728271484, 0.0799736328125, 0.07942425537109375, 0.0787435531616211, 0.07818009948730469, 0.07815731048583985, 0.07824361419677735, 0.07948531341552735, 0.07972297668457032, 0.08008838653564453, 0.08009574127197265, 0.08125196838378906, 0.08245033264160156, 0.0805893783569336, 0.0801115493774414, 0.08021635437011719, 0.08002969360351563, 0.07970816040039062, 0.07906508636474609, 0.078671875, 0.07818035125732421, 0.078169921875, 0.0790079345703125, 0.0801416015625, 0.08062608337402344, 0.08020185852050782, 0.08078937530517578, 0.08030854034423827, 
0.07963648223876953, 0.07930879974365235, 0.07972249603271485, 0.07902207946777344, 0.07843840026855468, 0.07833712005615234, 0.07980242919921875, 0.08088662719726562, 0.08075212860107422, 0.08096409606933594, 0.0806046371459961, 0.08086752319335938, 0.07983926391601562, 0.08028316497802734, 0.08082640075683593, 0.08040252685546875, 0.08080012512207031, 0.08032387542724609, 0.07994435119628907, 0.07935747528076172, 0.07881600189208984, 0.07924297332763672, 0.08012425231933594, 0.08022220611572266, 0.07971331024169923, 0.08011465454101563, 0.08029293060302735, 0.07993644714355469, 0.07959961700439454, 0.07994163513183594, 0.07924940490722657, 0.07924291229248047, 0.07981910705566406, 0.07931903839111328, 0.08003743743896484, 0.09031718444824219, 0.07985337829589843, 0.07904249572753906, 0.07893631744384766, 0.07827027130126953, 0.07823379516601563, 0.07821107482910156, 0.07823152160644531, 0.07815171051025391, 0.07816758728027344, 0.07980694580078125, 0.08193228912353516, 0.08312422180175781, 0.08104959869384766, 0.08023859405517578, 0.0795667495727539, 0.07888899230957032, 0.079617919921875, 0.07971826934814454, 0.08010784149169922, 0.08009712219238281, 0.08010562896728515, 0.0797731170654297, 0.0792008285522461, 0.07913881683349609, 0.07989379119873047, 0.08031919860839844, 0.07989043426513671, 0.078993408203125, 0.07926374053955078, 0.07948697662353515, 0.07961599731445312, 0.07930000305175781, 0.08012406158447266, 0.07979666900634766, 0.07918307495117187, 0.07906304168701171, 0.08042371368408203, 0.07981056213378906, 0.0805, 0.07961878204345703, 0.07910809326171875, 0.0797675552368164, 0.08024211120605469, 0.08007942199707031, 0.0808589096069336, 0.08033097839355469, 0.08037763214111328, 0.08031254577636719, 0.081040771484375, 0.0803345947265625, 0.08095555114746093, 0.08089673614501953, 0.08037580871582031, 0.08022128295898437, 0.07974185943603515, 0.07916038513183593, 0.07950841522216796, 0.07965286254882813, 0.08015257263183594, 0.0803674545288086, 0.0799049301147461, 0.07973417663574218, 0.09139718627929687, 0.07979103851318359, 0.07900160217285156, 0.07856694030761718, 0.07818492889404297, 0.08007065582275391, 0.08006259155273437, 0.08026099395751952, 0.07988019561767579, 0.079388671875, 0.0788828125, 0.07995187377929687, 0.0807936019897461, 0.08013823699951172, 0.07995516967773438, 0.07936009979248047, 0.07885084533691407, 0.07902607727050781, 0.0794972152709961, 0.07896656036376953, 0.07896880340576172, 0.07948108673095704, 0.07913683319091797, 0.08005830383300781, 0.08017919921875, 0.07994982147216798, 0.0799662094116211, 0.08002559661865234, 0.07982284545898438, 0.07966310119628907, 0.0799477767944336, 0.08003788757324219, 0.07990271759033203, 0.08084480285644531, 0.0802667236328125, 0.0808904037475586, 0.07984508514404297, 0.08008528137207031, 0.07964828491210937, 0.07959929656982422, 0.0800673599243164, 0.07951318359375, 0.0793359375, 0.0799230728149414, 0.07919004821777344, 0.07986918640136718, 0.0798318099975586, 0.07953817749023437, 0.08041881561279297, 0.07986150360107422, 0.07941474914550781, 0.08049465942382812, 0.079882080078125, 0.07922163391113281, 0.08000675201416016, 0.07950297546386718, 0.07947344207763672, 0.0802628173828125, 0.0797740478515625, 0.080648193359375, 0.07995801544189453, 0.07969382476806641, 0.08067686462402343, 0.0908939208984375, 0.07983103942871093, 0.07907801818847657, 0.07868592071533204, 0.07822102355957031, 0.07888902282714844, 0.07915167999267578, 0.07874079895019531, 0.07879257965087891, 0.0791803207397461, 0.07888665771484375, 
0.08115225219726563, 0.08212684631347657, 0.08115200042724609, 0.07979174041748047, 0.07953855895996094, 0.07965081787109375, 0.07976249694824218, 0.07974803161621094, 0.08013209533691407, 0.08013174438476563, 0.07977814483642579, 0.07984333038330078, 0.07939180755615234, 0.07979833221435546, 0.08011251068115234, 0.07996121978759765, 0.07965507507324218, 0.07920098876953124, 0.07966864013671875, 0.07947939300537109, 0.07959142303466797, 0.0796851806640625, 0.07976966094970703, 0.07921497344970703, 0.07945420837402344, 0.08034249877929688, 0.07975580596923829, 0.0797242202758789, 0.08042249298095704, 0.07965283203125, 0.07924143981933594, 0.0797721939086914, 0.07913881683349609, 0.08072396850585938, 0.07998873901367187, 0.08002150726318359, 0.08079769897460938, 0.08105779266357421, 0.08013148498535157, 0.08095394897460938, 0.08097382354736328, 0.08040035247802735, 0.08041065979003906, 0.0809400634765625, 0.08043004608154297, 0.0804125747680664, 0.08005785369873047, 0.08010591888427734, 0.08057462310791015, 0.07988428497314454, 0.07968153381347656, 0.08011775970458984]",tokens/s,12.524237535933143,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 416.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 370.12 MiB is free. Process 31798 has 14.38 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 25.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 416.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 370.12 MiB is free. Process 33619 has 14.38 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 25.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,888.619008,6477.971456,0.0,6075.449344,6044.13184,s,1,16.2314296875,16.2314296875,0.0,16.2314296875,16.2314296875,16.2314296875,16.2314296875,[16.2314296875],,kWh,0.0002245309819666545,2.4760122598579482e-05,7.148755718999755e-05,0.00032077866175523153,,MB,1394.712576,7002.259456,0.0,6587.154432,6470.128128,s,10,10.572881225585936,1.0572881225585937,0.011510382939492771,1.05495361328125,1.061917297363281,1.0756966247558593,1.0867200866699218,"[1.044297607421875, 1.0498558349609375, 1.0509801025390626, 1.053966796875, 1.0546439208984375, 1.0552633056640626, 1.0894759521484374, 1.057814697265625, 1.058855224609375, 1.057727783203125]",tokens/s,242.1288904489824,kWh,3.071644304458497e-05,3.3875047509478433e-06,2.0359099620599618e-05,5.446304741613243e-05,tokens/kWh,4700434.73777728,MB,1431.629824,7016.93952,0.0,6599.737344,6470.130688,s,10,50.737300292968754,5.073730029296875,0.012481746824631325,5.0690693359375,5.0931044921875,5.095093017578125,5.096683837890625,"[5.0568740234375, 5.08275634765625, 5.0643447265625, 5.06379638671875, 5.075240234375, 5.070181640625, 5.09266259765625, 5.06640576171875, 5.06795703125, 5.09708154296875]",tokens/s,12.41690031519683,kWh,0.00014853017084208052,1.6384042254432047e-05,9.842127318140034e-05,0.0002633354862779129,tokens/kWh,239238.5503771889,,s,630,50.73481617736818,0.08053145424979073,0.001532752384334327,0.08054681396484376,0.08103526306152344,0.08138409996032715,0.09051999145507812,"[0.09048115539550781, 0.08028569793701172, 0.0794603500366211, 0.07897254180908203, 0.07854732513427734, 0.07904009246826171, 0.08002191925048828, 0.0797286376953125, 0.08016815948486328, 0.08041551971435547, 0.08070553588867188, 0.08063795471191407, 0.08053350067138672, 0.08058882904052735, 0.08065558624267578, 0.08061753845214843, 0.08004473876953125, 0.07969792175292968, 0.07974297332763672, 0.07945216369628906, 0.07897663879394531, 0.07876236724853515, 0.07882262420654297, 0.07888566589355468, 0.07954598236083985, 0.08002803039550781, 0.07950950622558593, 0.08005017852783203, 0.07990681457519531, 0.0795277099609375, 0.07965923309326171, 0.08017036437988281, 0.07945811462402344, 0.07892025756835938, 0.0788538589477539, 0.08039273834228515, 0.08007206726074219, 0.08028428649902344, 0.07994303894042969, 0.08055052947998047, 0.08013414764404297, 0.08053913879394531, 0.08020838165283203, 0.08096329498291016, 0.08022214508056641, 0.07987017822265625, 0.0808695068359375, 0.08054579162597657, 0.08059657287597656, 0.0806095962524414, 0.08060733032226562, 0.08066400146484375, 0.08049107360839844, 0.08089600372314452, 0.08089299011230469, 0.08117078399658204, 0.08063241577148438, 
0.08062525177001953, 0.08059503936767579, 0.08076934051513672, 0.08061686706542968, 0.08111126708984374, 0.08085747528076172, 0.09188966369628906, 0.08038604736328125, 0.07960108947753906, 0.07914553833007812, 0.0786165771484375, 0.07865049743652344, 0.0786645736694336, 0.07862400054931641, 0.07860476684570313, 0.08017948913574219, 0.0805212173461914, 0.08040243530273437, 0.0816332778930664, 0.08131788635253906, 0.08070899200439453, 0.08031705474853515, 0.07988992309570313, 0.08067731475830078, 0.08023248291015625, 0.08034419250488281, 0.08062985229492188, 0.08065065765380859, 0.08075100708007812, 0.08087133026123047, 0.0806626205444336, 0.08020118713378906, 0.08071123504638672, 0.08080191802978516, 0.08019398498535156, 0.08177638244628906, 0.0804583969116211, 0.08077053070068359, 0.08039065551757812, 0.08083859252929687, 0.08082646179199218, 0.08093695831298828, 0.08082431793212891, 0.08086118316650391, 0.0804659194946289, 0.08086732482910156, 0.08080178833007813, 0.0814336929321289, 0.08086825561523438, 0.08058265686035156, 0.0805027847290039, 0.08055398559570312, 0.08094105529785156, 0.08090943908691406, 0.08096636962890626, 0.08109657287597656, 0.0805909423828125, 0.08069548797607422, 0.08059699249267578, 0.0805962905883789, 0.08063865661621093, 0.08068915557861328, 0.08052928161621094, 0.08068313598632812, 0.08096665954589843, 0.08082246398925781, 0.0806654052734375, 0.0807014389038086, 0.08077919769287109, 0.0918023681640625, 0.08036966705322265, 0.07964262390136718, 0.07915929412841796, 0.07864851379394532, 0.07865017700195312, 0.07860403442382813, 0.07864482879638672, 0.07867254638671875, 0.0786698226928711, 0.08023654174804687, 0.08081407928466797, 0.0826240005493164, 0.08138803100585937, 0.08042495727539062, 0.08010700988769531, 0.07961036682128907, 0.07962973022460937, 0.08024038696289063, 0.08030441284179687, 0.08068511962890625, 0.08070195007324218, 0.08068915557861328, 0.08076287841796875, 0.08041881561279297, 0.08079948425292968, 0.08073856353759766, 0.08056012725830078, 0.0806297607421875, 0.0805884780883789, 0.08016051483154298, 0.08103353881835937, 0.0801908187866211, 0.08084368133544922, 0.08087289428710938, 0.08051334381103516, 0.08109033966064454, 0.0805503692626953, 0.08022425842285157, 0.08099212646484374, 0.08024076843261718, 0.08093081665039062, 0.0809144287109375, 0.0809697265625, 0.08045362854003907, 0.08053759765625, 0.08021196746826172, 0.07995718383789062, 0.0799117431640625, 0.07996604919433593, 0.0796872329711914, 0.07934422302246094, 0.07948047637939454, 0.0803863983154297, 0.0799334716796875, 0.0800296630859375, 0.07994982147216798, 0.07993958282470703, 0.08002969360351563, 0.08019145965576172, 0.0796197738647461, 0.07959149169921875, 0.08052764892578125, 0.09107865905761718, 0.08032848358154297, 0.07956912231445312, 0.07908761596679688, 0.0801402587890625, 0.08026099395751952, 0.08024896240234375, 0.0805990753173828, 0.08054783630371094, 0.08006217956542969, 0.08027279663085937, 0.07996051025390626, 0.08037625885009765, 0.07966899108886719, 0.0791984634399414, 0.07875379180908203, 0.07870646667480469, 0.0795684814453125, 0.0799543685913086, 0.07967558288574218, 0.08036557006835937, 0.07973273468017578, 0.079906494140625, 0.08015084838867187, 0.07970201873779297, 0.07989177703857422, 0.07987884521484374, 0.07960896301269531, 0.07908163452148438, 0.07967203521728515, 0.08015462493896484, 0.07956479644775391, 0.08020787048339843, 0.07973887634277343, 0.080216064453125, 0.0799799346923828, 0.07973538970947265, 0.08073785400390625, 0.08058924865722657, 
0.08092467498779297, 0.08013619232177735, 0.08056012725830078, 0.08047411346435547, 0.08069120025634766, 0.08050482940673828, 0.08057651519775391, 0.08057596588134766, 0.08058115386962891, 0.0805183334350586, 0.08090502166748047, 0.08086732482910156, 0.08117862701416016, 0.08057344055175782, 0.08070861053466796, 0.08060108947753906, 0.0806789093017578, 0.08056784057617188, 0.08091670227050782, 0.08103343963623047, 0.08115203094482422, 0.08056832122802735, 0.08061881256103516, 0.08065913391113282, 0.09248925018310547, 0.08125660705566407, 0.07961222076416016, 0.07914905548095703, 0.078617919921875, 0.0785836181640625, 0.07882227325439453, 0.07880089569091797, 0.07872306823730468, 0.0787435531616211, 0.07872716522216797, 0.08089190673828126, 0.08196505737304688, 0.08148738861083984, 0.08091939544677734, 0.0801760025024414, 0.0794181137084961, 0.07894412994384765, 0.07875392150878906, 0.07883161926269532, 0.08013597106933594, 0.0803326416015625, 0.0804110107421875, 0.08074854278564453, 0.08070758056640626, 0.08141414642333984, 0.08130355072021485, 0.08088278198242188, 0.08084982299804687, 0.08045568084716796, 0.08049254608154296, 0.08058879852294921, 0.08053865814208984, 0.08093590545654297, 0.08050041961669922, 0.08088813018798828, 0.08041471862792969, 0.08082227325439453, 0.08050688171386719, 0.08103526306152344, 0.08081369781494141, 0.08029148864746094, 0.08113766479492188, 0.08106057739257813, 0.08066649627685547, 0.08069087982177735, 0.0805481948852539, 0.08095568084716796, 0.08054541015625, 0.08060947418212891, 0.08103865814208984, 0.08113836669921876, 0.0803629150390625, 0.08085759735107421, 0.08068246459960937, 0.08074614715576171, 0.08057750701904297, 0.08065433502197265, 0.08067459106445313, 0.08078768157958985, 0.08030003356933593, 0.08065843200683594, 0.08031436920166016, 0.0905230712890625, 0.0805340805053711, 0.07964057922363281, 0.07919996643066406, 0.07863529968261719, 0.07861798095703125, 0.07865968322753907, 0.0786694107055664, 0.07861673736572265, 0.07867881774902344, 0.07867526245117187, 0.08180540466308593, 0.08256572723388672, 0.08086083221435547, 0.08044156646728516, 0.08001312255859375, 0.08002329254150391, 0.07881552124023437, 0.0787863998413086, 0.07885174560546875, 0.08013638305664063, 0.08033135986328124, 0.08033683013916015, 0.08049260711669921, 0.08127487945556641, 0.08100032043457031, 0.08130086517333984, 0.08066738891601563, 0.08006451416015625, 0.07981641387939453, 0.08042934417724609, 0.08042086029052735, 0.08033484649658203, 0.08078508758544922, 0.08041094207763672, 0.08090751647949218, 0.0804500503540039, 0.08085724639892578, 0.08046396636962891, 0.08051712036132813, 0.08090579223632813, 0.08099587249755859, 0.08093138885498047, 0.08101081848144531, 0.0805071029663086, 0.08093901062011719, 0.08057241821289063, 0.08129535675048828, 0.0805560302734375, 0.08053884887695313, 0.08054156494140625, 0.080681884765625, 0.0806211166381836, 0.08076678466796874, 0.0807778549194336, 0.08052326202392578, 0.08067481231689454, 0.08034473419189453, 0.08070998382568359, 0.08045903778076172, 0.08059097290039062, 0.08066057586669922, 0.08071212768554688, 0.090512451171875, 0.08014908599853515, 0.07951190185546875, 0.07904425811767578, 0.07860185241699219, 0.08018921661376953, 0.07970706939697265, 0.08018534088134766, 0.08017913818359375, 0.08056793975830077, 0.08052313232421875, 0.08081488037109374, 0.0823763198852539, 0.08062892913818359, 0.08072700500488281, 0.08068915557861328, 0.08050688171386719, 0.08083865356445312, 0.08036147308349609, 0.08081305694580078, 
0.08062188720703126, 0.08077094268798828, 0.08069203186035157, 0.08039363098144531, 0.08069795227050781, 0.08084886169433594, 0.08071337890625, 0.08076531219482422, 0.08076902770996094, 0.08085913848876954, 0.0807419204711914, 0.08071830749511719, 0.08124761962890625, 0.08073280334472656, 0.08077462768554687, 0.0814249267578125, 0.08037785339355469, 0.0807874526977539, 0.08084035491943359, 0.08156604766845703, 0.0804985580444336, 0.08094528198242187, 0.08091648101806641, 0.08092988586425781, 0.08083344268798828, 0.08085094451904297, 0.08084639739990235, 0.081123779296875, 0.08082963562011719, 0.08093778991699219, 0.08056012725830078, 0.08103526306152344, 0.08057548522949219, 0.08106432342529298, 0.0809191665649414, 0.08095916748046875, 0.08055570983886719, 0.08098047637939453, 0.0807425308227539, 0.08072601318359375, 0.08058013153076173, 0.08098454284667969, 0.08075663757324218, 0.09045225524902344, 0.0802259521484375, 0.07953215789794922, 0.07914313507080079, 0.0786534423828125, 0.07867330932617188, 0.07859260559082032, 0.07869235229492187, 0.07863423919677734, 0.07865625762939453, 0.07900768280029297, 0.08257337951660157, 0.08305868530273437, 0.08166400146484375, 0.08054374694824219, 0.07990067291259766, 0.07943711853027344, 0.07874002838134765, 0.07868019104003907, 0.07885004425048828, 0.08042822265625, 0.07949190521240235, 0.07920230102539062, 0.08170028686523438, 0.08169737243652343, 0.0813792953491211, 0.0809144287109375, 0.08038934326171875, 0.07957379150390625, 0.07916089630126953, 0.08039673614501953, 0.0804656982421875, 0.0803985595703125, 0.08047206115722656, 0.0804554214477539, 0.08053376007080078, 0.08135017395019531, 0.08115558624267578, 0.08108335876464844, 0.08051302337646485, 0.07995305633544922, 0.08043196868896485, 0.0800357437133789, 0.08052336120605469, 0.08053759765625, 0.08057405090332032, 0.08053155517578126, 0.08057683563232422, 0.08094515228271484, 0.08069939422607422, 0.08067072296142579, 0.08076697540283204, 0.08058265686035156, 0.07978598022460938, 0.08055318450927734, 0.07982774353027344, 0.08071577453613281, 0.08030617523193359, 0.08088166046142578, 0.08051712036132813, 0.0803318099975586, 0.08104854583740234, 0.08088780975341797, 0.0915673599243164, 0.08021478271484375, 0.08016486358642579, 0.08016690826416016, 0.08062156677246093, 0.08055359649658203, 0.0802840347290039, 0.08048025512695313, 0.08048969268798828, 0.08055888366699218, 0.080583740234375, 0.08101779174804688, 0.08063795471191407, 0.08068710327148437, 0.08065023803710937, 0.08061542510986328, 0.08023859405517578, 0.08030169677734375, 0.07963426971435547, 0.07934620666503907, 0.08031641387939453, 0.07963814544677734, 0.08056050872802735, 0.08031641387939453, 0.07985507202148437, 0.08021660614013672, 0.080006591796875, 0.08068972778320313, 0.07994684600830078, 0.07953910064697266, 0.07970816040039062, 0.08008704376220703, 0.07950511932373047, 0.07999491119384766, 0.08082450866699219, 0.08035539245605469, 0.07982080078125, 0.08044544219970703, 0.0803094711303711, 0.08040528106689453, 0.07988572692871093, 0.07958553314208984, 0.07983248138427734, 0.08086624145507812, 0.08006451416015625, 0.08000498962402344, 0.08015679931640625, 0.0806789093017578, 0.08057036590576172, 0.08022630310058594, 0.0804617919921875, 0.08075411224365234, 0.07992585754394531, 0.07951292419433593, 0.0803334732055664, 0.08009510040283203, 0.08088988494873046, 0.08019747161865234, 0.08004428863525391, 0.08036351776123046, 0.08097996520996094, 0.08058470153808593, 0.080338623046875, 0.09126441955566406, 0.080161376953125, 
0.07941728210449218, 0.0800153579711914, 0.07947644805908204, 0.0802799072265625, 0.08057839965820313, 0.0805664291381836, 0.08055561828613281, 0.0807684783935547, 0.08060819244384766, 0.08028160095214844, 0.08226406097412109, 0.08059436798095704, 0.08058525085449218, 0.08056419372558593, 0.08066873931884766, 0.08086323547363282, 0.08121139526367188, 0.08065023803710937, 0.08068505859375, 0.08068710327148437, 0.08032994842529297, 0.08033155059814454, 0.08121855926513671, 0.08032931518554688, 0.08047862243652344, 0.08144076538085937, 0.08071987152099609, 0.08127474975585937, 0.08078131103515625, 0.08074457550048827, 0.08132608032226563, 0.08073622131347656, 0.080783203125, 0.0809876480102539, 0.08105814361572265, 0.08079933166503907, 0.0807452163696289, 0.0807874526977539, 0.08080147552490234, 0.08156748962402344, 0.0808740463256836, 0.08070963287353515, 0.08094246673583984, 0.08101337432861327, 0.08086937713623046, 0.08090009307861327, 0.08048790740966796, 0.08091292572021484, 0.08052070617675781, 0.08105420684814453, 0.08082780456542969, 0.08089046478271485, 0.08070758056640626, 0.08097382354736328, 0.08055398559570312, 0.08093641662597656, 0.08079564666748047, 0.08161484527587891, 0.08019407653808594, 0.08088575744628906, 0.08019142150878907]",tokens/s,12.417508280655426,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in 
execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, 
self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, 
**kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,880.959488,6477.971456,0.0,6075.449344,6044.13184,s,1,16.61744921875,16.61744921875,0.0,16.61744921875,16.61744921875,16.61744921875,16.61744921875,[16.61744921875],,kWh,0.00023311559083750427,2.570710072197138e-05,7.476561536799708e-05,0.0003335883069274727,,MB,1402.728448,7002.259456,0.0,6587.154432,6470.128128,s,10,10.377251586914063,1.0377251586914062,0.004119444769134758,1.0376526489257811,1.0427670166015623,1.0430039916992186,1.0431935717773437,"[1.0290411376953126, 1.0338621826171874, 1.0355106201171875, 1.036467529296875, 1.039693603515625, 1.0374874267578125, 1.0414158935546876, 1.04271435546875, 1.03781787109375, 1.043240966796875]",tokens/s,246.6934504342378,kWh,3.0326203891664817e-05,3.3444434319524646e-06,2.014868278559928e-05,5.381933010921656e-05,tokens/kWh,4756655.266434838,MB,1440.11264,7016.93952,0.0,6599.737344,6470.130688,s,10,50.342291015625,5.0342291015625005,0.012947574995243238,5.037735595703126,5.045789843750001,5.0465294433593755,5.0471211230468755,"[5.00603564453125, 5.02252880859375, 5.04562548828125, 5.01968310546875, 5.04454833984375, 5.0438623046875, 5.03726708984375, 5.0378349609375, 5.04726904296875, 5.03763623046875]",tokens/s,12.514329151298726,kWh,0.0001476985999095848,1.629138896290381e-05,9.707304988060133e-05,0.0002610630387530899,tokens/kWh,241321.02461116528,,s,630,50.33960965728759,0.07990414231315492,0.0015563375116845655,0.0799221305847168,0.08084493255615234,0.08103640327453614,0.08969681533813478,"[0.0893713607788086, 0.08033798217773437, 0.07961837005615234, 0.07921036529541016, 0.07883209228515625, 0.08017334747314453, 0.07944525146484376, 0.0790450210571289, 0.07874800109863281, 0.07860224151611328, 0.07852425384521484, 0.07850588989257813, 0.07859977722167968, 0.07848191833496093, 0.07856550598144531, 0.0785052490234375, 0.07860095977783203, 0.07864729309082032, 0.07861357116699219, 0.08034774780273438, 0.07995785522460938, 0.07975347137451172, 0.07956710052490235, 0.07934070587158203, 0.07898403167724609, 0.07869644927978516, 0.07864857482910156, 0.07868083190917968, 0.07877632141113282, 0.07866777801513672, 0.07862598419189454, 0.07873209381103516, 0.07863910675048828, 0.0786698226928711, 0.0805212173461914, 0.07980646514892578, 0.07932077026367187, 
0.08053587341308593, 0.08000102233886719, 0.0797675552368164, 0.0797470703125, 0.07979212951660156, 0.07941939544677734, 0.07921376037597656, 0.07870543670654297, 0.0788214111328125, 0.07892313385009765, 0.07895126342773437, 0.07879590606689453, 0.07891830444335937, 0.07888416290283203, 0.07886019134521484, 0.07868294525146484, 0.07887049865722656, 0.08102912139892578, 0.07998242950439453, 0.07944742584228516, 0.08059136199951172, 0.08014876556396484, 0.08090214538574218, 0.08002764892578125, 0.07964230346679688, 0.08094751739501953, 0.09087590026855469, 0.08030534362792968, 0.07947481536865235, 0.07911804962158203, 0.07869974517822266, 0.07846272277832031, 0.07843840026855468, 0.07873334503173827, 0.07961971282958985, 0.07851840209960938, 0.08023622131347656, 0.07960588836669921, 0.08018685150146485, 0.0796365737915039, 0.07918447875976563, 0.0795895004272461, 0.07891104125976563, 0.07873999786376953, 0.07847119903564453, 0.07858585357666016, 0.0784865951538086, 0.0785083236694336, 0.07856400299072265, 0.0787783660888672, 0.07860224151611328, 0.07867391967773438, 0.08056422424316406, 0.08010342407226563, 0.0805879669189453, 0.07999680328369141, 0.08062457275390625, 0.08001331329345703, 0.08099635314941406, 0.08012995147705078, 0.08056636810302735, 0.08003724670410156, 0.0805771484375, 0.08066252899169922, 0.0800235824584961, 0.07941110229492188, 0.07969171142578126, 0.079085693359375, 0.07870054626464844, 0.07881635284423828, 0.07889193725585937, 0.07880089569091797, 0.07874790191650391, 0.07880063629150391, 0.0786690902709961, 0.07888764953613281, 0.07889647674560547, 0.07902809906005859, 0.08075958251953125, 0.08014848327636719, 0.07937593841552734, 0.08061382293701172, 0.0797939224243164, 0.07924463653564454, 0.08079043579101562, 0.0809144287109375, 0.08075263977050781, 0.08015257263183594, 0.08139724731445312, 0.09085724639892578, 0.08022077178955078, 0.07947984313964844, 0.07892066955566407, 0.07847245025634765, 0.0784271011352539, 0.07844226837158202, 0.0784131851196289, 0.07833462524414063, 0.07846476745605468, 0.08017737579345703, 0.07945625305175781, 0.08071376037597656, 0.08039344024658203, 0.07984204864501954, 0.0791695327758789, 0.07837619018554688, 0.07847090911865234, 0.07846112060546875, 0.07880544281005859, 0.07863744354248046, 0.07860018920898437, 0.07857308959960937, 0.07873174285888672, 0.07857917022705078, 0.0786949462890625, 0.08089958190917969, 0.08021580505371094, 0.07920486450195313, 0.08096092987060546, 0.08025379180908203, 0.08056547546386719, 0.07989532470703126, 0.08086486053466797, 0.08060150146484375, 0.0801641616821289, 0.0810400619506836, 0.08043724822998047, 0.08104291534423828, 0.08032105255126953, 0.08060723114013672, 0.08004402923583985, 0.08100045013427734, 0.08042655944824219, 0.08066502380371093, 0.08075059509277344, 0.08007884979248046, 0.08107615661621094, 0.08035266876220704, 0.08109318542480469, 0.08142803192138671, 0.08011619567871094, 0.07940716552734375, 0.08104959869384766, 0.08047756958007812, 0.08133081817626953, 0.08025526428222657, 0.08059027099609375, 0.07983309173583984, 0.0808594207763672, 0.08009318542480469, 0.08073567962646484, 0.08091216278076171, 0.09024307250976563, 0.08032621002197265, 0.0794972152709961, 0.07883942413330078, 0.07837267303466797, 0.0783839340209961, 0.07839884948730469, 0.07851471710205078, 0.07847248077392578, 0.07843122863769532, 0.08022016143798828, 0.07958889770507813, 0.08014486694335937, 0.07948639678955079, 0.08084867095947265, 0.08018819427490234, 0.08043859100341796, 0.08023625946044922, 
0.07928479766845703, 0.07868386840820313, 0.07849440002441406, 0.07857526397705078, 0.07870432281494141, 0.07861875152587891, 0.07861875152587891, 0.07873280334472656, 0.07864604949951172, 0.07874364471435547, 0.08036137390136719, 0.07978182220458985, 0.07899705505371094, 0.08097392272949219, 0.08029647827148438, 0.0793958740234375, 0.08095830535888672, 0.080382080078125, 0.08054386901855469, 0.08062073516845703, 0.07996896362304687, 0.07947853088378906, 0.0797841567993164, 0.07912451171875, 0.0787988510131836, 0.07897293090820312, 0.07882704162597656, 0.0787911376953125, 0.07897293090820312, 0.07892527770996094, 0.07873139190673828, 0.07918358612060547, 0.08042371368408203, 0.07991696166992188, 0.078716796875, 0.07883148956298829, 0.08055014038085938, 0.0799754867553711, 0.07901280212402344, 0.08070108795166016, 0.07998294067382812, 0.07948697662353515, 0.08113362884521484, 0.08082044982910157, 0.08026902770996094, 0.08945664215087891, 0.08039212799072265, 0.07967257690429687, 0.07907615661621094, 0.07839315032958985, 0.07829523468017578, 0.07839148712158203, 0.07851535797119141, 0.07846320343017578, 0.07847161865234376, 0.07852851104736328, 0.0785041275024414, 0.08082003021240235, 0.08045731353759765, 0.08084726715087891, 0.08034480285644531, 0.08038848114013672, 0.08035318756103516, 0.07976140594482421, 0.07920025634765625, 0.07840544128417969, 0.07853485107421875, 0.07859977722167968, 0.07858998107910156, 0.07855449676513672, 0.07857020568847656, 0.07859251403808594, 0.07963113403320313, 0.08002444458007812, 0.07957222747802735, 0.08039244842529297, 0.07969446563720703, 0.07881932830810547, 0.08029388427734375, 0.07964604949951172, 0.08113775634765626, 0.08024736022949219, 0.08065023803710937, 0.07999433898925781, 0.08101273345947266, 0.0805311050415039, 0.08001216125488281, 0.0806297607421875, 0.08079296112060547, 0.08000576019287109, 0.08073625946044923, 0.08072796630859375, 0.0807895965576172, 0.08075059509277344, 0.08072601318359375, 0.08064940643310547, 0.08076985931396484, 0.08068505859375, 0.08071372985839843, 0.08022220611572266, 0.0811704330444336, 0.08038377380371094, 0.0811337890625, 0.08081407928466797, 0.08036147308349609, 0.08113270568847657, 0.08043344116210938, 0.08082268524169922, 0.08979491424560547, 0.080544189453125, 0.07973478698730468, 0.07918313598632812, 0.0784719009399414, 0.07849139404296875, 0.07845503997802734, 0.07849065399169922, 0.07846514892578126, 0.07857635498046875, 0.07849382019042969, 0.07865753936767578, 0.08082431793212891, 0.08051302337646485, 0.08085094451904297, 0.08056752014160157, 0.07997277069091797, 0.08089379119873047, 0.08041935729980469, 0.0809697265625, 0.0805212173461914, 0.079998046875, 0.08085308837890624, 0.08056448364257812, 0.08045545959472657, 0.07983216094970703, 0.07919945526123047, 0.07862451171875, 0.078598876953125, 0.07865753936767578, 0.07866777801513672, 0.07871897888183593, 0.07855427551269531, 0.07863523101806641, 0.07954467010498047, 0.0802306900024414, 0.0794438705444336, 0.07953174591064453, 0.08010765075683594, 0.07955046081542969, 0.0786250228881836, 0.08074034881591798, 0.08011743927001953, 0.07950982666015625, 0.08099635314941406, 0.08032665252685547, 0.08076697540283204, 0.08016690826416016, 0.08108646392822266, 0.08039993286132813, 0.08069686126708984, 0.08009616088867187, 0.08103900909423828, 0.08076528167724609, 0.08071372985839843, 0.08020156860351563, 0.0812136001586914, 0.08056537628173828, 0.08080268859863281, 0.08036576080322265, 0.08071968078613281, 0.08020377349853515, 0.08082022094726563, 
0.0900283203125, 0.08041267395019532, 0.07966989135742188, 0.08025881958007812, 0.07961830139160156, 0.08069686126708984, 0.08029436492919922, 0.0796753921508789, 0.07927548980712891, 0.0784936294555664, 0.07847907257080078, 0.07852518463134765, 0.07845094299316406, 0.07850994873046875, 0.07849775695800781, 0.07849132537841796, 0.07872956848144531, 0.08021196746826172, 0.07916748809814453, 0.08094515228271484, 0.08039833831787109, 0.07973836517333985, 0.08023433685302735, 0.0795798110961914, 0.08077926635742187, 0.08047331237792969, 0.07994038391113281, 0.079287841796875, 0.07852067565917968, 0.0786228485107422, 0.0787066879272461, 0.078712158203125, 0.07867868804931641, 0.07861862182617188, 0.07906098937988282, 0.08030413055419922, 0.0796049575805664, 0.07953488159179688, 0.08010761260986328, 0.07958499145507812, 0.07876012420654296, 0.08071987152099609, 0.07985731506347657, 0.07918831634521484, 0.08083763122558593, 0.08020457458496094, 0.08074988555908204, 0.08017919921875, 0.0806286392211914, 0.08006012725830078, 0.08060546875, 0.08080105590820312, 0.0800157470703125, 0.08076943969726562, 0.08018425750732422, 0.08123689270019531, 0.08040045166015625, 0.07956687927246094, 0.08124416351318359, 0.08058879852294921, 0.079425537109375, 0.08113267517089844, 0.08092556762695312, 0.09004425811767579, 0.0803642578125, 0.07971839904785157, 0.08036457824707031, 0.08034156799316407, 0.08034550476074219, 0.08036351776123046, 0.08048982238769531, 0.07969382476806641, 0.07901651000976563, 0.07950748443603516, 0.07891359710693359, 0.07849369812011718, 0.07851132965087891, 0.07869315338134765, 0.07863724517822265, 0.0785098876953125, 0.07858175659179688, 0.07942691040039063, 0.08028598022460938, 0.07967167663574219, 0.07862067413330077, 0.08078083038330078, 0.08016355133056641, 0.07919590759277344, 0.08084467315673828, 0.08024076843261718, 0.08094080352783203, 0.08019379425048828, 0.08091964721679687, 0.08063593292236328, 0.08061353302001953, 0.08050953674316406, 0.08066675567626953, 0.08059903717041016, 0.08074034881591798, 0.08056845092773438, 0.08050265502929688, 0.08000476837158203, 0.0793603515625, 0.07868415832519532, 0.07880016326904297, 0.07870079803466797, 0.07884028625488282, 0.07878383636474609, 0.07875651550292968, 0.07968934631347656, 0.08017549133300782, 0.0795525131225586, 0.07971356964111329, 0.08020563507080078, 0.07978486633300781, 0.07879065704345703, 0.07890739440917968, 0.08067276763916016, 0.07997235107421875, 0.079048095703125, 0.08082492828369141, 0.08011488342285156, 0.07935469055175781, 0.08068726348876953, 0.08015392303466796, 0.0812806396484375, 0.0906153564453125, 0.08045203399658203, 0.07964454650878906, 0.07913075256347656, 0.07840518188476563, 0.07845318603515625, 0.07846918487548828, 0.07848502349853516, 0.078363037109375, 0.07848127746582031, 0.07853897857666016, 0.07890729522705078, 0.08039014434814454, 0.08081798553466797, 0.08041696166992188, 0.08039622497558593, 0.07977513885498047, 0.08083318328857422, 0.08019129943847657, 0.08092598724365234, 0.08057539367675781, 0.08049254608154296, 0.0804653091430664, 0.08056073760986328, 0.07987197113037109, 0.07925504302978516, 0.07967411041259766, 0.07908124542236328, 0.07865958404541015, 0.07872041320800781, 0.07865721893310547, 0.0787239990234375, 0.07861862182617188, 0.08006479644775391, 0.07993023681640625, 0.07936700439453125, 0.07870777893066407, 0.07988114929199219, 0.08074649810791015, 0.08031600189208984, 0.07931536102294921, 0.08104755401611329, 0.08005836486816406, 0.08076812744140625, 0.079939453125, 
0.0806526107788086, 0.07996896362304687, 0.08059903717041016, 0.08057142639160156, 0.08077967834472656, 0.08065270233154297, 0.08075689697265626, 0.07997599792480468, 0.08090000152587891, 0.08071222686767578, 0.0812072982788086, 0.08072509002685548, 0.08076175689697265, 0.08069222259521484, 0.0808622055053711, 0.08084793853759766, 0.08088195037841797, 0.08022083282470703, 0.08940134429931641, 0.08038604736328125, 0.07965033721923828, 0.08052169799804687, 0.0803082275390625, 0.0803691177368164, 0.08037551879882812, 0.08033148956298829, 0.07969091033935546, 0.07916835021972657, 0.07851123046875, 0.07850396728515625, 0.07850080108642578, 0.07855718231201173, 0.07844659423828125, 0.07847430419921875, 0.07977670288085938, 0.0799537582397461, 0.07913897705078125, 0.07985104370117188, 0.07987452697753906, 0.079388671875, 0.08044134521484375, 0.07985356903076171, 0.07886844635009765, 0.08058582305908203, 0.08053968048095703, 0.08058354949951171, 0.08044547271728515, 0.08048668670654296, 0.08067417907714844, 0.08069113922119141, 0.08063632202148438, 0.07992729949951172, 0.08051302337646485, 0.08007244873046875, 0.07974527740478515, 0.07917772674560547, 0.07872512054443359, 0.0786698226928711, 0.0786800308227539, 0.07881501007080079, 0.07868402862548828, 0.07983497619628906, 0.08025936126708984, 0.07976166534423829, 0.07962361907958984, 0.08013894653320312, 0.07973264312744141, 0.07884796905517578, 0.0787694091796875, 0.08059811401367188, 0.07982249450683594, 0.07908080291748047, 0.0806550064086914, 0.0801239013671875, 0.08103321838378906, 0.08042700958251953, 0.08078704071044922, 0.08022233581542969, 0.08062799835205078, 0.08017100524902344, 0.08083865356445312]",tokens/s,12.514995731771547,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15403.962368,9514.647552,0.0,9112.12544,9086.72256,s,1,35.12304296875,35.12304296875,0.0,35.12304296875,35.12304296875,35.12304296875,35.12304296875,[35.12304296875],,kWh,0.0007710414321458302,8.504451619075104e-05,0.0002495143662780003,0.0011056003146145816,,MB,4078.477312,9692.905472,0.0,9269.41184,9235.912192,s,10,1.2643746795654298,0.12643746795654298,0.0008266481532012924,0.12620406341552734,0.12767002182006837,0.12787409629821778,0.1280373558807373,"[0.12762467193603516, 0.12654662322998048, 0.1252915496826172, 0.12807817077636718, 0.12610284423828125, 0.12630528259277343, 0.1258170852661133, 0.12603833770751954, 0.125698974609375, 0.1268711395263672]",tokens/s,2024.7162817906803,kWh,3.6699333617188756e-06,4.047275636755525e-07,2.4366825049000966e-06,6.511343430294525e-06,tokens/kWh,39316003.33180713,MB,4082.7904,9705.488384,0.0,9281.994752,9177.194496,s,10,79.25482861328125,7.925482861328125,0.015173426462082461,7.926976806640624,7.94667998046875,7.94768544921875,7.94848982421875,"[7.9286318359375, 7.94869091796875, 7.92532177734375, 7.906017578125, 7.94645654296875, 7.93572509765625, 7.912375, 7.90211865234375, 
7.932205078125, 7.9172861328125]",tokens/s,7.949042487670294,kWh,0.0002307615320328636,2.5454131798180985e-05,0.00010394504148929907,0.0003601607053203436,tokens/kWh,174921.91421594672,,s,630,79.25221926879877,0.12579717344253782,0.0012254419671425979,0.12554127883911131,0.12692436294555665,0.1276180290222168,0.13009891815185548,"[0.1263271026611328, 0.12606870269775391, 0.12606451416015624, 0.12568182373046874, 0.12513670349121095, 0.1253614044189453, 0.12664752197265625, 0.1258278045654297, 0.12613868713378906, 0.12495737457275391, 0.12534758758544923, 0.12616067504882814, 0.12517750549316406, 0.12503091430664062, 0.12513516998291016, 0.1255519027709961, 0.12699942779541015, 0.1254746551513672, 0.1258967056274414, 0.12543612670898438, 0.12557244873046874, 0.12724393463134764, 0.1267208023071289, 0.12601897430419923, 0.12668489837646485, 0.12598470306396484, 0.12687423706054687, 0.12492147064208985, 0.12500243377685546, 0.12512035369873048, 0.12518211364746093, 0.12980239868164062, 0.12647203063964843, 0.12535708618164063, 0.1255864028930664, 0.1269391326904297, 0.1261465606689453, 0.12578966522216797, 0.12534598541259764, 0.12577008056640626, 0.12642281341552736, 0.12564620971679688, 0.12571529388427735, 0.12516780853271484, 0.12621190643310548, 0.12807781982421876, 0.1254933090209961, 0.12570582580566406, 0.12614009857177735, 0.12582160186767577, 0.12522905731201173, 0.12445491027832031, 0.12717670440673828, 0.12555372619628907, 0.12553462219238282, 0.12564918518066406, 0.12557920074462892, 0.12551798248291016, 0.12564470672607422, 0.12479923248291015, 0.12510514831542968, 0.12517273712158203, 0.12558131408691406, 0.1264614715576172, 0.1261470413208008, 0.1257922592163086, 0.12557129669189454, 0.12646963500976563, 0.12613251495361327, 0.1261710433959961, 0.12728125, 0.12605232238769531, 0.12554601287841796, 0.12898162841796876, 0.12586383819580077, 0.1259781723022461, 0.1260668182373047, 0.1369582061767578, 0.1262525100708008, 0.12648300933837892, 0.12630342102050782, 0.12571846771240233, 0.12550374603271483, 0.12627375793457032, 0.1255383071899414, 0.12526019287109375, 0.12556492614746093, 0.12726886749267577, 0.12549324798583986, 0.12647583770751952, 0.12533155059814452, 0.12561775970458985, 0.12495263671875, 0.12494509124755859, 0.12596588897705077, 0.12632886505126953, 0.12469395446777344, 0.12639679718017577, 0.12511901092529296, 0.12595410919189454, 0.1261443862915039, 0.12563043212890626, 0.1255118408203125, 0.12543795013427733, 0.12499539184570313, 0.12707449340820312, 0.12524095916748046, 0.12563394927978516, 0.1248675537109375, 0.1249771499633789, 0.12955661010742187, 0.12600032043457032, 0.1255390090942383, 0.12608306884765624, 0.12756358337402343, 0.12603616333007814, 0.1254133758544922, 0.12588179016113282, 0.1256552963256836, 0.12658675384521484, 0.12538822174072264, 0.12609024047851564, 0.1258287353515625, 0.12697638702392577, 0.1257902069091797, 0.125623779296875, 0.12601347351074219, 0.1259139175415039, 0.12568716430664062, 0.12955523681640624, 0.1261868133544922, 0.12573340606689454, 0.12532313537597656, 0.12525965118408203, 0.1257019500732422, 0.12488716888427734, 0.12557561492919922, 0.1254742431640625, 0.12547318267822266, 0.12600717163085937, 0.12582288360595703, 0.1259440612792969, 0.12640230560302734, 0.1254048309326172, 0.1256085433959961, 0.12612713623046876, 0.1261209259033203, 0.1254316177368164, 0.1253698272705078, 0.12588925170898438, 0.12586803436279298, 0.1251635208129883, 0.1257573471069336, 0.12574217224121093, 0.12750294494628905, 0.12913494873046874, 
0.12692272186279296, 0.1252919692993164, 0.1259728012084961, 0.12495702362060547, 0.12505497741699217, 0.12597254180908204, 0.12656813049316407, 0.12505318450927735, 0.12460441589355468, 0.12476227569580078, 0.12570995330810547, 0.12589180755615234, 0.1252526092529297, 0.12493004608154297, 0.12513005065917968, 0.1247607650756836, 0.12495404815673829, 0.1276354217529297, 0.12628428649902343, 0.12507059478759766, 0.12576204681396486, 0.12583340454101563, 0.12679977416992189, 0.12568185424804687, 0.1253020477294922, 0.1252992935180664, 0.1261343002319336, 0.12756966400146486, 0.12487094116210938, 0.12528781127929686, 0.12591590118408202, 0.12527206420898437, 0.12447539520263672, 0.12913568115234375, 0.12544300842285155, 0.13083001708984374, 0.12601366424560548, 0.12531107330322266, 0.12564412689208984, 0.12587503814697265, 0.1256824951171875, 0.12565100860595704, 0.1260082550048828, 0.12472013092041015, 0.12592371368408203, 0.12552995300292968, 0.12518048095703124, 0.12533577728271483, 0.12558537292480468, 0.12554576110839843, 0.12475437164306641, 0.12729776000976561, 0.12720137786865235, 0.12594790649414062, 0.12645756530761718, 0.1251740493774414, 0.12515062713623046, 0.12427938842773438, 0.12457369232177734, 0.12421875, 0.1246787872314453, 0.12459414672851563, 0.12489859008789063, 0.12498214721679687, 0.12460838317871094, 0.12496691131591797, 0.12432550048828125, 0.12474992370605469, 0.1265964813232422, 0.1274028778076172, 0.12582450866699219, 0.12664067077636718, 0.12596540832519532, 0.12522793579101563, 0.12465084838867188, 0.12444022369384766, 0.12445388793945313, 0.12450816345214843, 0.1282455291748047, 0.12565430450439452, 0.12551673889160156, 0.12544818878173827, 0.12479049682617188, 0.12483612823486329, 0.12575846099853516, 0.126129150390625, 0.12468560028076171, 0.12494732666015625, 0.12574908447265626, 0.12488317108154297, 0.12452227020263672, 0.12396940612792968, 0.12448780822753906, 0.12471091461181641, 0.12446063995361328, 0.12495200347900391, 0.12558963012695312, 0.12586380767822267, 0.1244912338256836, 0.12516172790527344, 0.1256799011230469, 0.12499075317382813, 0.12511273956298827, 0.12513292694091796, 0.12506336212158203, 0.12704755401611328, 0.1252721939086914, 0.12480620574951172, 0.12488829040527344, 0.1247609634399414, 0.12666499328613282, 0.13116064453125, 0.1254092788696289, 0.12483379364013672, 0.13022003173828126, 0.12535603332519532, 0.1268326416015625, 0.12661145782470704, 0.12575698852539063, 0.12721196746826172, 0.12646351623535157, 0.12654208374023437, 0.12494457244873047, 0.12675689697265624, 0.12565299224853516, 0.12563375854492187, 0.12520047760009764, 0.1268303680419922, 0.1265489273071289, 0.1250386276245117, 0.1288805694580078, 0.1264425277709961, 0.12597142028808594, 0.12558745574951172, 0.1252080307006836, 0.12621862030029296, 0.12568943786621095, 0.12505760192871093, 0.12499116516113282, 0.12468067169189453, 0.12551545715332033, 0.1251022415161133, 0.12545228576660156, 0.1272647705078125, 0.12596150207519533, 0.12568006134033202, 0.1254028778076172, 0.12633280181884765, 0.12609910583496095, 0.12566770935058594, 0.12584204864501952, 0.12590006256103517, 0.12551395416259767, 0.1372677764892578, 0.12566726684570312, 0.12514918518066406, 0.12883091735839844, 0.1260481948852539, 0.12523363494873047, 0.12642678070068358, 0.1257209243774414, 0.1258881607055664, 0.125210205078125, 0.12513075256347655, 0.1250516815185547, 0.1254191665649414, 0.12633094024658204, 0.12631068420410158, 0.12615475463867187, 0.12579580688476563, 0.12668572998046876, 
0.12525363159179687, 0.12558745574951172, 0.12599513244628907, 0.12642221069335938, 0.12626399993896484, 0.12592546844482422, 0.12533907318115234, 0.1252254409790039, 0.12548614501953126, 0.1255044479370117, 0.12504598236083983, 0.12582991790771483, 0.12702105712890624, 0.1257260513305664, 0.12545203399658203, 0.1253297576904297, 0.12507193756103516, 0.12484403228759766, 0.12823960876464843, 0.1265950698852539, 0.12611283111572266, 0.12926792907714843, 0.12672073364257813, 0.125591552734375, 0.1256871337890625, 0.12552873229980469, 0.12553011322021485, 0.12618137359619142, 0.12601548767089843, 0.1261529312133789, 0.1264985885620117, 0.12631654357910158, 0.12599212646484376, 0.12563673400878905, 0.12531804656982423, 0.12649417877197267, 0.12598294067382812, 0.12916268920898438, 0.12683881378173828, 0.1262086410522461, 0.126312255859375, 0.12549683380126953, 0.12565891265869142, 0.12625433349609375, 0.12695468902587892, 0.12553632354736327, 0.12513833618164064, 0.12524169921875, 0.12535276794433595, 0.12486605072021484, 0.12514134216308595, 0.1255690231323242, 0.12530397033691407, 0.1260486068725586, 0.1254118423461914, 0.12556082916259764, 0.12538211059570312, 0.12545692443847656, 0.1257465591430664, 0.12543244934082032, 0.12496281433105469, 0.12588646697998046, 0.1284475555419922, 0.12698483276367187, 0.1252539520263672, 0.12529488372802736, 0.12546018981933593, 0.125085693359375, 0.12498662567138671, 0.12689273834228515, 0.1255895690917969, 0.12544185638427735, 0.12510249328613282, 0.12484130859375, 0.1253619842529297, 0.12520716857910155, 0.12499878692626953, 0.12484928131103516, 0.12547225952148439, 0.12504637145996095, 0.125330078125, 0.12548655700683595, 0.12578787231445313, 0.12595603179931641, 0.12505996704101563, 0.1270128631591797, 0.1259768295288086, 0.1250260467529297, 0.12544931030273437, 0.12544403076171876, 0.12515017700195313, 0.12683609771728516, 0.12456614685058594, 0.12452044677734375, 0.125279296875, 0.12533856201171875, 0.12488915252685547, 0.1280021514892578, 0.125544189453125, 0.12772566223144532, 0.12875570678710938, 0.12528428649902343, 0.12566329956054687, 0.12540428924560548, 0.12553919982910156, 0.12502207946777344, 0.1253229751586914, 0.12530934143066405, 0.12464128112792969, 0.12550745391845702, 0.12529990386962891, 0.12525663757324218, 0.12480239868164063, 0.12483036804199218, 0.1252311019897461, 0.13083853149414063, 0.12545142364501954, 0.12593548583984376, 0.12619615936279296, 0.12494518280029297, 0.12493545532226563, 0.1256822738647461, 0.1256197738647461, 0.1250392303466797, 0.12474073791503906, 0.12502828979492187, 0.12581037139892579, 0.12461974334716797, 0.12454204559326172, 0.12547110748291015, 0.12480876922607421, 0.12561504364013673, 0.12542054748535156, 0.1255225296020508, 0.12537404632568358, 0.12531385803222655, 0.1245902099609375, 0.12555474853515625, 0.12440147399902343, 0.125329345703125, 0.12482099151611328, 0.12898976135253906, 0.12558348846435546, 0.1253763198852539, 0.12503478240966798, 0.12541929626464843, 0.12467779541015625, 0.12529698944091797, 0.12437299346923827, 0.12488678741455078, 0.12480850982666016, 0.1250635223388672, 0.12569229125976564, 0.12588668823242188, 0.12477340698242187, 0.12459011077880859, 0.12471794891357423, 0.12477037048339844, 0.12445855712890624, 0.12616544342041017, 0.12525520324707032, 0.1251332778930664, 0.12417638397216797, 0.12530912017822265, 0.12543920135498046, 0.12548770904541015, 0.12586361694335937, 0.12537635040283204, 0.12505750274658203, 0.1269863967895508, 0.12458905792236329, 0.12555126190185548, 
0.12539513397216798, 0.1251530227661133, 0.12611135864257814, 0.12482624053955078, 0.12875386047363283, 0.12868421936035157, 0.1273826217651367, 0.12517469024658204, 0.12574908447265626, 0.12548851013183593, 0.12557584381103515, 0.12700685119628907, 0.12641267395019531, 0.12507484436035157, 0.12759677124023439, 0.12688668823242188, 0.12582784271240235, 0.12552700805664063, 0.12610050964355468, 0.1263472671508789, 0.12565193939208985, 0.1270292510986328, 0.12726841735839844, 0.12580908966064452, 0.12636569976806641, 0.1255016326904297, 0.12532723236083984, 0.12621202850341798, 0.1258532485961914, 0.1266888961791992, 0.12605494689941407, 0.1254648666381836, 0.12528025817871094, 0.12537036895751952, 0.12559123229980468, 0.12640198516845702, 0.12530572509765625, 0.12528025817871094, 0.12703238677978515, 0.12606723022460936, 0.12582339477539062, 0.12593663787841797, 0.12508697509765626, 0.12874032592773438, 0.12650688171386718, 0.12488384246826172, 0.12529603576660156, 0.1253200988769531, 0.12734012603759765, 0.12520992279052734, 0.12524972534179687, 0.12593353271484375, 0.12617935943603514, 0.12560243225097656, 0.12584659576416016, 0.1255433578491211, 0.12503858947753907, 0.12521826934814453, 0.12500374603271486, 0.1256546859741211, 0.12539997100830078, 0.1251962890625, 0.12548006439208984, 0.12464217376708984, 0.12468428802490235, 0.125517822265625, 0.12516966247558595, 0.12605030059814454, 0.1251412124633789, 0.12548915100097657, 0.12589875030517578, 0.1252864990234375, 0.12535533142089844, 0.12540787506103515, 0.1260703353881836, 0.126974365234375, 0.12516716766357422, 0.12566278076171875, 0.12499033355712891, 0.12505702209472655, 0.12500991821289062, 0.12522291564941407, 0.1296753845214844, 0.12528832244873048, 0.1295626220703125, 0.12641280364990234, 0.12498738861083984, 0.1262551040649414, 0.12584483337402344, 0.12549001312255859, 0.12592723083496093, 0.1249587173461914, 0.1255014419555664, 0.12703475189208985, 0.1260175018310547, 0.12540995025634766, 0.12463513946533203, 0.12524339294433592, 0.12616489410400392, 0.12482733154296875, 0.12925698852539064, 0.1260011215209961, 0.12578482818603515, 0.12508297729492188, 0.12502031707763672, 0.12458665466308594, 0.12520243072509765, 0.12478668975830078, 0.1252085723876953, 0.12521865844726562, 0.12611190032958985, 0.12484963226318359, 0.12449846649169923, 0.1242576675415039, 0.125304931640625, 0.12393116760253907, 0.12482701110839843, 0.12590566253662108, 0.1253678436279297, 0.1252027816772461, 0.1249423370361328, 0.12480028533935547, 0.12529737854003906, 0.12455897521972656, 0.1253392333984375, 0.12603472137451172, 0.1333043212890625, 0.12607103729248048, 0.12426563262939454, 0.1244964828491211, 0.12554774475097658]",tokens/s,7.949304206400029,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", 
line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward 
hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 416.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 370.12 MiB is free. Process 30067 has 14.38 GiB memory in use. Of the allocated memory 14.24 GiB is allocated by PyTorch, and 25.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,837.922816,2872.967168,0.0,2470.445056,2459.375616,s,1,12.88734765625,12.88734765625,0.0,12.88734765625,12.88734765625,12.88734765625,12.88734765625,[12.88734765625],,kWh,0.00011988901446665445,1.3217349060009837e-05,3.823003058400082e-05,0.00017133639411066512,,MB,1352.945664,3479.044096,0.0,3063.939072,2984.869888,s,10,5.1870249633789065,0.5187024963378907,0.000950797694764596,0.5187325744628907,0.5197865661621094,0.5201598541259767,0.5204584844970703,"[0.51970361328125, 0.518352294921875, 0.516720458984375, 0.5180075073242187, 0.5188209838867187, 0.5186906127929688, 0.5187745361328125, 0.5184530029296875, 0.5189688110351562, 0.5205331420898438]",tokens/s,493.53917092629086,kWh,1.540033515416665e-05,1.6983810847779514e-06,1.0184774229683724e-05,2.728349046862832e-05,tokens/kWh,9382963.67520715,MB,1386.221568,3481.141248,0.0,3063.939072,2984.872448,s,10,35.0078583984375,3.5007858398437497,0.009475212726746488,3.5002600097656247,3.509337939453125,3.5152982421875003,3.520066484375,"[3.499208740234375, 3.49437255859375, 3.521258544921875, 3.490714599609375, 3.4970302734375, 3.4857138671875, 3.50404345703125, 3.508013427734375, 3.501311279296875, 3.506191650390625]",tokens/s,17.995959445154714,kWh,0.00010311770732083575,1.1374328907165619e-05,5.9579670470716334e-05,0.0001740717066987177,tokens/kWh,361919.8156598765,,s,630,35.00567852401733,0.0555645690857418,0.0006791711685449121,0.05540334510803223,0.056234456634521486,0.056638946914672854,0.05879137928009034,"[0.05650070571899414, 0.0558837776184082, 0.05617007827758789, 0.056721824645996094, 0.05601279830932617, 0.055769088745117185, 0.05729833602905274, 0.05559561538696289, 0.05565977478027344, 0.055577342987060546, 0.055959072113037106, 0.05528009414672851, 0.055267200469970704, 0.05531615829467774, 0.05568780899047852, 0.055082817077636716, 0.055177440643310545, 0.05576230239868164, 0.05542105484008789, 0.05513779067993164, 0.05579964828491211, 0.05547068786621094, 0.05525539016723633, 0.05486905670166015, 0.055031742095947266, 0.05705625534057617, 0.05809766387939453, 0.05870979309082031, 0.0552077751159668, 0.05542067337036133, 0.055109279632568356, 0.05513724899291992, 0.055826560974121094, 0.055312255859375, 0.05516035079956055, 0.054857215881347655, 0.05594416046142578, 0.054761760711669924, 0.05486153411865234, 0.05641401672363281, 0.05544713592529297, 0.05479280090332031, 0.05525680160522461, 0.055047679901123046, 0.05495859146118164, 0.054974750518798826, 0.05495337677001953, 0.05496464157104492, 0.05560483169555664, 0.05501910400390625, 0.054776256561279296, 0.05533657455444336, 0.05486278533935547, 0.05567484664916992, 0.05524486541748047, 0.05507171249389648, 0.055121185302734375, 0.05530799865722656, 0.05519974517822265, 
0.05578342437744141, 0.0558040657043457, 0.05494972610473633, 0.05525219345092774, 0.057079807281494144, 0.055787521362304686, 0.055629825592041014, 0.05675417709350586, 0.05528780746459961, 0.055241825103759766, 0.05539728164672852, 0.05521379089355469, 0.05525503921508789, 0.0551611213684082, 0.05518854522705078, 0.05543360137939453, 0.05560335922241211, 0.055613311767578125, 0.0566890869140625, 0.05541897583007813, 0.05604755020141602, 0.05534668731689453, 0.05578604888916015, 0.05542502212524414, 0.05497577667236328, 0.05561401748657226, 0.05516304016113281, 0.05508025741577149, 0.054973121643066405, 0.055027584075927734, 0.05484966278076172, 0.05581340789794922, 0.055032543182373043, 0.05492095947265625, 0.054943519592285155, 0.05477833557128906, 0.05509529495239258, 0.05499699020385742, 0.05495772933959961, 0.05511993789672852, 0.0553331527709961, 0.05651456069946289, 0.05539583969116211, 0.055865856170654295, 0.055100513458251954, 0.05526620864868164, 0.055228416442871096, 0.055134239196777346, 0.05543423843383789, 0.05564483261108399, 0.05525740814208984, 0.055621631622314455, 0.05564147186279297, 0.05515532684326172, 0.05494534301757813, 0.054986785888671875, 0.05583686447143555, 0.05580729675292969, 0.05576492691040039, 0.05569839859008789, 0.05599846267700195, 0.056698654174804686, 0.055648479461669925, 0.05553164672851563, 0.05523862457275391, 0.055142303466796876, 0.055553375244140626, 0.05706752014160156, 0.05819827270507812, 0.05634841537475586, 0.056561279296875, 0.05623027038574219, 0.05562531280517578, 0.056348545074462894, 0.05646390533447266, 0.05628518295288086, 0.05563187026977539, 0.055416831970214846, 0.05545555114746094, 0.05562572860717773, 0.055429409027099606, 0.055258689880371095, 0.05542287826538086, 0.055632320404052735, 0.055592414855957034, 0.05594972610473633, 0.05592281723022461, 0.05554755020141602, 0.0554700813293457, 0.055546207427978514, 0.055358623504638674, 0.055279678344726565, 0.055376670837402345, 0.055424415588378906, 0.055241310119628906, 0.055377246856689454, 0.05562771224975586, 0.055136993408203126, 0.055191551208496094, 0.05540214538574219, 0.05537766265869141, 0.05518191909790039, 0.055580673217773435, 0.055901470184326174, 0.05610755157470703, 0.0560043830871582, 0.05632601547241211, 0.05584076690673828, 0.055560737609863284, 0.05566873550415039, 0.05551276779174805, 0.05521440124511719, 0.055406593322753904, 0.05573836898803711, 0.05616844940185547, 0.0555601921081543, 0.05572217559814453, 0.0555618896484375, 0.05564432144165039, 0.059172863006591796, 0.05560432052612305, 0.05534563064575195, 0.05606649780273437, 0.055811264038085937, 0.05625468826293945, 0.05623814392089844, 0.05544806289672852, 0.05677878570556641, 0.057450496673583984, 0.058347518920898435, 0.05654118347167969, 0.05567465591430664, 0.05564992141723633, 0.055365665435791016, 0.05512044906616211, 0.05511372756958008, 0.055398494720458984, 0.05515599822998047, 0.05521267318725586, 0.05502975845336914, 0.05492326354980469, 0.05492736053466797, 0.05509734344482422, 0.055209983825683595, 0.055139457702636716, 0.055081855773925784, 0.05579980850219726, 0.055096736907958986, 0.05525974273681641, 0.0554881591796875, 0.05559331130981445, 0.05525436782836914, 0.055081375122070314, 0.055271678924560544, 0.05515468978881836, 0.05532876968383789, 0.05543529510498047, 0.05567881774902344, 0.05551020812988281, 0.05588848114013672, 0.05555971145629883, 0.05539827346801758, 0.05536016082763672, 0.05573625564575195, 0.05523852920532227, 0.054870494842529295, 0.05494988632202148, 
0.05545779037475586, 0.05561548614501953, 0.055968799591064454, 0.055503841400146484, 0.05577536010742187, 0.05551251220703125, 0.05541654586791992, 0.05559497451782226, 0.055401374816894534, 0.05535728073120117, 0.05550284957885742, 0.05550694274902344, 0.056641696929931644, 0.05520780944824219, 0.05516025543212891, 0.0558515510559082, 0.05522227096557617, 0.05536486434936523, 0.05543958282470703, 0.05573020935058594, 0.055487102508544925, 0.055543838500976564, 0.05508694458007812, 0.05535696029663086, 0.05522016143798828, 0.05498223876953125, 0.05882470321655273, 0.056438785552978515, 0.05615411376953125, 0.05547212982177734, 0.05592623901367187, 0.055218273162841794, 0.05547462463378906, 0.056123390197753906, 0.05585286331176758, 0.05532281494140625, 0.05552742385864258, 0.05582233428955078, 0.055414783477783204, 0.055201793670654295, 0.05496409606933594, 0.05524067306518555, 0.05494185638427734, 0.055318145751953124, 0.05520528030395508, 0.05635945510864258, 0.05603350448608398, 0.05537606430053711, 0.055387359619140625, 0.05542995071411133, 0.05499903869628906, 0.05530624008178711, 0.05503180694580078, 0.05497782516479492, 0.05507702255249024, 0.05547462463378906, 0.0552940788269043, 0.05560659027099609, 0.05535801696777344, 0.05527369689941406, 0.05520169448852539, 0.05525449752807617, 0.05773366546630859, 0.056606048583984374, 0.05637295913696289, 0.05533545684814453, 0.05561401748657226, 0.05607408142089844, 0.05584691238403321, 0.05565625762939453, 0.05536582565307617, 0.055093246459960936, 0.0547852783203125, 0.05499478530883789, 0.05503683090209961, 0.055093246459960936, 0.0549378890991211, 0.055196670532226565, 0.05517548751831055, 0.05502163314819336, 0.05486627197265625, 0.055276927947998045, 0.05575904083251953, 0.055412544250488284, 0.05493404769897461, 0.055160926818847655, 0.055330814361572264, 0.05523251342773437, 0.05502361679077149, 0.05624889755249023, 0.0563138542175293, 0.05663558578491211, 0.05600387191772461, 0.055376415252685544, 0.054935550689697264, 0.055504894256591795, 0.054975807189941404, 0.054923969268798827, 0.055144447326660156, 0.05526534271240234, 0.055328704833984374, 0.055188575744628904, 0.05507984161376953, 0.054861598968505856, 0.05546006393432617, 0.055479488372802734, 0.05544537734985352, 0.055043006896972654, 0.05551513671875, 0.055943168640136716, 0.05636710357666016, 0.055183040618896485, 0.055626049041748046, 0.056741886138916016, 0.05542652893066406, 0.057385440826416015, 0.05499283218383789, 0.05480857467651367, 0.055381824493408206, 0.055162239074707034, 0.05519251251220703, 0.055116031646728514, 0.05519113540649414, 0.05490713500976562, 0.05512796783447266, 0.05484051132202149, 0.054997825622558595, 0.056234046936035155, 0.05545331192016602, 0.05501984024047851, 0.05518457412719727, 0.0550838394165039, 0.05501337432861328, 0.054994335174560545, 0.05532505416870117, 0.05508086395263672, 0.05490118408203125, 0.055190494537353516, 0.05521321487426758, 0.055150367736816405, 0.05530771255493164, 0.05518150329589844, 0.055204513549804685, 0.054842239379882814, 0.05474150466918945, 0.05538623809814453, 0.055212223052978515, 0.05511167907714844, 0.05469785690307617, 0.05483663940429687, 0.05475187301635742, 0.055244895935058595, 0.056884639739990236, 0.05594940948486328, 0.056449535369873044, 0.055914081573486325, 0.0561176643371582, 0.055613567352294925, 0.05535878372192383, 0.05533078384399414, 0.05566320037841797, 0.05538127899169922, 0.055314910888671874, 0.05512015914916992, 0.05659971237182617, 0.05548524856567383, 0.05540454483032226, 
0.056038753509521484, 0.05537843322753906, 0.05533929443359375, 0.05505606460571289, 0.05489273452758789, 0.056043521881103515, 0.05568044662475586, 0.05576556777954102, 0.055076862335205076, 0.05518915176391601, 0.055081279754638675, 0.0551440315246582, 0.05514284896850586, 0.05499859237670898, 0.05481868743896484, 0.05485830307006836, 0.05504735946655273, 0.05489337539672851, 0.05532467269897461, 0.05966412734985352, 0.05732172775268555, 0.05601046371459961, 0.055544097900390624, 0.05603440093994141, 0.05536767959594727, 0.05551196670532227, 0.055554046630859374, 0.0557704963684082, 0.05565856170654297, 0.05545151901245117, 0.05551993560791016, 0.05616025543212891, 0.055504894256591795, 0.055424671173095706, 0.05553180694580078, 0.05535136032104492, 0.055973888397216794, 0.055379615783691404, 0.05573462295532226, 0.05527532958984375, 0.05568531036376953, 0.05585919952392578, 0.05609881591796875, 0.05596556854248047, 0.05517119979858399, 0.05512716674804687, 0.05491801452636719, 0.054902782440185545, 0.06077433776855469, 0.056752574920654296, 0.056231552124023435, 0.0560766716003418, 0.056199169158935545, 0.05596899032592773, 0.05563228988647461, 0.05532710266113281, 0.05550630569458008, 0.055206432342529296, 0.05533091354370117, 0.0553963508605957, 0.05526323318481445, 0.055465984344482425, 0.055303199768066406, 0.05537891387939453, 0.055035903930664064, 0.05558988952636719, 0.05533363342285156, 0.055058017730712894, 0.055359840393066406, 0.05572025680541992, 0.055547904968261716, 0.055387840270996094, 0.055204158782958986, 0.05479372787475586, 0.05483737564086914, 0.05483763122558594, 0.05482495880126953, 0.055054336547851565, 0.05508095932006836, 0.05550662231445312, 0.055553375244140626, 0.05558700942993164, 0.05528236770629883, 0.055406719207763674, 0.0558612174987793, 0.05621894454956055, 0.05605635070800781, 0.05608995056152344, 0.055802497863769535, 0.055543872833251955, 0.055400577545166016, 0.055039710998535156, 0.05499523162841797, 0.055510814666748044, 0.05534332656860352, 0.055482368469238284, 0.05588582229614258, 0.05558249664306641, 0.05522454452514648, 0.05569945526123047, 0.05503395080566406, 0.05577308654785156, 0.055142398834228515, 0.05520608139038086, 0.055699264526367184, 0.055955265045166014, 0.05576828765869141, 0.05892195129394531, 0.055624832153320314, 0.05525910568237305, 0.05886374282836914, 0.05647974395751953, 0.05635289764404297, 0.05560224151611328, 0.05577299118041992, 0.05538265609741211, 0.0551325454711914, 0.05501449584960937, 0.055032958984375, 0.05508415985107422, 0.05514307022094726, 0.054978561401367185, 0.054902782440185545, 0.05498448181152344, 0.055281982421875, 0.05524854278564453, 0.055118080139160155, 0.05533695983886719, 0.05534310531616211, 0.055037086486816406, 0.055370590209960935, 0.05507193756103516, 0.056412353515625, 0.055511199951171875, 0.05506505584716797, 0.05472051239013672, 0.054994430541992184, 0.05555014419555664, 0.05551340866088867, 0.055158782958984375, 0.055520927429199216, 0.05529651260375976, 0.055341983795166014, 0.055186336517333984, 0.055299297332763675, 0.05553644943237305, 0.055567615509033205, 0.05945113754272461, 0.05606838226318359, 0.05565216064453125, 0.05638032150268555, 0.05642444610595703, 0.05588918304443359, 0.055773246765136716, 0.05620095825195313, 0.05554064178466797, 0.05527072143554688, 0.05515852737426758, 0.05684320068359375, 0.055103488922119144, 0.05522604751586914, 0.05631935882568359, 0.055382976531982424, 0.05587353515625, 0.05658582305908203, 0.05590585708618164, 0.055935775756835934, 
0.055109535217285156, 0.05544668960571289, 0.05531862258911133, 0.055127071380615233, 0.054939521789550784, 0.05649513626098633, 0.05531523132324219, 0.05801571273803711, 0.05600732803344727, 0.05537411117553711, 0.055838623046875, 0.05587260818481445, 0.05598310470581055, 0.05553919982910156, 0.055269214630126955, 0.05553424072265625, 0.05574879837036133, 0.05592655944824219, 0.05559699249267578, 0.05591027069091797, 0.05571196746826172, 0.05583987045288086, 0.0556960334777832, 0.055202014923095705, 0.05532380676269531, 0.055913505554199217, 0.05741654586791992, 0.0552652473449707, 0.05557164764404297, 0.05551699066162109, 0.0551464958190918, 0.055070720672607425, 0.05511999893188477, 0.05574639892578125, 0.055388191223144534, 0.055504894256591795, 0.055541728973388674, 0.05519555282592774, 0.055126144409179685, 0.055432960510253905, 0.05585027313232422, 0.05587385559082031, 0.05710300827026367, 0.05586454391479492, 0.05568796920776367, 0.05492464065551758, 0.05618960189819336, 0.05660467147827149, 0.05593267059326172, 0.05558531188964844, 0.055473377227783206, 0.05557030487060547, 0.05526796722412109, 0.05554604721069336, 0.055199295043945315, 0.055013633728027346, 0.055316478729248046, 0.055998271942138675, 0.05537401580810547, 0.05568511962890625, 0.05550688171386719, 0.05548652648925781, 0.05571993637084961, 0.05521641540527344, 0.055170303344726564, 0.055593246459960936, 0.05597187042236328, 0.05529337692260742, 0.05520457458496094, 0.05539430236816406]",tokens/s,17.99708008995621,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in 
decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1243, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1121, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 842, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 530, in forward attn_output = self._flash_attention_forward( File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 628, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in 
flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1243, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1121, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 842, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 530, in forward attn_output = self._flash_attention_forward( File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 628, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,8724.11136,4515.037184,0.0,4112.515072,4056.160256,s,1,22.604189453125,22.604189453125,0.0,22.604189453125,22.604189453125,22.604189453125,22.604189453125,[22.604189453125],,kWh,0.0004040408390375167,4.4561447858412135e-05,0.0001301873263720027,0.0005787896132679316,,MB,2056.462336,4898.816,0.0,4475.322368,4373.26336,s,10,0.9307263183593749,0.09307263183593749,0.0029256204593348526,0.09225484466552734,0.09331636123657226,0.09755691566467285,0.10094935920715332,"[0.10179747009277344, 0.09192726135253906, 0.09124150085449219, 0.09227510070800782, 0.09223458862304687, 0.09222991943359375, 0.0923463363647461, 0.09237401580810548, 0.09228873443603515, 0.09201139068603516]",tokens/s,2750.54003470387,kWh,2.6929739797782285e-06,2.9697888643981385e-07,1.7894942960183002e-06,4.779447162236343e-06,tokens/kWh,53562680.224341154,MB,2069.938176,4900.913152,0.0,4475.322368,4344.50176,s,10,58.55696630859375,5.855696630859375,0.012584317840700344,5.854067138671875,5.868887988281251,5.87427919921875,5.87859216796875,"[5.8501884765625, 5.84744921875, 5.86768994140625, 5.85794580078125, 5.86663037109375, 5.860470703125, 5.84160400390625, 5.87967041015625, 5.84901220703125, 5.83630517578125]",tokens/s,10.758754076840589,kWh,0.0001702749199798042,1.8781967096819756e-05,7.00103964771815e-05,0.0002590672835538055,tokens/kWh,243180.06942360813,,s,630,58.552865600585925,0.09294105650886657,0.0008694255140964443,0.09280662536621094,0.0937010971069336,0.09424003372192383,0.09583510910034179,"[0.09241990661621094, 0.09280941009521484, 0.09376054382324218, 0.09285513305664063, 0.09753961944580078, 0.09307129669189453, 0.09262147521972657, 0.09267814636230469, 0.09282355499267578, 0.09258710479736328, 0.09241203308105468, 0.09608477020263671, 0.0930035171508789, 0.09292352294921875, 0.09255999755859375, 0.09350678253173828, 0.092391357421875, 0.09268720245361328, 0.0927693099975586, 0.09271327972412109, 0.09254889678955078, 0.09333439636230469, 0.09232179260253906, 0.09226649475097656, 0.09241600036621093, 0.09222758483886719, 0.09204230499267578, 0.09270681762695313, 0.09383177947998046, 0.09259164428710938, 0.09225901031494141, 0.0921396484375, 0.09191321563720703, 0.09192095947265624, 0.09213174438476562, 0.09264335632324219, 0.09272525024414062, 0.09245081329345703, 0.09214495849609375, 0.09275865936279297, 0.09287071990966797, 0.09403724670410156, 0.09275059509277343, 0.09194806671142577, 0.09186748504638671, 0.09320716857910157, 0.09216393280029297, 0.0927213134765625, 0.09250956726074219, 0.09218726348876953, 0.09251548767089844, 0.0927649917602539, 0.09320246124267578, 0.0930365447998047, 0.0947240982055664, 0.09351577758789062, 0.09331916809082032, 0.09303411102294921, 0.09283779144287109, 0.09295423889160156, 
0.09244758605957032, 0.09273343658447265, 0.092857666015625, 0.09262882995605469, 0.09189737701416016, 0.09230332946777343, 0.093538818359375, 0.09228902435302734, 0.09314918518066406, 0.09224192047119141, 0.09228006744384766, 0.09246310424804688, 0.09307577514648438, 0.0928025894165039, 0.09276918029785156, 0.09406646728515625, 0.09345455932617187, 0.09281321716308594, 0.09288508605957031, 0.09271501159667969, 0.09235033416748047, 0.09236259460449218, 0.09236918640136718, 0.09356082916259766, 0.09331302642822266, 0.0927088623046875, 0.09233612823486329, 0.09265065765380859, 0.0925409927368164, 0.09233484649658204, 0.09281897735595702, 0.09241036987304688, 0.09269379425048828, 0.09270105743408204, 0.092846435546875, 0.09297673797607423, 0.09242870330810547, 0.09259622192382813, 0.09333900451660156, 0.09251222229003907, 0.09250473785400391, 0.09308159637451172, 0.09225596618652344, 0.09214147186279296, 0.0923689956665039, 0.09288835144042969, 0.09336934661865234, 0.09227878570556641, 0.0924483871459961, 0.09292192077636718, 0.0930552978515625, 0.09667110443115234, 0.09363314819335937, 0.0927088623046875, 0.09267609405517578, 0.09261670684814453, 0.09235250854492187, 0.09296249389648438, 0.095919677734375, 0.09290118408203125, 0.0926666259765625, 0.09222297668457032, 0.09243097686767578, 0.0923095703125, 0.09229312133789062, 0.09228902435302734, 0.09279808044433593, 0.09302719879150391, 0.09259740447998047, 0.09272838592529296, 0.09252384185791016, 0.09265363311767578, 0.09279923248291015, 0.0930797119140625, 0.09350553894042969, 0.09317375946044922, 0.0928680648803711, 0.09298512268066406, 0.0933096923828125, 0.09303961944580078, 0.09330735778808594, 0.09245772552490235, 0.09246864318847656, 0.09303065490722656, 0.09293801879882813, 0.09258428955078125, 0.09332121276855469, 0.0930672607421875, 0.09342156982421874, 0.09308544158935547, 0.09283309173583984, 0.09287570953369141, 0.09262899017333984, 0.09270066833496093, 0.09295667266845703, 0.0925450210571289, 0.09263900756835937, 0.09567036437988281, 0.09231775665283203, 0.09322038269042969, 0.09497654724121093, 0.09312617492675782, 0.09254755401611328, 0.09317581176757812, 0.09394697570800781, 0.09327094268798829, 0.0933349151611328, 0.09328704071044921, 0.09327568054199219, 0.09325615692138672, 0.09268019104003906, 0.09261798095703125, 0.09360009765625, 0.09303081512451172, 0.09306304168701172, 0.09330636596679688, 0.09249222564697265, 0.09254112243652343, 0.09306931304931641, 0.09304268646240234, 0.09316867065429688, 0.09286959838867187, 0.09578201293945313, 0.09360259246826172, 0.09284207916259765, 0.09383116912841796, 0.09358512115478515, 0.09305104064941407, 0.0936849594116211, 0.09261443328857422, 0.09369961547851563, 0.09259613037109375, 0.09304271697998047, 0.09280384063720704, 0.09269657897949218, 0.09232793426513672, 0.09281692504882813, 0.09244924926757812, 0.09244207763671874, 0.0920499496459961, 0.09220841979980468, 0.09301033782958984, 0.09258425903320312, 0.09291571044921874, 0.0936377944946289, 0.09309426879882812, 0.09230595397949219, 0.092970947265625, 0.09269049835205079, 0.09241593933105469, 0.09212928009033203, 0.09273548889160156, 0.09261055755615234, 0.092368896484375, 0.09304064178466796, 0.09276374053955078, 0.0942391357421875, 0.09384754943847656, 0.09251580810546875, 0.09260015869140625, 0.09238803100585938, 0.09194416046142578, 0.09206864166259765, 0.09263667297363282, 0.10436662292480468, 0.09320649719238282, 0.09285017395019532, 0.09255020904541016, 0.09350444793701172, 0.09245481872558593, 0.09272739410400391, 
0.09322086334228516, 0.09259986877441406, 0.09320492553710938, 0.0939599380493164, 0.09300812530517578, 0.09262079620361328, 0.09306313323974609, 0.0926781768798828, 0.09355216217041015, 0.09315074920654297, 0.09321746826171876, 0.09224217224121094, 0.09254438018798829, 0.09231603240966797, 0.09292006683349609, 0.09268633270263672, 0.09221324920654297, 0.0928025894165039, 0.09280953979492187, 0.09331283569335938, 0.09251398468017578, 0.09425711822509765, 0.09299846649169922, 0.09233293151855469, 0.0922051544189453, 0.0925453109741211, 0.09263507080078125, 0.09287129974365234, 0.0930439682006836, 0.0940115509033203, 0.09285692596435546, 0.09244166564941406, 0.09257660675048827, 0.09244681549072266, 0.09424076843261718, 0.09448384094238281, 0.09353270721435547, 0.09301001739501953, 0.09314214324951171, 0.0935146255493164, 0.09335721588134765, 0.09333987426757813, 0.09293478393554687, 0.09292374420166015, 0.09269590759277344, 0.09242707061767579, 0.09375334167480469, 0.09366876983642577, 0.09257635498046875, 0.09236070251464844, 0.09380217742919922, 0.09304201507568359, 0.09257820892333984, 0.09277702331542968, 0.09282559967041015, 0.09274777221679688, 0.09550748443603516, 0.0950708770751953, 0.09293254089355468, 0.09334259033203125, 0.09400972747802734, 0.09312089538574218, 0.09270448303222656, 0.09292797088623046, 0.09274425506591796, 0.09289727783203125, 0.09254297637939453, 0.09225968170166016, 0.09312429046630859, 0.0925992660522461, 0.092446044921875, 0.09329670715332031, 0.0935348129272461, 0.0931942367553711, 0.09263894653320312, 0.09300816345214843, 0.09277849578857422, 0.0927457275390625, 0.09319136047363281, 0.09385657501220702, 0.09280281829833985, 0.09295037078857422, 0.09320285034179687, 0.09384060668945313, 0.09274745941162109, 0.0951053466796875, 0.09325103759765625, 0.09251593780517578, 0.09214771270751954, 0.0924865951538086, 0.0928951644897461, 0.09492896270751953, 0.09303190612792969, 0.09340694427490234, 0.09365792083740235, 0.09273452758789062, 0.09235346984863281, 0.09277030181884766, 0.09483993530273438, 0.09779472351074218, 0.09278691101074218, 0.0922603530883789, 0.09272659301757813, 0.09309398651123046, 0.09312271881103516, 0.09323101043701172, 0.09269213104248047, 0.09346038055419922, 0.09288706970214844, 0.09218294525146484, 0.0929817886352539, 0.09264259338378907, 0.09215049743652344, 0.09210675048828125, 0.09273926544189454, 0.09353855895996094, 0.09272531127929687, 0.09279897308349609, 0.09336000061035156, 0.09226866912841797, 0.09211872100830078, 0.09284435272216797, 0.09252454376220703, 0.09286809539794921, 0.0929359359741211, 0.09259699249267578, 0.09285222625732421, 0.09397452545166016, 0.09221324920654297, 0.09273139190673828, 0.09199616241455078, 0.09238527679443359, 0.09305244445800781, 0.09255369567871094, 0.09238470458984376, 0.09280329895019532, 0.09236908721923828, 0.09277244567871094, 0.092917724609375, 0.09320851135253906, 0.09282575988769531, 0.09523814392089844, 0.0935198745727539, 0.09244608306884766, 0.09272723388671875, 0.09342806243896484, 0.09337071990966797, 0.0932515869140625, 0.0931954574584961, 0.09354528045654296, 0.09275801849365234, 0.09251248168945313, 0.09346975708007813, 0.09284681701660157, 0.09235763549804688, 0.09209548950195312, 0.09287407684326172, 0.09305760192871093, 0.09276179504394531, 0.09290509033203125, 0.0931725082397461, 0.09338819122314453, 0.09560944366455078, 0.0936377944946289, 0.09314201354980468, 0.09326092529296875, 0.09327267456054687, 0.09226390075683594, 0.09227529907226563, 0.09238278198242188, 
0.09259059143066406, 0.0925880355834961, 0.09161740875244141, 0.09151222229003907, 0.09192723083496093, 0.09189311981201172, 0.09159049224853516, 0.09190691375732422, 0.09146057891845703, 0.09204112243652343, 0.09204822540283203, 0.09203024291992187, 0.09198870086669922, 0.09187260437011718, 0.09259894561767579, 0.0928194580078125, 0.09252812957763672, 0.09264383697509766, 0.092295166015625, 0.09233309173583984, 0.09208930969238281, 0.09218457794189452, 0.09226834869384766, 0.09285171508789063, 0.09271347045898437, 0.09276025390625, 0.09273750305175782, 0.09456416320800781, 0.0945830078125, 0.0930027847290039, 0.09268323516845703, 0.09258972930908203, 0.09189615631103516, 0.09254208374023437, 0.09279551696777344, 0.09332310485839844, 0.09442278289794923, 0.09284470367431641, 0.09293977355957031, 0.09304029083251954, 0.09303836822509766, 0.09261260986328125, 0.09237443542480468, 0.09309609222412109, 0.0929952621459961, 0.09305174255371093, 0.09292288208007812, 0.09318287658691406, 0.09281523132324218, 0.09343398284912109, 0.09393561553955078, 0.09414041900634766, 0.09320230102539062, 0.09304486083984374, 0.09324476623535156, 0.09364723205566407, 0.09331910705566407, 0.09285187530517579, 0.09302291107177735, 0.09293734741210938, 0.09340579223632813, 0.09326620483398437, 0.09339836883544922, 0.0941021728515625, 0.09291980743408203, 0.09272505950927734, 0.09326595306396485, 0.09295887756347657, 0.09282559967041015, 0.09294028472900391, 0.09257772827148437, 0.09291494750976563, 0.0930558090209961, 0.09433216094970703, 0.09420877075195312, 0.09258377838134765, 0.09380265808105469, 0.09585679626464844, 0.09327353668212891, 0.09297090911865234, 0.09349715423583985, 0.09343865966796874, 0.0934993896484375, 0.09347789001464844, 0.09309900665283204, 0.09290505981445313, 0.09339126586914062, 0.09303798675537109, 0.09341398620605469, 0.09380659484863281, 0.09270066833496093, 0.09302425384521484, 0.09302979278564454, 0.09334025573730469, 0.09376563262939454, 0.09344569396972656, 0.09310457611083985, 0.09322467041015625, 0.09506025695800781, 0.09360969543457032, 0.0942553939819336, 0.0933431396484375, 0.09343366241455078, 0.09227648162841796, 0.09275801849365234, 0.09250342559814453, 0.09341110229492187, 0.09215062713623047, 0.0923884506225586, 0.09295702362060547, 0.09238175964355469, 0.09250201416015626, 0.09242124938964844, 0.09232383728027344, 0.09242835235595703, 0.09209696197509766, 0.09213910675048828, 0.09251715087890625, 0.09249295806884765, 0.0926114273071289, 0.09322434997558594, 0.09306787109375, 0.09282073974609376, 0.09238604736328125, 0.09307135772705079, 0.09252454376220703, 0.09201254272460938, 0.09238057708740234, 0.09248745727539062, 0.09241069030761719, 0.09207177734375, 0.09170336151123047, 0.09228092956542969, 0.09530092620849609, 0.09393017578125, 0.09234432220458984, 0.09244057464599609, 0.09296076965332031, 0.09236406707763672, 0.09236534118652344, 0.09173216247558594, 0.094060546875, 0.09290310668945312, 0.0937265625, 0.09318777465820313, 0.0934612808227539, 0.09299075317382813, 0.09290825653076172, 0.09320652770996093, 0.09332681274414062, 0.09277471923828125, 0.0924428482055664, 0.09371443176269531, 0.09297837066650391, 0.09353094482421875, 0.0929928970336914, 0.09379657745361328, 0.093999267578125, 0.0934010238647461, 0.09266617584228516, 0.09298880004882812, 0.09248627471923829, 0.09255321502685547, 0.09318195343017578, 0.09279686737060547, 0.09424272155761719, 0.09215955352783203, 0.09205987548828125, 0.09247142028808594, 0.09263967895507813, 0.0921145248413086, 
0.09179587554931641, 0.09156559753417969, 0.09244515228271484, 0.09225625610351562, 0.09259129333496094, 0.0929201889038086, 0.0928543701171875, 0.09197555541992188, 0.09208675384521485, 0.09205094146728515, 0.09191404724121094, 0.09350406646728515, 0.09284416198730469, 0.09239961242675782, 0.0921777572631836, 0.09285401916503906, 0.09290396881103516, 0.09244009399414063, 0.092631103515625, 0.09258195495605469, 0.09232434844970704, 0.09227001953125, 0.09165494537353516, 0.09401344299316407, 0.09528678131103516, 0.0925040283203125, 0.09248617553710937, 0.09295053100585937, 0.09219417572021485, 0.09271155548095703, 0.09298124694824218, 0.09299353790283203, 0.09266738891601563, 0.09436726379394532, 0.09434786987304687, 0.09268262481689453, 0.09270681762695313, 0.09272223663330079, 0.09342047882080078, 0.09220095825195312, 0.0922460174560547, 0.09206374359130859, 0.09256755065917968, 0.09284194946289062, 0.09190576171875, 0.09241776275634765, 0.09198857879638672, 0.09204239654541016, 0.09261551666259765, 0.09288038635253906, 0.09249148559570312, 0.09245148468017578, 0.09293750762939453, 0.09357398223876953, 0.09199801635742187, 0.09264351654052734, 0.09409126281738281, 0.0924254379272461]",tokens/s,10.75950755847713,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: Phi3ForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: Phi3ForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15873.060864,9166.52032,0.0,8763.998208,8763.843072,s,1,34.22266796875,34.22266796875,0.0,34.22266796875,34.22266796875,34.22266796875,34.22266796875,[34.22266796875],,kWh,0.0007821089886166723,8.626525950551006e-05,0.00026274521019600816,0.0011311194583181906,,MB,1349.869568,9818.734592,0.0,9395.24096,9193.043456,s,10,0.9239330444335938,0.09239330444335936,0.0008417123693627113,0.09248470306396483,0.09342223052978516,0.09344624938964843,0.09346546447753906,"[0.09183171081542969, 0.09188416290283204, 0.09255286407470703, 0.0919969253540039, 0.09052214050292968, 0.09309654235839844, 0.09241654205322265, 0.0934168930053711, 0.0927449951171875, 0.09347026824951171]",tokens/s,2770.763547665273,kWh,2.7385614672117138e-06,3.020145993105199e-07,1.822613088411269e-06,4.863189154933503e-06,tokens/kWh,52640354.27046852,MB,1406.738432,9820.831744,0.0,9395.24096,9042.051072,s,10,29.710315673828124,2.9710315673828123,0.0065091468048112215,2.9711990966796877,2.9770820312499997,2.9814139404296873,2.9848794677734376,"[2.96406396484375, 2.9648828125, 2.973093505859375, 2.976119384765625, 2.963831298828125, 2.965788818359375, 2.974391845703125, 2.97126904296875, 2.971129150390625, 2.985745849609375]",tokens/s,21.204756183555745,kWh,8.711944886695824e-05,9.609277553675825e-06,5.764707337618728e-05,0.00015437579979682133,tokens/kWh,408095.05170445243,,s,630,29.708395545959473,0.04715618340628487,0.0004352531047991968,0.047089599609375005,0.04755192527770996,0.04777150287628174,0.04917611068725586,"[0.04850473785400391, 0.04758537673950195, 0.0470118408203125, 0.04774224090576172, 0.047048511505126955, 0.046846656799316405, 0.046960769653320314, 0.04701398468017578, 0.04682342529296875, 0.04682547378540039, 0.04661033630371094, 0.046819198608398435, 0.04664486312866211, 0.046738239288330076, 0.046865760803222654, 0.04651414489746094, 0.046648960113525394, 0.04734377670288086, 0.04696134567260742, 0.04706943893432617, 0.0466203842163086, 0.0467374382019043, 0.04670211029052734, 0.046591583251953124, 0.04719500732421875, 0.04691299057006836, 0.04737897491455078, 0.04819558334350586, 0.047067134857177735, 0.04699289703369141, 0.04683340835571289, 0.046777088165283205, 0.04681727981567383, 0.04680499267578125, 0.046846271514892575, 0.04697465515136719, 0.046811134338378906, 0.046937118530273436, 0.04685084915161133, 0.04676422500610351, 0.047064510345458985, 0.047151679992675784, 0.0470052490234375, 0.04690284729003906, 0.04673215866088867, 0.047013633728027346, 0.04743596649169922, 0.04751696014404297, 0.04725872039794922, 0.04747401428222656, 0.04710639953613281, 0.04699484634399414, 0.0471844482421875, 0.04714704132080078, 0.04671078491210937, 0.04704460906982422, 0.047322399139404295, 0.04697980880737305, 0.04708147048950195, 0.04723324966430664, 0.04726147079467773, 0.047351806640625, 0.047505409240722656, 0.0486212158203125, 0.04692438507080078, 0.047261760711669924, 0.04697087860107422, 0.046749759674072265, 0.0467347526550293, 0.04669664001464844, 0.0466412467956543, 0.046579967498779296, 0.046679359436035156, 0.046865089416503906, 0.04718982315063477, 0.04677807998657227, 0.04676691055297852, 0.04680364990234375, 0.04691270446777344, 0.046728736877441404, 0.04668646240234375, 0.04663820648193359, 0.04675180816650391, 0.046686145782470705, 0.04686751937866211, 0.046595775604248046, 0.04698492813110351, 0.04697529602050781, 0.04685833740234375, 0.04688649749755859, 0.04722630310058594, 0.04704476928710938, 0.04712531280517578, 
0.04689321517944336, 0.04737356948852539, 0.047177566528320315, 0.04691353607177735, 0.04707814407348633, 0.04715727996826172, 0.046926910400390626, 0.047080352783203126, 0.046890945434570314, 0.04724550247192383, 0.047077247619628906, 0.04731196975708008, 0.04706000137329101, 0.04715302276611328, 0.04701388931274414, 0.04781465530395508, 0.047075328826904295, 0.04721868896484375, 0.04725964736938477, 0.04737747192382812, 0.047495552062988285, 0.047303550720214846, 0.04746137619018555, 0.04737299346923828, 0.04732137680053711, 0.047251201629638674, 0.04725244903564453, 0.04718700790405273, 0.047220542907714845, 0.047102081298828126, 0.04708147048950195, 0.04720809555053711, 0.047113697052001954, 0.04859904098510742, 0.047726593017578124, 0.046852096557617184, 0.04688399887084961, 0.04716016006469727, 0.046970718383789065, 0.04673049545288086, 0.046835678100585934, 0.046760894775390624, 0.047669246673583986, 0.046878719329833986, 0.04696473693847656, 0.04703641510009766, 0.04705279922485352, 0.04696425628662109, 0.04691961669921875, 0.04668204879760742, 0.04688665771484375, 0.04683625411987305, 0.04704249572753906, 0.047139232635498046, 0.04706000137329101, 0.04759238433837891, 0.04736614227294922, 0.047099071502685545, 0.047102783203125, 0.04725350570678711, 0.04712243270874023, 0.04695654296875, 0.04722393417358398, 0.047121280670166014, 0.04697225570678711, 0.04685891342163086, 0.046714881896972656, 0.047289920806884767, 0.04822060775756836, 0.047247169494628906, 0.04941606521606445, 0.047241535186767575, 0.0468454704284668, 0.04682726287841797, 0.04717599868774414, 0.047182079315185546, 0.04739916610717773, 0.04707932662963867, 0.04703641510009766, 0.04724246215820312, 0.04758371353149414, 0.04709203338623047, 0.047148479461669925, 0.047007904052734376, 0.047330718994140625, 0.047131649017333986, 0.047126529693603515, 0.047075328826904295, 0.04714105606079102, 0.0471220817565918, 0.04742544174194336, 0.0473164176940918, 0.047301441192626956, 0.047239166259765625, 0.047325183868408206, 0.04728627014160156, 0.04917734527587891, 0.048912158966064455, 0.04777724838256836, 0.04714960098266602, 0.04677427291870117, 0.046960159301757814, 0.0468647346496582, 0.04698944091796875, 0.04723251342773437, 0.04717619323730469, 0.047413246154785156, 0.04714921569824219, 0.04699324798583984, 0.04711017608642578, 0.0467248649597168, 0.04724470520019531, 0.04704719924926758, 0.04714495849609375, 0.047038143157958984, 0.04726470565795898, 0.047414337158203125, 0.04786956787109375, 0.04728521728515625, 0.04730879974365235, 0.04725299072265625, 0.04727369689941406, 0.04734431838989258, 0.047171390533447266, 0.0470530891418457, 0.04733497619628906, 0.04727603149414063, 0.04726726531982422, 0.047164417266845705, 0.04735539245605469, 0.047329792022705076, 0.04727807998657227, 0.04745199966430664, 0.04741737747192383, 0.0471638412475586, 0.04742876815795898, 0.04726963043212891, 0.04734342575073242, 0.047691070556640625, 0.04723174285888672, 0.048621726989746095, 0.04741321563720703, 0.04679731369018555, 0.04683190536499023, 0.047045921325683596, 0.04703219223022461, 0.04699631881713867, 0.04707561492919922, 0.04709347152709961, 0.047013633728027346, 0.04692371368408203, 0.04671110534667969, 0.04653276824951172, 0.04676313781738281, 0.047005664825439455, 0.047145729064941404, 0.04688508987426758, 0.04694185638427734, 0.04698931121826172, 0.04924710464477539, 0.04762009429931641, 0.04720556640625, 0.04707411193847656, 0.046917022705078124, 0.046787166595458986, 0.046862335205078126, 0.046720191955566405, 
0.04664524841308594, 0.0465307502746582, 0.046742080688476566, 0.046737022399902343, 0.04649030303955078, 0.04680659103393554, 0.04679916763305664, 0.046542720794677736, 0.0467949104309082, 0.046744415283203125, 0.04674867248535156, 0.046698017120361326, 0.04683414459228516, 0.04722687911987305, 0.04693113708496094, 0.04678534317016601, 0.04674911880493164, 0.046690399169921876, 0.04677264022827148, 0.04665350341796875, 0.04743148803710937, 0.046964927673339846, 0.0469903678894043, 0.04670518493652344, 0.04709542465209961, 0.0469918098449707, 0.04682380676269531, 0.04681523132324219, 0.046601696014404295, 0.04687507247924805, 0.04717577743530273, 0.04686761474609375, 0.04684272003173828, 0.047263870239257814, 0.04695561599731445, 0.049296161651611325, 0.04715657424926758, 0.047352481842041015, 0.048320510864257815, 0.04737590408325195, 0.04728857421875, 0.04706038284301758, 0.04729967880249023, 0.047011550903320314, 0.04704399871826172, 0.047239776611328124, 0.04716953659057617, 0.04685647964477539, 0.04705660629272461, 0.04688032150268555, 0.04724371337890625, 0.047092929840087894, 0.047012287139892577, 0.047180160522460934, 0.04694537734985352, 0.049160224914550785, 0.04755174255371094, 0.04715715026855469, 0.04704342269897461, 0.047058910369873044, 0.04713071823120117, 0.04726572799682617, 0.046895103454589845, 0.04707955169677734, 0.04698278427124023, 0.046803550720214845, 0.046638751983642576, 0.04674355316162109, 0.04683161544799805, 0.0468021125793457, 0.04709686279296875, 0.04700044631958008, 0.0467608642578125, 0.04663033676147461, 0.046742080688476566, 0.046930145263671875, 0.04715423965454101, 0.04705731201171875, 0.0469159049987793, 0.04671184158325195, 0.046855327606201175, 0.047171390533447266, 0.04706243133544922, 0.04690163040161133, 0.046663902282714845, 0.046642303466796875, 0.046714752197265626, 0.046828384399414065, 0.04673292922973633, 0.04681094360351563, 0.04707542419433594, 0.04699609756469727, 0.046723072052001956, 0.04682057571411133, 0.04676812744140625, 0.047311710357666015, 0.04749894332885742, 0.04701571273803711, 0.04723311996459961, 0.04688524627685547, 0.046984512329101565, 0.04727571105957031, 0.04699795150756836, 0.047085567474365236, 0.04707088088989258, 0.04774911880493164, 0.04694620895385742, 0.047764480590820314, 0.04714473724365234, 0.048256607055664064, 0.047508094787597654, 0.04719126510620117, 0.04704131317138672, 0.04716873550415039, 0.04716649627685547, 0.047022144317626954, 0.04720336151123047, 0.04717654418945313, 0.04917308807373047, 0.04733747100830078, 0.046634334564208985, 0.04660496139526367, 0.04670684814453125, 0.04685836791992187, 0.04651593780517578, 0.04661043167114258, 0.04672918319702148, 0.04662275314331055, 0.04664966583251953, 0.04647305679321289, 0.046778144836425783, 0.04685609436035156, 0.0467314224243164, 0.046637054443359374, 0.0465428466796875, 0.04679862213134765, 0.04884492874145508, 0.047108257293701175, 0.047425472259521484, 0.04769545745849609, 0.047450527191162106, 0.04734672164916992, 0.047352798461914064, 0.04721868896484375, 0.04755366516113281, 0.04732131195068359, 0.04737500762939453, 0.04705484771728516, 0.04694220733642578, 0.04694825744628906, 0.04709331130981445, 0.04704924774169922, 0.047108318328857424, 0.047584129333496095, 0.047263904571533205, 0.047042526245117185, 0.04716742324829101, 0.047321952819824216, 0.04733744049072266, 0.04731907272338867, 0.04782048034667969, 0.04759900665283203, 0.04729743957519531, 0.04762822341918945, 0.047279678344726565, 0.047782398223876955, 0.04758528137207031, 
0.04732928085327148, 0.04740236663818359, 0.04754262542724609, 0.04716790390014648, 0.04695011138916016, 0.04703862380981445, 0.04720848083496094, 0.047176990509033206, 0.04718406295776367, 0.047204864501953124, 0.04708761596679688, 0.04719327926635742, 0.04750035095214844, 0.04803558349609375, 0.0490992317199707, 0.04752384185791016, 0.04702412796020508, 0.046952449798583984, 0.04701001739501953, 0.04699280166625976, 0.046819713592529295, 0.046954559326171874, 0.046951454162597654, 0.04691164779663086, 0.04692777633666992, 0.046900062561035155, 0.046712833404541014, 0.046827201843261716, 0.04686182403564453, 0.04677305603027344, 0.04722713470458984, 0.047374080657958985, 0.04732342529296875, 0.04725884628295898, 0.04782495880126953, 0.04748128128051758, 0.047117694854736325, 0.04716396713256836, 0.046986656188964845, 0.04704732894897461, 0.047091487884521485, 0.04730828857421875, 0.04703715133666992, 0.047244544982910155, 0.047352577209472654, 0.04709756851196289, 0.04709833526611328, 0.046919296264648434, 0.04680108642578125, 0.047004833221435546, 0.046943199157714846, 0.047008705139160153, 0.046930145263671875, 0.04700960159301758, 0.047096576690673825, 0.04730720138549805, 0.04713347244262695, 0.04704902267456055, 0.04714115142822266, 0.04737897491455078, 0.047417152404785154, 0.0472388801574707, 0.047169822692871094, 0.04696268844604492, 0.04711423873901367, 0.04716896057128906, 0.04710639953613281, 0.047357566833496095, 0.04705484771728516, 0.04708771133422852, 0.04709222412109375, 0.047265792846679686, 0.04717363357543945, 0.047405055999755856, 0.04722687911987305, 0.04756604766845703, 0.047658817291259765, 0.04918495941162109, 0.04820787048339844, 0.047101791381835935, 0.04705414581298828, 0.04702294540405273, 0.04704665756225586, 0.0470098876953125, 0.04666950225830078, 0.04680233764648437, 0.04664198303222656, 0.046644447326660156, 0.04727888107299805, 0.0472042236328125, 0.04690547180175781, 0.04696211242675781, 0.04705120086669922, 0.04700710296630859, 0.04714163208007813, 0.04704595184326172, 0.04723168182373047, 0.04776300811767578, 0.047522239685058594, 0.04766310501098633, 0.04755356979370117, 0.046946720123291014, 0.04700038528442383, 0.047037822723388675, 0.046964542388916015, 0.04701571273803711, 0.04677065658569336, 0.046807361602783204, 0.04687020874023438, 0.04712588882446289, 0.04680755233764648, 0.04689680099487305, 0.04680207824707031, 0.04675955200195313, 0.047001377105712894, 0.04706067276000977, 0.04712028884887695, 0.04723500823974609, 0.04741120147705078, 0.047429889678955076, 0.04728460693359375, 0.047040576934814456, 0.04720025634765625, 0.04722073745727539, 0.047299873352050784, 0.0470863037109375, 0.04706099319458008, 0.04720979309082031, 0.04696543884277344, 0.047166622161865235, 0.04688943862915039, 0.047024417877197265, 0.0470810546875, 0.04716595077514649, 0.047247425079345706, 0.04736336135864258, 0.047432350158691405, 0.04731903839111328, 0.047573055267333984, 0.04752787017822266, 0.049433311462402346, 0.0473639030456543, 0.04711983871459961, 0.047196895599365234, 0.050116607666015625, 0.04687366485595703, 0.047940544128417965, 0.04760566329956055, 0.04736419296264648, 0.04697235107421875, 0.04700831985473633, 0.046927871704101565, 0.04672419357299805, 0.04704143905639648, 0.04695616149902344, 0.04697945785522461, 0.046962879180908204, 0.04688569641113281, 0.04715827178955078, 0.04724505615234375, 0.047337726593017576, 0.04770537567138672, 0.04702022552490234, 0.04725814437866211, 0.046821727752685546, 0.04731401443481445, 0.04717631912231445, 
0.04682767868041992, 0.047091487884521485, 0.04800431823730469, 0.04730883026123047, 0.04702284622192383, 0.04713497543334961, 0.046919166564941404, 0.04711260986328125, 0.048289630889892576, 0.04715267181396485, 0.04759318542480469, 0.04736687850952148, 0.047288352966308594, 0.04778598403930664, 0.047761375427246094, 0.04776278305053711, 0.04761465454101563, 0.047540287017822265, 0.04768678283691406, 0.0473908805847168, 0.04713334274291992, 0.04727807998657227, 0.04756816101074219, 0.04753276824951172, 0.047325183868408206, 0.04730879974365235, 0.04733456039428711, 0.047379295349121095, 0.04741059112548828, 0.04724591827392578, 0.047430721282958985, 0.047264480590820314, 0.04760726547241211, 0.047526657104492186, 0.047538433074951175, 0.047480575561523436]",tokens/s,21.20612669995516,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,7681.9456,3463.380992,0.0,3068.133376,2990.958592,s,1,17.269208984375,17.269208984375,0.0,17.269208984375,17.269208984375,17.269208984375,17.269208984375,[17.269208984375],,kWh,0.00030027408976249605,3.3115269661735504e-05,0.00011231675652000062,0.00044570611594423217,,MB,4325.49888,3576.6272,0.0,3160.408064,3145.649152,s,10,1.0323965148925782,0.10323965148925782,0.0007294582217660479,0.10305582427978516,0.10365469589233399,0.10448214836120605,0.1051441103363037,"[0.1028340835571289, 0.10299177551269531, 0.10347081756591797, 0.10287270355224609, 0.103119873046875, 0.10284572601318359, 0.102572509765625, 0.10322710418701173, 0.10530960083007812, 0.1031523208618164]",tokens/s,2479.6674175778,kWh,3.033725846305835e-06,3.345620975392618e-07,1.9910726123298936e-06,5.359360556174991e-06,tokens/kWh,47766892.582929485,MB,4329.69728,3723.42784,0.0,3307.208704,3216.658432,s,10,62.0150166015625,6.20150166015625,0.01409957620037942,6.2049936523437506,6.2156473144531255,6.219474291992188,6.222535874023437,"[6.1972587890625, 6.18768359375, 6.17258935546875, 6.20286279296875, 6.2073837890625, 6.19045849609375, 6.21155712890625, 6.214796875, 6.20712451171875, 6.22330126953125]",tokens/s,10.158829820971569,kWh,0.0001817351260232784,2.0045434461496105e-05,8.103329936267051e-05,0.000282813859847445,tokens/kWh,222761.35983570025,,s,630,62.01150816345216,0.09843096533881293,0.0009471494447929424,0.0982900161743164,0.09923420028686523,0.09994131202697754,0.10191001358032227,"[0.09723782348632813, 0.0983116455078125, 0.09762255859375, 0.10137519836425782, 0.09755088043212891, 0.09744976043701171, 0.09728169250488282, 0.09756111907958985, 0.09703590393066407, 0.09780684661865234, 0.10037862396240234, 0.0973148193359375, 0.09777766418457032, 0.0974725112915039, 0.09707705688476563, 0.09772828674316406, 0.09792979431152343, 0.09844697570800781, 0.09772054290771484, 0.09857843017578125, 0.09793561553955078, 0.09770771026611329, 0.09800505828857421, 0.09812582397460938, 0.09778995513916015, 0.09822035217285156, 0.09842156982421875, 0.09932275390625, 
0.09871084594726562, 0.0985910415649414, 0.09845369720458984, 0.09805023956298828, 0.09848012542724609, 0.09819481658935547, 0.0979807357788086, 0.09826345825195312, 0.09810320281982422, 0.09899027252197265, 0.09824441528320313, 0.09827532958984375, 0.09826850891113281, 0.09778479766845703, 0.09802310180664063, 0.09794329833984375, 0.09807843017578124, 0.1005362548828125, 0.09913788604736327, 0.09873622131347656, 0.09837532806396485, 0.09951692962646484, 0.09856409454345703, 0.09830518341064454, 0.09845209503173828, 0.09969686126708985, 0.09905561828613281, 0.09892969512939453, 0.09897657775878907, 0.09975635528564453, 0.0982936019897461, 0.0993259506225586, 0.09870566558837891, 0.09832217407226562, 0.09861135864257813, 0.09809510040283204, 0.09777561950683594, 0.0982462387084961, 0.09824278259277344, 0.09761004638671875, 0.0987605743408203, 0.09808003234863281, 0.09770877075195313, 0.0976527328491211, 0.09701699066162109, 0.09793405151367188, 0.09780429077148438, 0.10124205017089843, 0.09834815979003907, 0.09773372650146485, 0.09782550048828124, 0.09813113403320313, 0.09808774566650391, 0.09866035461425782, 0.09811293029785156, 0.09853580474853516, 0.09817059326171874, 0.09913954925537109, 0.09872819519042969, 0.09809311676025391, 0.09784742736816407, 0.09989539337158203, 0.09864777374267578, 0.09777999877929687, 0.09727999877929687, 0.09773836517333985, 0.09852761840820312, 0.09782819366455078, 0.09735234832763671, 0.09760562896728515, 0.09762815856933593, 0.09764832305908203, 0.0973171844482422, 0.09777721405029297, 0.09730912017822266, 0.09786179351806641, 0.10003440093994141, 0.09753363037109375, 0.09754041290283202, 0.09801318359375, 0.09854914855957031, 0.1016611557006836, 0.10004902648925781, 0.0999788818359375, 0.0978803482055664, 0.09867887878417969, 0.09843097686767578, 0.09821501159667968, 0.09782796478271484, 0.0989978256225586, 0.09766934204101563, 0.09828105926513672, 0.09748105621337891, 0.09713494110107422, 0.09699298858642579, 0.0980758056640625, 0.09815740966796875, 0.09839337921142578, 0.09763702392578125, 0.09789011383056641, 0.0977103042602539, 0.09774073791503907, 0.09773881530761719, 0.09795993804931641, 0.09767526245117188, 0.09785139465332031, 0.09827327728271484, 0.0983552017211914, 0.09845145416259765, 0.09827942657470704, 0.09828966522216796, 0.09861254119873047, 0.09810195159912109, 0.09817906951904297, 0.09813318634033204, 0.09808159637451172, 0.09777558135986328, 0.10108112335205079, 0.09795993804931641, 0.09749708557128907, 0.09758719635009766, 0.09900624084472656, 0.09749497222900391, 0.09725981140136719, 0.09754547119140625, 0.09744255828857422, 0.09776947021484375, 0.09739673614501954, 0.09831037139892577, 0.09793513488769531, 0.09755238342285157, 0.09802342224121094, 0.09773056030273437, 0.0977940444946289, 0.09826509094238281, 0.0982459487915039, 0.09823712158203125, 0.09810329437255859, 0.09841868591308593, 0.09808281707763672, 0.09851507568359374, 0.09801920318603516, 0.09764995574951171, 0.0975302734375, 0.09727008056640625, 0.0970857925415039, 0.09728540802001953, 0.0976546859741211, 0.1008185272216797, 0.09777359771728515, 0.09788706970214844, 0.09733734130859376, 0.09765830230712891, 0.09829228973388672, 0.09728582763671875, 0.09726326751708984, 0.09803632354736327, 0.09757052612304687, 0.09784051513671875, 0.09817826843261719, 0.09782374572753906, 0.1005355224609375, 0.09880633544921875, 0.09857794952392578, 0.09845938873291016, 0.09871046447753906, 0.098914306640625, 0.09890406036376953, 0.0984268798828125, 0.09831423950195313, 
0.09799680328369141, 0.09783465576171875, 0.09841865539550781, 0.09714236450195313, 0.0973094711303711, 0.09781043243408204, 0.10109091186523438, 0.09763062286376953, 0.09882947540283203, 0.09711856079101562, 0.09856665802001953, 0.09800681304931641, 0.10008393859863281, 0.098334716796875, 0.09760940551757813, 0.09752198028564453, 0.09824018859863282, 0.09782918548583984, 0.09851615905761718, 0.09844204711914062, 0.09834806060791015, 0.09869821166992188, 0.09855506896972656, 0.09838902282714844, 0.09859388732910156, 0.09886585235595703, 0.09843231964111328, 0.09891705322265625, 0.09827737426757813, 0.09747856140136718, 0.09772589111328125, 0.09830825805664062, 0.09813359832763671, 0.09807513427734375, 0.09807027435302734, 0.09761449432373047, 0.09760332489013672, 0.09842098999023438, 0.0990597152709961, 0.09862313842773437, 0.09802582550048829, 0.09827152252197266, 0.09862668609619141, 0.0979767074584961, 0.0977696990966797, 0.0984432601928711, 0.09843917083740235, 0.09896086120605468, 0.10192291259765625, 0.09876934051513672, 0.09808265686035156, 0.09876290893554687, 0.09939923095703125, 0.0988815689086914, 0.09734550476074219, 0.09762076568603516, 0.09781350708007812, 0.09798342132568359, 0.09769519805908203, 0.09737276458740235, 0.09735763549804688, 0.10311698913574219, 0.09764787292480469, 0.0976965103149414, 0.09736160278320312, 0.09749049377441406, 0.09875263977050781, 0.09880774688720703, 0.09867743682861328, 0.09776274871826172, 0.09809772491455078, 0.09875862121582031, 0.09925398254394531, 0.0988563232421875, 0.09835820770263672, 0.09850470733642579, 0.09856400299072265, 0.09865023803710937, 0.10345001220703125, 0.0988834228515625, 0.09871337890625, 0.0988845443725586, 0.09897551727294922, 0.09818748474121093, 0.0981310043334961, 0.09763116455078125, 0.09737010955810547, 0.09853715515136718, 0.0983043212890625, 0.09787363433837891, 0.09780048370361329, 0.09841171264648438, 0.09985107421875, 0.09841868591308593, 0.09756441497802734, 0.09792070770263672, 0.09761235046386718, 0.09822822570800781, 0.09900016021728515, 0.09871932983398438, 0.0984865264892578, 0.09876009368896485, 0.09899100494384766, 0.09896991729736328, 0.09848595428466797, 0.09875411224365234, 0.10047862243652343, 0.09967696380615235, 0.09947891235351562, 0.0992037124633789, 0.0981849594116211, 0.09810969543457031, 0.09807904052734374, 0.10052166748046874, 0.09792511749267578, 0.09777276611328126, 0.09717021179199219, 0.09814768218994141, 0.09825580596923827, 0.09750908660888671, 0.09732307434082031, 0.09761186981201173, 0.09820336151123046, 0.09804022216796875, 0.0978043212890625, 0.0975052490234375, 0.09770188903808594, 0.09836466979980468, 0.09869798278808593, 0.09816883087158203, 0.09831833648681641, 0.09854911804199219, 0.0986118392944336, 0.09829376220703125, 0.09831014251708985, 0.09771212768554688, 0.09797209930419921, 0.09819967651367187, 0.09902694702148437, 0.09742950439453125, 0.09774819183349609, 0.09801119995117187, 0.09800761413574219, 0.09749520111083984, 0.09719987487792969, 0.0970909423828125, 0.09802127838134765, 0.09926509094238281, 0.09819551849365235, 0.09779151916503906, 0.09751958465576172, 0.097968994140625, 0.09816464233398438, 0.09780585479736328, 0.09862726593017578, 0.09869606781005859, 0.09859891510009766, 0.09899008178710937, 0.09869312286376954, 0.09886105346679687, 0.0987484130859375, 0.09870870208740234, 0.09865090942382812, 0.09845916748046875, 0.09785391998291015, 0.09769983673095703, 0.09797427368164062, 0.09799680328369141, 0.09934848022460938, 0.10130585479736329, 
0.09814704132080078, 0.09845680236816406, 0.09804841613769531, 0.09833897399902344, 0.09837567901611328, 0.09800624084472656, 0.09798531341552734, 0.09794764709472656, 0.09841458892822266, 0.10112204742431641, 0.0983531494140625, 0.09916588592529296, 0.098570556640625, 0.09947955322265625, 0.09857433319091796, 0.09861734771728516, 0.09861666870117188, 0.09859343719482422, 0.09939523315429688, 0.09853577423095704, 0.09787391662597657, 0.09903923034667969, 0.09834300994873046, 0.09803695678710937, 0.09759404754638672, 0.0977589111328125, 0.09808448028564454, 0.09829036712646484, 0.09874432373046875, 0.09811558532714844, 0.09819136047363282, 0.0979415054321289, 0.09819324493408203, 0.09859292602539063, 0.09813196563720702, 0.0979415054321289, 0.10219545745849609, 0.0984901123046875, 0.09882950592041016, 0.09843558502197265, 0.09887503814697265, 0.10187843322753906, 0.10143743896484375, 0.0990904312133789, 0.09850816345214844, 0.09857087707519531, 0.09838966369628906, 0.09871385955810547, 0.0987853775024414, 0.09917030334472657, 0.09829376220703125, 0.09894092559814453, 0.09800886535644532, 0.09834518432617187, 0.09775923156738281, 0.09819750213623046, 0.09703040313720702, 0.09789360046386719, 0.09802188873291015, 0.09769705963134766, 0.09812044525146485, 0.09834700775146485, 0.09961174774169922, 0.09849689483642578, 0.09826972961425781, 0.09787187194824219, 0.09714892578125, 0.09836748504638672, 0.09928607940673828, 0.09912944030761718, 0.09867938995361328, 0.09873423767089844, 0.09877718353271485, 0.0985687026977539, 0.09843065643310547, 0.09807917022705079, 0.09949388885498046, 0.10041734313964844, 0.098836669921875, 0.09818851470947265, 0.09787577819824218, 0.09864393615722657, 0.0988436508178711, 0.09821743774414063, 0.0978969955444336, 0.09814812469482422, 0.09859667205810548, 0.0985030746459961, 0.09920909118652343, 0.0983921890258789, 0.09829942321777344, 0.09852976226806641, 0.0985676498413086, 0.09809564971923829, 0.09807023620605469, 0.09877267456054688, 0.09856265258789063, 0.09879347229003907, 0.09886105346679687, 0.09870236968994141, 0.09866748809814453, 0.09843014526367187, 0.09868576049804688, 0.09841458892822266, 0.09810256195068359, 0.09768780517578125, 0.09814822387695313, 0.09763286590576171, 0.09878880310058594, 0.10842784118652343, 0.09778790283203125, 0.09778585815429687, 0.09768450927734375, 0.09782985687255859, 0.09772748565673828, 0.09948028564453125, 0.09832681274414062, 0.0993623046875, 0.09899673461914063, 0.09872112274169922, 0.098259521484375, 0.09817046356201171, 0.09980159759521484, 0.09881394958496094, 0.09873149108886718, 0.09830249786376953, 0.09870950317382812, 0.0984019546508789, 0.09923200225830078, 0.09841439819335937, 0.09834729766845703, 0.09786777496337891, 0.09915392303466797, 0.09800048065185547, 0.09823609924316407, 0.09773744201660156, 0.0982261734008789, 0.09935004425048828, 0.09791126251220703, 0.09786685180664062, 0.0976553955078125, 0.09799088287353516, 0.09914297485351563, 0.09809590148925781, 0.09798863983154296, 0.09837359619140625, 0.09810739135742187, 0.09913549041748047, 0.0985224609375, 0.09819404602050781, 0.09873411560058594, 0.101572509765625, 0.09852047729492187, 0.09840614318847657, 0.09890614318847656, 0.098947998046875, 0.09857843017578125, 0.09878883361816407, 0.09827977752685547, 0.09770822143554687, 0.09751513671875, 0.09811392211914062, 0.09811920166015625, 0.09821027374267578, 0.09812368011474609, 0.09780758666992187, 0.09706790161132813, 0.0983531494140625, 0.0986270751953125, 0.09848242950439454, 0.09872930908203124, 
0.0981390380859375, 0.09919590759277344, 0.09891311645507812, 0.09811507415771484, 0.09813426971435547, 0.09795417785644531, 0.1010893096923828, 0.09917030334472657, 0.09870130920410156, 0.0994119644165039, 0.09875865936279297, 0.09861507415771484, 0.09883875274658203, 0.09894092559814453, 0.09849836730957032, 0.09796422576904297, 0.09813359832763671, 0.0989659194946289, 0.09770384216308593, 0.09863177490234375, 0.10135142517089844, 0.09857023620605469, 0.09838121795654296, 0.09820816040039063, 0.09786592102050781, 0.09771212768554688, 0.0980802230834961, 0.09855257415771485, 0.09847398376464844, 0.09844940948486328, 0.09907814025878907, 0.09884633636474609, 0.09837401580810547, 0.09837129974365234, 0.09841487884521484, 0.09934454345703125, 0.09922745513916016, 0.10249014282226562, 0.09919181060791016, 0.09877401733398437, 0.09970687866210938, 0.09903513336181641, 0.09886224365234375, 0.09828848266601563, 0.09843427276611329, 0.09834780883789063, 0.09866649627685548, 0.09879859161376953, 0.0982845458984375, 0.09865583801269531, 0.09988508605957032, 0.09835968017578126, 0.09839520263671875, 0.0981984634399414, 0.09803366088867188, 0.09814342498779297, 0.09887123107910156, 0.09982041931152344, 0.09819862365722656, 0.09787075042724609, 0.09864806365966797, 0.09846988677978516, 0.09888883209228516, 0.09873833465576172, 0.1013603515625, 0.09945734405517578, 0.09887097930908204, 0.09985020446777344, 0.0993845443725586, 0.10000466918945312, 0.09932185363769531, 0.1023815689086914, 0.0984320297241211, 0.09822492980957032, 0.09774851226806641, 0.09762857818603515, 0.09818342590332031, 0.09874022674560547, 0.09806758117675782, 0.09761811065673828, 0.09796784210205078, 0.09859375762939453, 0.0980090560913086, 0.0978578872680664, 0.09864979553222657, 0.09795152282714843, 0.09810326385498047, 0.09909037017822266, 0.09802783966064453, 0.0982630386352539, 0.09849037170410156]",tokens/s,10.15940457921816,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,14217.449472,7943.880704,0.0,7625.244672,7480.726528,s,1,31.691271484375,31.691271484375,0.0,31.691271484375,31.691271484375,31.691271484375,31.691271484375,[31.691271484375],,kWh,0.0007135638925916661,7.870436890557557e-05,0.0002393438025860145,0.0010316120640832562,,MB,1291.923456,8038.252544,0.0,7614.758912,7378.613248,s,10,1.1029262771606445,0.11029262771606445,0.0006181282013756579,0.11017644882202149,0.11105537338256836,0.11130946311950683,0.11151273490905762,"[0.11003997039794922, 0.11045417785644532, 0.10926451110839844, 0.1101108169555664, 0.10964064025878906, 0.1100836181640625, 0.11052799987792969, 0.11024208068847656, 0.11156355285644531, 
0.11099890899658203]",tokens/s,2321.098021701344,kWh,3.30867441877326e-06,3.6484776343284085e-07,2.1940030036405577e-06,5.867525185846658e-06,tokens/kWh,43629978.89084652,MB,1324.679168,8046.641152,0.0,7621.050368,7312.009728,s,10,50.068246093750005,5.006824609375,0.04798225837781835,4.985991455078125,5.069226220703126,5.092208081054688,5.110593569335937,"[4.98066064453125, 4.971724609375, 4.9686943359375, 4.97070263671875, 4.95717041015625, 4.991322265625, 5.02559033203125, 5.02307177734375, 5.064119140625, 5.11518994140625]",tokens/s,12.5828254263263,kWh,0.0001454977585487257,1.6048895966643477e-05,7.525497562215869e-05,0.00023680163013752785,tokens/kWh,266045.46583320113,,s,630,50.06497982788084,0.07946822194901725,0.0011984803211027646,0.07921017837524413,0.08102634429931642,0.08153281745910644,0.08363001403808595,"[0.07879561614990234, 0.07886086273193359, 0.07914246368408204, 0.0789959716796875, 0.07910665893554687, 0.07852985382080079, 0.07880332946777344, 0.07863008117675781, 0.07822214508056641, 0.07815325164794922, 0.07835286712646485, 0.07857750701904297, 0.07916092681884766, 0.0792561264038086, 0.0784691162109375, 0.07881507110595704, 0.07856553649902344, 0.07844834899902343, 0.07906684875488282, 0.07908614349365234, 0.07908096313476562, 0.07918438720703125, 0.07844608306884765, 0.07849625396728516, 0.07839119720458984, 0.0785235824584961, 0.07844915008544921, 0.0782381134033203, 0.07893135833740235, 0.07935833740234376, 0.07847081756591796, 0.078697021484375, 0.07899868774414062, 0.0797254409790039, 0.07904783630371094, 0.07943660736083985, 0.0784890594482422, 0.07886083221435547, 0.0788828125, 0.07908748626708985, 0.07923110198974609, 0.07984703826904296, 0.07883036804199219, 0.07904214477539062, 0.08299724578857422, 0.079710205078125, 0.07896051025390625, 0.07852019500732423, 0.07870079803466797, 0.07859200286865234, 0.07879052734375, 0.0789423370361328, 0.07962419128417969, 0.07975321960449219, 0.08072601318359375, 0.07944745635986328, 0.07953033447265626, 0.08043750762939453, 0.07959862518310547, 0.07934406280517578, 0.07946089935302734, 0.07921663665771485, 0.07923506927490234, 0.07839939117431641, 0.07882147216796875, 0.07949056243896484, 0.07878079986572266, 0.07844461059570312, 0.07855628967285157, 0.07834102630615235, 0.07872096252441406, 0.07826579284667969, 0.07843225860595703, 0.07914502716064453, 0.07867574310302734, 0.0787607650756836, 0.07891712188720704, 0.07927244567871093, 0.0797655029296875, 0.07850800323486327, 0.0798597412109375, 0.08022121429443359, 0.08182803344726562, 0.08063177490234374, 0.07883622741699219, 0.07859027099609375, 0.07859200286865234, 0.07883110046386718, 0.07865190124511719, 0.07839238739013672, 0.07823248291015625, 0.07887055969238281, 0.07865532684326172, 0.07927021026611328, 0.07864076995849609, 0.07888502502441407, 0.07848067474365235, 0.07861122894287109, 0.08225286102294922, 0.08089027404785157, 0.0787481918334961, 0.07841996765136719, 0.07867148590087891, 0.07957952117919923, 0.07994982147216798, 0.07858790588378907, 0.07844454193115234, 0.07843840026855468, 0.07841382598876953, 0.07832166290283203, 0.07835033416748047, 0.07835238647460938, 0.0784152603149414, 0.07859843444824219, 0.07828717041015625, 0.07817350769042969, 0.07931075286865234, 0.07866038513183594, 0.0782389144897461, 0.07899372863769531, 0.07808870697021485, 0.07815167999267578, 0.07893551635742188, 0.07874531555175782, 0.07891011047363282, 0.07910006713867188, 0.07856947326660156, 0.07854898834228516, 0.0792144012451172, 0.0783025894165039, 0.0784505615234375, 
0.07837503814697265, 0.07795696258544922, 0.07813423919677734, 0.07817382049560546, 0.07870297241210937, 0.078607421875, 0.07974803161621094, 0.07844659423828125, 0.07788748931884766, 0.0787759017944336, 0.07856988525390625, 0.07876131439208985, 0.07894416046142579, 0.07874742126464844, 0.07881340789794922, 0.07852518463134765, 0.07844249725341797, 0.07885414123535156, 0.07830089569091797, 0.07941919708251953, 0.07804566192626954, 0.07812505340576172, 0.07904665374755859, 0.08089369964599609, 0.07845913696289063, 0.07881728363037109, 0.07899750518798829, 0.07859814453125, 0.07885619354248047, 0.08038195037841797, 0.08182169342041015, 0.07891929626464844, 0.07886265563964844, 0.07891986846923828, 0.07872499084472656, 0.07882742309570312, 0.07861686706542968, 0.0780469741821289, 0.0838061752319336, 0.07826636505126953, 0.07971234893798829, 0.07816806030273438, 0.07799193572998046, 0.0781844482421875, 0.07879065704345703, 0.07873331451416016, 0.07887977600097656, 0.07933232116699218, 0.07920832061767578, 0.07871424102783203, 0.07845369720458985, 0.07837014770507812, 0.07823526763916015, 0.07840838623046875, 0.07939087677001953, 0.07906508636474609, 0.07889305877685547, 0.07954988861083985, 0.07931263732910156, 0.07984342193603515, 0.0793268814086914, 0.07900621032714844, 0.07935088348388672, 0.07876905822753906, 0.07844179534912109, 0.0783240966796875, 0.07876441955566406, 0.07874758148193359, 0.07903641510009765, 0.07836243438720703, 0.07860002899169923, 0.07869213104248046, 0.078704833984375, 0.08488998413085938, 0.07940812683105469, 0.08008924865722657, 0.0799826889038086, 0.07814015960693359, 0.07838662719726562, 0.07824553680419923, 0.07832259368896484, 0.07889510345458985, 0.07856742095947265, 0.07833164978027343, 0.08076914978027344, 0.07907328033447265, 0.07830335998535157, 0.07804914855957032, 0.07813337707519531, 0.07806566619873047, 0.07844659423828125, 0.07804927825927735, 0.08085504150390625, 0.07887667083740234, 0.07852201843261719, 0.07852886199951171, 0.07911219024658203, 0.07900701141357422, 0.07961190032958984, 0.07837670135498047, 0.0784271011352539, 0.07853260803222656, 0.07810189056396484, 0.0782196502685547, 0.07822566223144531, 0.0784254379272461, 0.0779986572265625, 0.0782991714477539, 0.07797561645507813, 0.0797669448852539, 0.07878511810302734, 0.07970406341552734, 0.0789401626586914, 0.07834009552001953, 0.07842736053466796, 0.07810880279541016, 0.07798646545410157, 0.07859814453125, 0.07852342224121094, 0.07931132507324219, 0.08138143920898437, 0.07848291015625, 0.07895094299316406, 0.07853180694580078, 0.07804803466796875, 0.07812300872802734, 0.0782265625, 0.07821794891357423, 0.07879475402832031, 0.0788515853881836, 0.07880770874023438, 0.07845887756347657, 0.0782929916381836, 0.07860601806640626, 0.07819296264648437, 0.07926576232910157, 0.07882662200927734, 0.0786502685546875, 0.07842816162109376, 0.07845478057861328, 0.07805542755126953, 0.07814553833007812, 0.07789158630371094, 0.0783667221069336, 0.08050035095214844, 0.07844493103027343, 0.07829094696044922, 0.07791785430908203, 0.07845308685302735, 0.07817625427246094, 0.0785059814453125, 0.07857561492919922, 0.07825408172607422, 0.07814697265625, 0.07811094665527343, 0.07841180419921875, 0.07901190185546875, 0.07909200286865234, 0.07983293151855468, 0.0785997085571289, 0.07868268585205078, 0.07896479797363282, 0.08152384185791016, 0.07823654174804688, 0.07842233276367187, 0.07854867553710937, 0.07861769866943359, 0.07887760162353516, 0.07909171295166016, 0.07903865814208984, 0.07895225524902344, 
0.08138137817382812, 0.07874969482421874, 0.07893389129638671, 0.07908153533935547, 0.07848761749267578, 0.07865052795410156, 0.07848841857910156, 0.07808966064453125, 0.07853523254394532, 0.07827648162841797, 0.07941651153564454, 0.07822227478027344, 0.07862067413330077, 0.07862608337402344, 0.07902259063720703, 0.07850006103515625, 0.07818595123291015, 0.07829058837890625, 0.07835533142089844, 0.07860739135742187, 0.07903126525878906, 0.07921190643310547, 0.07838105773925781, 0.07890563201904296, 0.07846685028076172, 0.07845126342773437, 0.07923712158203125, 0.07959910583496094, 0.08150067138671875, 0.08057241821289063, 0.07822950744628906, 0.07805542755126953, 0.07799603271484375, 0.07792639923095702, 0.07804518127441407, 0.07815574645996094, 0.07817424011230469, 0.07992012786865234, 0.07860326385498047, 0.0784110107421875, 0.08319872283935546, 0.07875788879394531, 0.07885334777832032, 0.07888361358642579, 0.0788848648071289, 0.08184832000732421, 0.07933747100830078, 0.07924736022949219, 0.08000863647460937, 0.07954412841796875, 0.07928086090087891, 0.07890704345703126, 0.07912470245361328, 0.07850198364257813, 0.0789167709350586, 0.07883599853515624, 0.07891763305664062, 0.0789776611328125, 0.0803430404663086, 0.07941887664794922, 0.07908512115478515, 0.07942854309082031, 0.08042291259765624, 0.07956479644775391, 0.07951360321044922, 0.07997602844238282, 0.0791427230834961, 0.07960636901855468, 0.07949107360839844, 0.0794491195678711, 0.0795453109741211, 0.07947264099121094, 0.0807455062866211, 0.079363037109375, 0.07982080078125, 0.0800948486328125, 0.07943193817138672, 0.07967984008789063, 0.07939276885986328, 0.07932723236083984, 0.0791732177734375, 0.07937065887451172, 0.07979212951660156, 0.07937229156494141, 0.07896063995361328, 0.07889920043945313, 0.07956585693359375, 0.07926882934570313, 0.0794471664428711, 0.07954463958740235, 0.07926636505126954, 0.07961599731445312, 0.07972428894042968, 0.08042521667480469, 0.08028543853759766, 0.07974687957763672, 0.08049504089355469, 0.08051331329345703, 0.08021759796142579, 0.08008672332763672, 0.07940329742431641, 0.07912678527832032, 0.07946819305419922, 0.07927606201171875, 0.07908367919921876, 0.0790090560913086, 0.07903858947753906, 0.07941606140136719, 0.07920845031738281, 0.07952706909179688, 0.07912534332275391, 0.07939481353759766, 0.07926579284667969, 0.0794859848022461, 0.07932112121582031, 0.08883500671386718, 0.07975730895996094, 0.08004198455810548, 0.07976080322265625, 0.07985142517089844, 0.07935199737548829, 0.07932774353027344, 0.07924928283691406, 0.07999501037597656, 0.07942537689208984, 0.07955471801757813, 0.07899657440185547, 0.07924995422363282, 0.07950070190429688, 0.08487010955810546, 0.07916134643554687, 0.07880850982666016, 0.0799292449951172, 0.07994579315185547, 0.07966575622558594, 0.07994140625, 0.08020403289794922, 0.07942345428466797, 0.07965436553955078, 0.07905894470214844, 0.0797083511352539, 0.0793773422241211, 0.0801695327758789, 0.07968185424804687, 0.07939798736572265, 0.07949609375, 0.0791695327758789, 0.07939033508300782, 0.07929280090332032, 0.08064717102050781, 0.08238722991943359, 0.08133904266357422, 0.08123165130615234, 0.07937462615966796, 0.07973478698730468, 0.08077053070068359, 0.0797906265258789, 0.07975526428222657, 0.08021746826171874, 0.07929305267333984, 0.07973811340332031, 0.07948979187011719, 0.07944143676757813, 0.07962841796875, 0.07920470428466797, 0.07927398681640625, 0.07909375762939454, 0.07912448120117188, 0.08118886566162109, 0.07941046142578125, 0.07968841552734375, 
0.07950252532958985, 0.08150035095214844, 0.08104409790039062, 0.07987779235839844, 0.07938288116455078, 0.07935794830322265, 0.0795670394897461, 0.0797489242553711, 0.07938457489013671, 0.079499267578125, 0.07935810852050781, 0.07943283081054688, 0.07930735778808594, 0.07965331268310546, 0.07971926116943359, 0.07925782775878906, 0.07901769256591797, 0.07898204803466796, 0.07918710327148437, 0.07961891174316406, 0.07918319702148438, 0.07946659088134765, 0.07966124725341797, 0.07996006774902344, 0.07964527893066406, 0.07971817779541016, 0.0797507553100586, 0.07967084503173828, 0.07922537231445312, 0.07941561889648438, 0.08006233978271485, 0.08129679870605469, 0.08105350494384765, 0.08074720001220703, 0.07995321655273438, 0.07971046447753906, 0.07941993713378906, 0.07920342254638672, 0.07918275451660156, 0.07891305541992187, 0.07879727935791016, 0.07898521423339844, 0.07886367797851562, 0.07955526733398438, 0.0793675537109375, 0.0793032989501953, 0.07929974365234375, 0.07898812866210937, 0.078919677734375, 0.07923846435546875, 0.07999072265625, 0.07932185363769531, 0.07918924713134766, 0.07969446563720703, 0.0798096923828125, 0.0800980453491211, 0.08417021179199219, 0.08050787353515625, 0.08026908874511719, 0.08064205169677735, 0.08102003479003907, 0.08100543975830078, 0.08074240112304687, 0.08088166046142578, 0.08057036590576172, 0.08027718353271485, 0.08064851379394532, 0.08478694152832031, 0.08089116668701171, 0.08030307006835938, 0.0804636459350586, 0.0807949447631836, 0.08055900573730469, 0.08041820526123047, 0.08113622283935547, 0.08144268798828125, 0.08038822174072266, 0.08046182250976562, 0.08059664154052734, 0.08097724914550782, 0.0805711669921875, 0.080453857421875, 0.08154681396484376, 0.08175046539306641, 0.08085664367675781, 0.08114220428466797, 0.08052889251708985, 0.08103705596923828, 0.08074217224121094, 0.0805401611328125, 0.08038243103027344, 0.08041292572021484, 0.0803421401977539, 0.08058694458007812, 0.08132038116455079, 0.08161027526855469, 0.08093949127197266, 0.08082841491699219, 0.0822108154296875, 0.0815401611328125, 0.08206784057617188, 0.08187551879882812, 0.08274687957763673, 0.08231983947753906, 0.080868896484375, 0.08472351837158203, 0.08262969970703125, 0.08121305847167969, 0.081295166015625, 0.0808647689819336, 0.08037590026855469, 0.08034159851074218, 0.08008057403564453, 0.08046956634521485, 0.08078617858886719, 0.08072096252441406, 0.08091539001464844, 0.08023372650146485, 0.08257785797119141, 0.0815836181640625, 0.08157676696777344, 0.08118681335449218, 0.08089920043945313, 0.08230796813964844, 0.08117964935302735, 0.08102515411376954, 0.0805323486328125, 0.08036544036865234, 0.08006259155273437, 0.08044953918457032, 0.08022835540771485, 0.08011529541015625, 0.08007023620605469, 0.08076576232910156, 0.08019091033935546, 0.0821335678100586, 0.08043759918212891, 0.08139081573486329, 0.08040265655517578, 0.08052745819091797, 0.08093913269042968, 0.08129740905761719, 0.08088188934326172, 0.08110467529296875, 0.08171657562255859, 0.08140009307861328, 0.08114419555664062, 0.081515869140625, 0.08179779052734375, 0.0813811492919922, 0.0811522216796875, 0.08138956451416016, 0.0811127700805664, 0.0815118408203125, 0.08080207824707031, 0.08075328063964844, 0.08192566680908203]",tokens/s,12.583646336538761,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,13838.893056,7509.77024,0.0,7107.248128,7106.945536,s,1,31.031279296875,31.031279296875,0.0,31.031279296875,31.031279296875,31.031279296875,31.031279296875,[31.031279296875],,kWh,0.0006975386936875034,7.693650366144187e-05,0.00022624129210400967,0.001000716489452955,,MB,1344.75776,7786.594304,0.0,7363.100672,7335.826944,s,10,1.1814403228759764,0.11814403228759765,0.0006428792549113897,0.11794177627563476,0.11926618499755859,0.11933966979980468,0.11939845764160156,"[0.11827983856201171, 0.1192498550415039, 0.11803539276123047, 0.11784815979003906, 0.11767497253417969, 0.11775094604492188, 0.11774118041992188, 0.11734198760986328, 0.1181048355102539, 0.11941315460205078]",tokens/s,2166.8466450918145,kWh,3.4873467305557264e-06,3.8457890405338194e-07,2.300845094642632e-06,6.17277072925174e-06,tokens/kWh,41472462.08041687,MB,1381.593088,7788.691456,0.0,7363.100672,7289.69216,s,10,73.7051279296875,7.370512792968751,0.057315518565666995,7.352478271484375,7.426213037109375,7.4726045654296875,7.509717788085938,"[7.3794248046875, 7.353822265625, 7.35113427734375, 7.3449443359375, 7.31719921875, 7.334029296875, 7.37536474609375, 7.31430908203125, 7.41590380859375, 7.51899609375]",tokens/s,8.54757352298474,kWh,0.00021599308251444565,2.382507287234034e-05,9.488759376715787e-05,0.0003347057491539438,tokens/kWh,188225.03096898977,,s,630,73.70289146423352,0.1169887166098943,0.0015773882885027697,0.11651896286010743,0.11905084762573243,0.12007807731628418,0.12293061935424805,"[0.11731967926025391, 0.11781938934326172, 0.11844182586669921, 0.11734233856201172, 0.11731692504882812, 0.11693113708496093, 0.11727174377441406, 0.12074082946777344, 0.11734416198730468, 0.1168917465209961, 0.1178921890258789, 0.11738819122314453, 0.11733113861083984, 0.11954259490966797, 0.11880242919921875, 0.11742928314208985, 0.1204397735595703, 0.11887923431396484, 0.11828025817871093, 0.11747014617919922, 0.11704073333740235, 0.1171921615600586, 0.11662432098388673, 0.1159208984375, 0.11571971130371093, 0.11668905639648437, 0.11597856140136718, 0.11601692962646484, 0.11621807861328125, 0.11574678039550781, 0.11586358642578125, 0.1160110092163086, 0.11649791717529297, 0.11741030120849609, 0.11890483093261718, 0.11583078765869141, 0.11580355072021484, 0.11570992279052734, 0.11961408233642579, 0.1165455322265625, 0.11770861053466797, 0.1159333724975586, 0.11606204986572266, 0.11648630523681641, 0.11650662231445312, 0.11591065979003906, 0.11644108581542968, 0.11681996917724609, 0.11949260711669922, 0.11677391815185546, 0.11716706848144531, 0.11711209869384766, 0.11657234954833984, 0.11677750396728516, 0.1166819839477539, 0.11635104370117187, 0.11651757049560547, 0.11652035522460938, 0.11678781127929687, 0.1169078369140625, 0.11674339294433594, 0.11728995513916016, 0.11740569305419922, 0.11787961578369141, 0.11680524444580079, 
0.11669747161865235, 0.11728451538085938, 0.11810355377197265, 0.11815203094482422, 0.11713740539550781, 0.11719884490966796, 0.11585536193847656, 0.11587174224853515, 0.11567718505859376, 0.11603148651123046, 0.11657759857177734, 0.11669574737548828, 0.11621990203857421, 0.11622531127929688, 0.11654608154296875, 0.11669318389892579, 0.11615385437011719, 0.11585731506347656, 0.11661711883544922, 0.11615916442871094, 0.11637542724609375, 0.11603981018066406, 0.11755254364013672, 0.11582659149169922, 0.115716796875, 0.11542118072509766, 0.11871846771240234, 0.11781938934326172, 0.11640815734863282, 0.11618319702148437, 0.11547561645507813, 0.11653327941894531, 0.11639417266845703, 0.11977996826171874, 0.11607654571533203, 0.11618643188476563, 0.11606495666503906, 0.11558236694335937, 0.11538902282714844, 0.11517951965332031, 0.11526143646240235, 0.1150967025756836, 0.11513945770263671, 0.1153597412109375, 0.12005359649658204, 0.11631759643554687, 0.11578034973144531, 0.11612979125976562, 0.11550252532958985, 0.1212639389038086, 0.11702758026123047, 0.11775926208496093, 0.11736956787109375, 0.1172778549194336, 0.1204293441772461, 0.11807456207275391, 0.11695753479003906, 0.117223388671875, 0.11697727966308594, 0.11672672271728515, 0.116698974609375, 0.11925676727294922, 0.11763235473632813, 0.11768521881103515, 0.11718450927734375, 0.11718771362304688, 0.11717638397216797, 0.12007654571533204, 0.11768479919433594, 0.11701385498046875, 0.11756610870361328, 0.11706982421875, 0.11710259246826171, 0.11749581146240234, 0.1176428451538086, 0.11690640258789063, 0.11888639831542969, 0.1194598388671875, 0.11746284484863281, 0.11734185791015625, 0.11791004943847656, 0.11656143951416016, 0.11658847808837891, 0.11632038116455078, 0.1165684814453125, 0.11606425476074218, 0.11607036590576172, 0.11613391876220704, 0.11702639770507813, 0.116498046875, 0.11711309051513671, 0.1158722915649414, 0.115736572265625, 0.11559295654296875, 0.11550857543945313, 0.11556118774414062, 0.11597376251220703, 0.11622252655029297, 0.11592460632324218, 0.11569801330566407, 0.11577347564697266, 0.11540070343017578, 0.11528601837158203, 0.11564019012451172, 0.1155025634765625, 0.11636188507080078, 0.11686495971679688, 0.11536345672607422, 0.11547686767578125, 0.11563014221191406, 0.11544134521484375, 0.11863062286376953, 0.11634822082519532, 0.1165420150756836, 0.11671778869628906, 0.1161338882446289, 0.11616460418701172, 0.11616598510742188, 0.11884140777587891, 0.11605257415771485, 0.11568256378173829, 0.11550592041015625, 0.11623833465576172, 0.11635302734375, 0.11666496276855469, 0.11647795104980468, 0.11590860748291015, 0.11561164855957032, 0.1158079071044922, 0.11579151916503906, 0.11660358428955078, 0.11582380676269531, 0.11573945617675781, 0.1154128646850586, 0.1157034912109375, 0.11679558563232421, 0.11622016143798829, 0.11633251190185546, 0.11848707580566406, 0.11619712066650391, 0.11595359802246094, 0.11624012756347656, 0.11581088256835938, 0.11568457794189453, 0.11568617248535157, 0.11518534088134766, 0.11517574310302735, 0.11590656280517578, 0.11579084777832031, 0.11615744018554687, 0.11601715087890625, 0.11692646026611328, 0.11905398559570313, 0.11669948577880859, 0.11633599853515625, 0.11656460571289062, 0.11700019073486329, 0.11746304321289062, 0.11719884490966796, 0.1173012466430664, 0.11711692810058594, 0.11610492706298828, 0.11734454345703126, 0.11670732879638672, 0.11752169799804688, 0.11726703643798828, 0.11702079772949218, 0.11719683074951172, 0.11743023681640626, 0.11689775848388671, 0.11668073272705078, 
0.11698175811767578, 0.11886697387695312, 0.12007933044433594, 0.11697561645507812, 0.11680528259277344, 0.1162509765625, 0.11608175659179687, 0.11588441467285156, 0.11609347534179687, 0.11653529357910156, 0.11616365051269531, 0.11637020874023438, 0.11617407989501953, 0.11681475067138672, 0.11683010864257813, 0.11680960083007813, 0.1155110092163086, 0.11537222290039062, 0.11562841796875, 0.11645696258544921, 0.1155814437866211, 0.11577340698242188, 0.11580214691162109, 0.11597414398193359, 0.11694284820556641, 0.11632025909423828, 0.11595161437988281, 0.11608882904052735, 0.11550879669189452, 0.11561619567871094, 0.11720703887939453, 0.11919769287109375, 0.11601305389404297, 0.11652301025390625, 0.115957763671875, 0.11607443237304688, 0.11581241607666015, 0.1157747802734375, 0.11556259155273438, 0.11583516693115234, 0.11557920074462891, 0.11560550689697266, 0.11651481628417969, 0.11644927978515625, 0.11666022491455078, 0.1159004135131836, 0.11606835174560547, 0.11572745513916016, 0.11591123199462891, 0.11627964782714843, 0.11609088134765624, 0.11568943786621094, 0.12488854217529297, 0.11595801544189453, 0.11549219512939453, 0.1157191390991211, 0.11534867095947265, 0.11579270172119141, 0.11528601837158203, 0.11540882873535156, 0.11542864227294922, 0.11546018981933594, 0.11562255859375, 0.11541712188720703, 0.11544102478027343, 0.1184610595703125, 0.11902361297607422, 0.11668889617919923, 0.11612364959716796, 0.11546326446533203, 0.11573750305175781, 0.11598745727539063, 0.1156147232055664, 0.11516473388671875, 0.11552947235107422, 0.11589635467529297, 0.11547913360595703, 0.11577744293212891, 0.11580425262451172, 0.11632835388183593, 0.11610739135742187, 0.11605315399169921, 0.11566572570800782, 0.11635465240478515, 0.11596431732177734, 0.11573001861572266, 0.11595407867431641, 0.11575689697265625, 0.11589014434814453, 0.11553606414794922, 0.11572780609130859, 0.11563270568847656, 0.1154703369140625, 0.11560959625244141, 0.11576422119140625, 0.11572531127929687, 0.11614412689208985, 0.11596979522705078, 0.11566105651855468, 0.1155973129272461, 0.11542658996582031, 0.11562617492675781, 0.11532546997070313, 0.11626268768310546, 0.11688931274414062, 0.11594185638427734, 0.11526557159423828, 0.1154867172241211, 0.11527935791015625, 0.11870854187011719, 0.1162221450805664, 0.11603763580322266, 0.11706572723388672, 0.11595568084716797, 0.11605814361572266, 0.11729446411132813, 0.12177005004882813, 0.11676089477539063, 0.11711923217773437, 0.11619849395751954, 0.11615526580810547, 0.11625885009765625, 0.11688256072998048, 0.11649523162841798, 0.11589222717285157, 0.11602738952636718, 0.12105318450927735, 0.11771862030029297, 0.1167487030029297, 0.11696742248535157, 0.11648000335693359, 0.11663565063476562, 0.11650252532958984, 0.11602867126464844, 0.11671218872070313, 0.11636685180664062, 0.11688396453857422, 0.11913565063476562, 0.11680409240722656, 0.11591817474365235, 0.1165893096923828, 0.11620556640625, 0.11644528198242188, 0.11641280364990235, 0.11707977294921874, 0.11668972778320312, 0.1166677474975586, 0.11659945678710938, 0.11656393432617188, 0.1177619857788086, 0.1171211166381836, 0.11697357177734374, 0.11697932434082031, 0.11807577514648437, 0.11681587219238282, 0.11648191833496094, 0.11665625762939454, 0.11618819427490235, 0.11712611389160156, 0.11630182647705078, 0.1171409912109375, 0.11646367645263672, 0.11731788635253906, 0.11631021118164063, 0.11670118713378906, 0.11694387054443359, 0.11684508514404297, 0.11630844879150391, 0.11652095794677735, 0.11644108581542968, 
0.1173829116821289, 0.11636351776123047, 0.11669055938720703, 0.11667494201660156, 0.1166909408569336, 0.11718246459960938, 0.11763097381591797, 0.11913779449462891, 0.11796259307861329, 0.11828495788574218, 0.11730329895019531, 0.11736473846435547, 0.11732742309570313, 0.11700438690185547, 0.11713571166992187, 0.11685408020019532, 0.11667664337158203, 0.11714009857177735, 0.11657174682617187, 0.11944169616699218, 0.11951238250732422, 0.11686739349365234, 0.11757622528076171, 0.11844403076171875, 0.11842355346679688, 0.11742617797851562, 0.1163235855102539, 0.11728768157958984, 0.11658035278320313, 0.11668889617919923, 0.11866521453857422, 0.11702476501464844, 0.1170489273071289, 0.11535196685791016, 0.11517951965332031, 0.11618303680419922, 0.11619328308105469, 0.11575091552734375, 0.11548598480224609, 0.11575881958007812, 0.11641548919677734, 0.11565462493896485, 0.11566492462158204, 0.11658444976806641, 0.11585740661621094, 0.11600691223144531, 0.11534694671630859, 0.11598284912109375, 0.11595161437988281, 0.11565875244140625, 0.11563804626464844, 0.12335126495361329, 0.11589427185058594, 0.11602329254150391, 0.11589971160888672, 0.11558294677734375, 0.11576598358154297, 0.11532083129882813, 0.115248291015625, 0.11609772491455078, 0.11578998565673829, 0.11568736267089844, 0.11614559936523437, 0.11591539001464844, 0.1182740478515625, 0.11597209930419922, 0.11644313812255859, 0.11608255767822266, 0.11595145416259765, 0.11555439758300781, 0.11560550689697266, 0.11642841339111328, 0.1162204818725586, 0.11532208251953124, 0.11538492584228516, 0.11590675354003906, 0.11561065673828125, 0.11565878295898438, 0.11538909149169922, 0.11592937469482421, 0.11631756591796875, 0.11659942626953125, 0.11552259063720703, 0.11919241333007813, 0.11558719635009766, 0.11555225372314454, 0.115357666015625, 0.116295166015625, 0.11642115020751953, 0.11659878540039062, 0.1163581771850586, 0.11563209533691406, 0.11633465576171875, 0.11569862365722657, 0.1160417251586914, 0.11531059265136719, 0.11606179046630859, 0.11660329437255859, 0.1161568603515625, 0.11615641784667968, 0.11594863891601563, 0.11614915466308594, 0.11594137573242187, 0.1170382080078125, 0.1160426254272461, 0.1162913589477539, 0.11654121398925782, 0.12063951873779297, 0.11642237091064453, 0.11596665954589844, 0.11635897827148438, 0.11595180511474609, 0.11610111999511719, 0.11602508544921875, 0.12166374206542968, 0.11651385498046875, 0.11630838775634765, 0.11626713562011719, 0.11626127624511719, 0.1160478744506836, 0.11586463928222657, 0.11582300567626953, 0.11591056060791016, 0.11640486145019531, 0.11585318756103516, 0.11577561950683593, 0.11583283233642579, 0.11585330963134766, 0.11607027435302734, 0.11566454315185547, 0.11567766571044921, 0.11607161712646484, 0.1202552947998047, 0.11952742767333985, 0.11950284576416016, 0.1197627182006836, 0.12187875366210937, 0.11865087890625, 0.1187056655883789, 0.1182438735961914, 0.12312067413330079, 0.11841222381591797, 0.11807917022705078, 0.11783971405029296, 0.11805538940429687, 0.11794976043701172, 0.11777503967285156, 0.11779481506347657, 0.1186013412475586, 0.12476976013183594, 0.11918838500976563, 0.11921817779541016, 0.11872255706787109, 0.11809587097167969, 0.11841519927978515, 0.1189106216430664, 0.11845683288574219, 0.11869747161865235, 0.12264028930664063, 0.11909916687011719, 0.11773379516601562, 0.11789443206787109, 0.11813529968261718, 0.11810224151611329, 0.11838041687011719, 0.12293644714355469, 0.11858979034423828, 0.11842134094238281, 0.11801683044433593, 0.11804876708984376, 
0.1178064956665039, 0.1179510726928711, 0.11802214050292968, 0.11862793731689453, 0.12290499114990235, 0.11883929443359376, 0.11898441314697265, 0.11905049896240234, 0.11834780883789063, 0.11938982391357422, 0.11981657409667969, 0.11947357177734375, 0.11894230651855468, 0.12298604583740234, 0.11841171264648437, 0.1187962875366211, 0.12015142059326171, 0.11936627197265624, 0.11872025299072266, 0.11878425598144532, 0.12145417785644531, 0.12479065704345703, 0.11929971313476563, 0.12009305572509765, 0.12036937713623047, 0.11819010925292969, 0.11803171539306641, 0.12105948638916016, 0.11948726654052734, 0.11871027374267579, 0.12256870269775391, 0.11793513488769532, 0.11781568145751953, 0.11783612823486328, 0.11754112243652344, 0.11750396728515625, 0.11801136016845704, 0.11842800140380859, 0.12081568145751953, 0.12011945343017578, 0.12092556762695313, 0.11849075317382812, 0.11861055755615234, 0.1217765121459961, 0.11825679779052735, 0.1191207046508789, 0.11906460571289063, 0.12291635131835937, 0.118884033203125, 0.1207856674194336, 0.11918946838378906, 0.11895110321044922, 0.11867587280273438, 0.11916716766357421]",tokens/s,8.547832893445257,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,3749.842944,1885.208576,0.0,1482.686464,1467.396096,s,1,11.8882353515625,11.8882353515625,0.0,11.8882353515625,11.8882353515625,11.8882353515625,11.8882353515625,[11.8882353515625],,kWh,0.0001422475784041732,1.567877017248466e-05,4.6420314913989125e-05,0.000204346663490647,,MB,3766.603776,2090.729472,0.0,1667.23584,1577.964032,s,10,0.5690587539672851,0.05690587539672852,0.0011523572793791178,0.056804256439208986,0.057631431579589845,0.05876733207702636,0.05967605247497559,"[0.05680326461791992, 0.05990323257446289, 0.05737900924682617, 0.057311103820800784, 0.056839008331298825, 0.05667107009887695, 0.055923137664794925, 0.05578105545043945, 0.05680524826049805, 0.055642623901367184]",tokens/s,4498.656741773228,kWh,1.684052123323933e-06,1.857211555895595e-07,9.897677139080211e-07,2.8595409928215135e-06,tokens/kWh,89524857.5357559,MB,3770.732544,2092.826624,0.0,1667.23584,1579.625472,s,10,34.983436279296875,3.4983436279296876,0.041154172068510665,3.4920903320312497,3.5502058349609373,3.5546718872070313,3.5582447290039063,"[3.559137939453125, 3.542182373046875, 3.54921337890625, 3.52357666015625, 3.506055419921875, 3.473361083984375, 3.454615234375, 3.478125244140625, 3.44027978515625, 3.45688916015625]",tokens/s,18.008522518207645,kWh,0.00010323872574334502,1.1387322113740926e-05,4.067454339529247e-05,0.00015530059125237842,tokens/kWh,405664.9075959984,,s,630,34.978776645660425,0.05552186769152445,0.0009240732439161798,0.05550774383544922,0.05645574531555175,0.05676730499267578,0.0581692774963379,"[0.05509983825683594, 0.05568121719360351, 0.05622323226928711, 0.05688790512084961, 0.0567388801574707, 0.05606870269775391, 0.056413822174072266, 0.05631987380981445, 
0.0561649284362793, 0.055975936889648435, 0.056150016784667967, 0.0574455680847168, 0.056124446868896484, 0.05626144027709961, 0.0559851188659668, 0.05616998291015625, 0.05683865737915039, 0.05617049789428711, 0.056430335998535155, 0.05642470550537109, 0.05729017639160156, 0.05630624008178711, 0.05714944076538086, 0.05640192031860351, 0.05641436767578125, 0.05608745574951172, 0.05642550277709961, 0.05720419311523438, 0.05620985412597656, 0.05613772964477539, 0.05668659210205078, 0.056180736541748044, 0.05590630340576172, 0.05593407821655273, 0.05631375885009766, 0.05579776000976563, 0.05616841506958008, 0.0561797103881836, 0.056387584686279295, 0.056140960693359374, 0.05621161651611328, 0.05594086456298828, 0.05628380966186523, 0.05641654586791992, 0.056589920043945315, 0.05639360046386719, 0.05613612747192383, 0.05603692626953125, 0.05611276626586914, 0.05606444931030274, 0.05625084686279297, 0.056430431365966795, 0.05713296127319336, 0.05701043319702148, 0.05641766357421875, 0.05633292770385742, 0.05634595108032227, 0.05728927993774414, 0.05630144119262695, 0.05627414321899414, 0.06015887832641602, 0.05701232147216797, 0.060483585357666014, 0.0556278076171875, 0.056025215148925785, 0.05658995056152344, 0.056458625793457035, 0.05589433670043945, 0.05576489639282227, 0.05627356719970703, 0.05662297439575195, 0.05752963256835938, 0.0560873908996582, 0.05650636672973633, 0.05668832015991211, 0.056237537384033205, 0.05611212921142578, 0.05623283386230469, 0.05576188659667969, 0.05571379089355469, 0.055861248016357425, 0.05622323226928711, 0.05634099197387695, 0.05599401473999024, 0.05569353485107422, 0.05774774551391602, 0.05599420928955078, 0.056454208374023436, 0.05649299240112305, 0.056463359832763675, 0.056557056427001956, 0.05690214538574219, 0.056715328216552736, 0.05624431991577149, 0.056106208801269535, 0.05695756912231445, 0.055938526153564455, 0.055960094451904294, 0.05576713562011719, 0.05619612884521484, 0.05607513427734375, 0.05595750427246094, 0.055925823211669924, 0.055892929077148434, 0.055806976318359375, 0.05575372695922851, 0.056008705139160155, 0.05665731048583984, 0.05628915023803711, 0.057950847625732424, 0.056086593627929685, 0.05608451080322266, 0.0559692497253418, 0.05611552047729492, 0.05608879852294922, 0.056045280456542966, 0.055931167602539064, 0.05601808166503906, 0.055742305755615236, 0.05574467086791992, 0.05647769546508789, 0.05637615966796875, 0.05572403335571289, 0.05589785766601563, 0.056166015625, 0.05608489608764648, 0.05644902420043945, 0.056319808959960936, 0.055889694213867185, 0.05609689712524414, 0.05611894226074219, 0.0558287353515625, 0.056166942596435544, 0.05629123306274414, 0.05597382354736328, 0.056336414337158205, 0.056051231384277346, 0.05606636810302734, 0.05604364776611328, 0.05606800079345703, 0.056413566589355466, 0.05645542526245117, 0.055966175079345704, 0.05616659164428711, 0.05612524795532227, 0.056231937408447265, 0.05613724899291992, 0.05682223892211914, 0.05659856033325195, 0.055748577117919924, 0.0556596794128418, 0.05596246337890625, 0.055853279113769534, 0.05610412979125977, 0.056017505645751954, 0.05555199813842773, 0.05579385757446289, 0.06115020751953125, 0.056279422760009766, 0.05653753662109375, 0.05653299331665039, 0.05664115142822266, 0.05679056167602539, 0.056533023834228514, 0.055951454162597655, 0.0561253776550293, 0.056093471527099606, 0.05603750228881836, 0.05627859115600586, 0.06039788818359375, 0.05627494430541992, 0.05670297622680664, 0.05629302215576172, 0.05623023986816406, 0.05621680068969727, 
0.056236927032470706, 0.056594337463378906, 0.0561148796081543, 0.056136001586914064, 0.05654064178466797, 0.05625040054321289, 0.05601126480102539, 0.05596160125732422, 0.05599641418457031, 0.056033184051513675, 0.056648929595947264, 0.05590454483032226, 0.05583446502685547, 0.055959903717041015, 0.05565206527709961, 0.055877632141113284, 0.05607424163818359, 0.05643468856811523, 0.05624422454833984, 0.05607955169677734, 0.05599929428100586, 0.05641830444335937, 0.055840446472167966, 0.055758880615234374, 0.05648329544067383, 0.05580230331420898, 0.05556671905517578, 0.05556329727172851, 0.05546697616577148, 0.05622272109985352, 0.05570611190795898, 0.05570816040039062, 0.05581568145751953, 0.05549107360839844, 0.05570729446411133, 0.056546688079833984, 0.0559031982421875, 0.05610063934326172, 0.056232158660888674, 0.05630527877807617, 0.05570499038696289, 0.05589215850830078, 0.05581414413452149, 0.05580879974365234, 0.05596160125732422, 0.05583462524414062, 0.0559288330078125, 0.05591654586791992, 0.056229183197021484, 0.05585737609863281, 0.05611772918701172, 0.055931999206542966, 0.05615913772583008, 0.0558485107421875, 0.05617504119873047, 0.05603942489624023, 0.05552239990234375, 0.056115585327148436, 0.055767070770263674, 0.05616486358642578, 0.058258495330810546, 0.055813056945800785, 0.058327041625976565, 0.05752012634277344, 0.05570880126953125, 0.055562625885009764, 0.05552799987792969, 0.0554879035949707, 0.05515727996826172, 0.05515462493896484, 0.05513631820678711, 0.055175167083740234, 0.05542816162109375, 0.05543212890625, 0.055431297302246094, 0.05511699295043945, 0.05501795196533203, 0.055363582611083983, 0.055591102600097655, 0.05550470352172852, 0.05547792053222656, 0.05562543869018555, 0.05560793685913086, 0.055414081573486325, 0.055488510131835936, 0.058770111083984375, 0.05574860763549805, 0.056592159271240235, 0.05552492904663086, 0.056035999298095704, 0.055691551208496094, 0.056852191925048826, 0.05591459274291992, 0.05607190322875977, 0.055699905395507815, 0.05597148895263672, 0.05592457580566406, 0.05556659317016602, 0.05559276962280273, 0.05572345733642578, 0.055484256744384765, 0.055483295440673826, 0.055586814880371094, 0.05558236694335938, 0.055352798461914064, 0.05528591918945312, 0.05533779144287109, 0.05532403182983398, 0.055130878448486326, 0.05532649612426758, 0.05550649642944336, 0.0558350715637207, 0.055654270172119144, 0.056663425445556644, 0.055452415466308594, 0.05547964859008789, 0.055513439178466795, 0.05553100967407226, 0.05587231826782227, 0.05570694351196289, 0.05574316787719726, 0.05550447845458984, 0.05547564697265625, 0.055264225006103514, 0.05517414474487305, 0.05557974243164063, 0.05517916870117188, 0.055226497650146485, 0.05517513656616211, 0.05522604751586914, 0.05527743911743164, 0.05565884780883789, 0.05551030349731445, 0.05521481704711914, 0.05521369552612305, 0.05525904083251953, 0.05585728073120117, 0.05607161712646484, 0.05557955169677734, 0.055481407165527345, 0.05490726470947266, 0.055054176330566404, 0.055428447723388674, 0.055053119659423826, 0.055053855895996096, 0.054984256744384764, 0.05540342330932617, 0.05589411163330078, 0.05540604782104492, 0.055327167510986326, 0.05557779312133789, 0.05494409561157226, 0.05460425567626953, 0.0547061767578125, 0.055215774536132814, 0.05492156982421875, 0.05445859146118164, 0.0543507194519043, 0.056600704193115234, 0.05479708862304687, 0.05442902374267578, 0.054850208282470704, 0.054247425079345706, 0.05454956817626953, 0.0543831672668457, 0.05434787368774414, 0.054389022827148435, 
0.05491888046264649, 0.05602323150634766, 0.0552523193359375, 0.055198463439941406, 0.05531238555908203, 0.055365631103515625, 0.05584691238403321, 0.05515468978881836, 0.05509545516967773, 0.05500912094116211, 0.0550252799987793, 0.0554552001953125, 0.05621238327026367, 0.05590755081176758, 0.05514736175537109, 0.05507676696777344, 0.05583039855957031, 0.055146240234375, 0.055541248321533204, 0.05513628768920899, 0.05537187194824219, 0.055167617797851565, 0.055314590454101566, 0.05506252670288086, 0.0550830078125, 0.05526537704467773, 0.054867870330810545, 0.05486537551879883, 0.055206047058105466, 0.0552564811706543, 0.05505737686157226, 0.05500873565673828, 0.05507952117919922, 0.054731807708740234, 0.0549950065612793, 0.055126880645751955, 0.054163455963134766, 0.054284191131591795, 0.054202014923095704, 0.05439231872558594, 0.054231006622314455, 0.05405487823486328, 0.054209537506103515, 0.05420032119750977, 0.05401190567016602, 0.054075393676757816, 0.05441312026977539, 0.0550709114074707, 0.055236129760742186, 0.05730287933349609, 0.05501776123046875, 0.054386398315429685, 0.05458560180664063, 0.05403071975708008, 0.054128639221191405, 0.05389299011230469, 0.05446873474121094, 0.05407360076904297, 0.05464643096923828, 0.05479638290405273, 0.0546798095703125, 0.05558643341064453, 0.05549913787841797, 0.05613337707519531, 0.05461199951171875, 0.055139583587646486, 0.054706527709960935, 0.05521036911010742, 0.05532396697998047, 0.05491958236694336, 0.055788928985595704, 0.05708019256591797, 0.05655401611328125, 0.057742462158203126, 0.05753472137451172, 0.055556705474853516, 0.05526095962524414, 0.05519935989379883, 0.05520217514038086, 0.05510483169555664, 0.05520684814453125, 0.05589215850830078, 0.05450070571899414, 0.05427452850341797, 0.05426752090454102, 0.05463391876220703, 0.05398419189453125, 0.0538152961730957, 0.05400166320800781, 0.05414297485351562, 0.053901313781738285, 0.05374697494506836, 0.05496044921875, 0.054909343719482424, 0.053970623016357425, 0.053782527923583984, 0.053741695404052735, 0.05490502548217773, 0.054937599182128906, 0.053856254577636715, 0.05452313613891602, 0.054827777862548825, 0.054255615234375, 0.055932926177978515, 0.05652297592163086, 0.05470163345336914, 0.055320064544677736, 0.055318496704101563, 0.05483750534057617, 0.05586380767822265, 0.05571548843383789, 0.0559310073852539, 0.05522249603271484, 0.05502975845336914, 0.05520515060424805, 0.055933887481689454, 0.05512271881103516, 0.05561446380615234, 0.055259136199951174, 0.055191551208496094, 0.055662239074707034, 0.05523011016845703, 0.054993888854980466, 0.05485744094848633, 0.0550522575378418, 0.05542697525024414, 0.05492073440551758, 0.05505465698242187, 0.05504438400268555, 0.05537750244140625, 0.055226238250732425, 0.05511017608642578, 0.0547608642578125, 0.05481532669067383, 0.054849536895751956, 0.055271263122558596, 0.05515222549438477, 0.05517504119873047, 0.05507347106933594, 0.055285377502441405, 0.05494617462158203, 0.05452908706665039, 0.05480748748779297, 0.055301185607910155, 0.05549382400512695, 0.05518617630004883, 0.05529702377319336, 0.054830528259277346, 0.05510611343383789, 0.05517926406860352, 0.05527961730957031, 0.05548799896240234, 0.05550899124145508, 0.055351806640625, 0.05511167907714844, 0.055951358795166016, 0.0545926399230957, 0.05518425750732422, 0.05504358291625976, 0.05501388931274414, 0.05565030288696289, 0.05618483352661133, 0.0550401611328125, 0.05491024017333984, 0.054680030822753904, 0.05486396789550781, 0.054624256134033204, 0.05473846435546875, 
0.05480495834350586, 0.05482211303710938, 0.05720963287353516, 0.05621760177612305, 0.05588172912597656, 0.05512966537475586, 0.05520329666137695, 0.05479110336303711, 0.05462169647216797, 0.05441180801391601, 0.05496543884277344, 0.05439148712158203, 0.05433152008056641, 0.05429043197631836, 0.0543785285949707, 0.05438051223754883, 0.054247425079345706, 0.054230911254882816, 0.05421244812011719, 0.05504003143310547, 0.054751071929931644, 0.054704544067382815, 0.055398399353027344, 0.05517311859130859, 0.054980609893798826, 0.05446041488647461, 0.05417577743530273, 0.054441089630126956, 0.054276161193847657, 0.054271873474121095, 0.05437532806396484, 0.05402009582519531, 0.05389273452758789, 0.054024158477783205, 0.05473936080932617, 0.05480799865722656, 0.054694305419921874, 0.05470399856567383, 0.05436403274536133, 0.054645153045654295, 0.05458524703979492, 0.05526742553710937, 0.05518912124633789, 0.05537171173095703, 0.0546124153137207, 0.05408969497680664, 0.054005630493164064, 0.05428460693359375, 0.0542534065246582, 0.0539703369140625, 0.053854751586914065, 0.05389078521728516, 0.053886302947998045, 0.0538039665222168, 0.05384796905517578, 0.05396905517578125, 0.053856254577636715, 0.054172702789306644, 0.054700958251953126, 0.05415433502197266, 0.05496412658691406, 0.05448495864868164, 0.05433142471313476, 0.05396889495849609, 0.054687744140625, 0.054306751251220704, 0.05426953506469727, 0.05391603088378906, 0.054015872955322265, 0.05424505615234375, 0.05418361663818359, 0.054510433197021486, 0.05497612762451172, 0.054960769653320314, 0.05541622543334961, 0.05550511932373047, 0.05614604949951172, 0.05523046493530273, 0.05466912078857422, 0.055484607696533204, 0.05471827316284179, 0.05480172729492187, 0.056127902984619144, 0.05520787048339844, 0.05716345596313477, 0.056642398834228516, 0.05509734344482422, 0.055334529876708984, 0.055126399993896485, 0.05510464096069336, 0.055091617584228515, 0.055415264129638674, 0.05528771209716797, 0.055060161590576175, 0.055077438354492185, 0.05510128021240234, 0.055261184692382816, 0.055314208984375, 0.054841567993164066, 0.05457408142089844, 0.05443270492553711, 0.05438793563842773, 0.05457392120361328, 0.05402627182006836, 0.054210369110107424, 0.05419023895263672, 0.0540032958984375, 0.05417795181274414, 0.05483750534057617, 0.056281089782714844, 0.05427199935913086, 0.05410927963256836, 0.0544285774230957, 0.05575884628295898, 0.054493408203125, 0.05565241622924805, 0.055693119049072266, 0.05506038284301758, 0.054329345703125, 0.053983070373535155]",tokens/s,18.010921490536465,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,9997.320192,6193.872896,0.0,5798.62528,5404.427264,s,1,21.414580078125,21.414580078125,0.0,21.414580078125,21.414580078125,21.414580078125,21.414580078125,[21.414580078125],,kWh,0.00040889490950000887,4.509696875311965e-05,0.00015209345500799554,0.0006060853332611241,,MB,5851.189248,6493.765632,0.0,6077.546496,5755.124736,s,10,1.5461013641357424,0.15461013641357424,0.0010709991829966146,0.15421614074707032,0.15553902740478515,0.1563750663757324,0.15704389755249024,"[0.1536700439453125, 0.1537261047363281, 0.15385443115234376, 0.1552923583984375, 0.15485772705078124, 0.15721110534667967, 0.15535324096679687, 0.15371498107910156, 0.15384352111816407, 0.15457785034179689]",tokens/s,1655.7775960769677,kWh,4.525662495128671e-06,4.990994874557971e-07,2.754899639815319e-06,7.779661622399787e-06,tokens/kWh,32906315.521860942,MB,5855.453184,6495.862784,0.0,6079.643648,5755.127296,s,10,93.96771484375,9.396771484374998,0.022288536846970027,9.403882324218749,9.41765830078125,9.423301318359375,9.427815732421875,"[9.3799228515625, 9.384916015625, 9.3537705078125, 9.4024921875, 9.4150244140625, 9.4099326171875, 9.416404296875, 9.4289443359375, 9.4052724609375, 9.37103515625]",tokens/s,6.704430357251609,kWh,0.00027397648633362115,3.0219135124437525e-05,0.00012033097660658569,0.0004245265980646443,tokens/kWh,148400.59559803305,,s,630,93.96464640808102,0.14915023239377945,0.0014576631769397138,0.14882168579101562,0.15035004272460936,0.1517781608581543,0.15478501693725588,"[0.14869322204589844, 0.14762567138671875, 0.1479171447753906, 0.14763845825195313, 0.14811148071289063, 0.14806390380859374, 0.14732322692871094, 0.14759526062011719, 0.1470401611328125, 0.15032931518554687, 0.14794320678710937, 0.14745535278320313, 0.1491240692138672, 0.14827081298828124, 0.14831849670410155, 0.1475722198486328, 0.1472824249267578, 0.14856396484375, 0.1482196502685547, 0.15020841979980468, 0.14824070739746092, 0.14825811767578126, 0.14736863708496092, 0.14863743591308592, 0.1481976623535156, 0.14862748718261717, 0.14834889221191405, 0.14808268737792968, 0.14870527648925783, 0.1483038787841797, 0.14882611083984376, 0.1495715789794922, 0.1484365692138672, 0.1597334747314453, 0.14883030700683594, 0.14878717041015624, 0.14747048950195313, 0.14897372436523437, 0.14865235900878906, 0.1496309814453125, 0.14900428771972657, 0.14825062561035157, 0.1486131134033203, 0.14883021545410155, 0.14932786560058595, 0.14994583129882813, 0.14887580871582032, 0.14981666564941407, 0.14863226318359374, 0.1529896697998047, 0.14901248168945314, 0.14992092895507814, 0.15065589904785157, 0.15179107666015626, 0.14887391662597657, 0.14837120056152345, 0.14838377380371093, 0.14877839660644532, 0.1484888000488281, 0.14904864501953125, 0.14909516906738282, 0.1494608917236328, 0.14846978759765625, 0.1496558074951172, 0.14841241455078125, 0.1487639617919922, 0.14867318725585937, 0.14862339782714845, 0.14770947265625, 0.1483675537109375, 0.148212158203125, 0.14875222778320313, 0.1481748504638672, 0.1482845458984375, 0.1483008575439453, 0.1510084228515625, 0.14925482177734375, 0.1484363250732422, 0.14909698486328124, 0.14778175354003906, 0.1488296661376953, 0.14932199096679688, 0.14869664001464844, 0.1495149688720703, 0.14797145080566407, 0.1482144012451172, 0.14876162719726563, 0.14857110595703124, 0.14824552917480469, 0.1488271026611328, 0.14931942749023439, 0.14919296264648438, 0.14880149841308593, 0.1487803497314453, 0.1488778533935547, 0.14855392456054686, 0.1491531524658203, 
0.1489846649169922, 0.1486736297607422, 0.15232412719726562, 0.14863632202148438, 0.14847152709960937, 0.14796214294433593, 0.14859674072265625, 0.1481171875, 0.15005113220214844, 0.1487667236328125, 0.14937210083007812, 0.14936557006835938, 0.1481011199951172, 0.14915788269042968, 0.1490960693359375, 0.14819570922851563, 0.15090188598632812, 0.1498714904785156, 0.15106405639648438, 0.14832896423339845, 0.1481359100341797, 0.15347920227050782, 0.15166259765625, 0.1485723876953125, 0.14847564697265625, 0.14806620788574218, 0.14822412109375, 0.14807215881347657, 0.14873980712890625, 0.14863491821289063, 0.14871775817871094, 0.1475831298828125, 0.14743382263183594, 0.14791830444335938, 0.14846624755859375, 0.1480701446533203, 0.14804605102539062, 0.14847958374023437, 0.1476182403564453, 0.14829388427734375, 0.14847669982910155, 0.14720098876953125, 0.14772781372070312, 0.14734393310546876, 0.1486005096435547, 0.14904071044921874, 0.14856781005859376, 0.14798489379882812, 0.14783074951171876, 0.14863385009765626, 0.14771382141113282, 0.1479664611816406, 0.14811546325683594, 0.14944169616699218, 0.14830677795410158, 0.14739865112304687, 0.1484963836669922, 0.1483937530517578, 0.14879945373535156, 0.15001983642578126, 0.14955570983886718, 0.14793023681640624, 0.14717837524414062, 0.1480492858886719, 0.1470384063720703, 0.14725164794921874, 0.1484963836669922, 0.1487972412109375, 0.14755027770996093, 0.1514108123779297, 0.15187052917480467, 0.14746124267578126, 0.1473697967529297, 0.14760549926757813, 0.14771955871582032, 0.1472357177734375, 0.1477849884033203, 0.14707341003417967, 0.148093017578125, 0.1483857879638672, 0.14858607482910155, 0.14809129333496093, 0.1475440673828125, 0.14888960266113282, 0.15245843505859374, 0.1484923858642578, 0.14758985900878907, 0.14851277160644533, 0.1549168701171875, 0.15084339904785157, 0.1491494140625, 0.14921142578125, 0.1502882843017578, 0.14894355773925783, 0.1477081298828125, 0.14944050598144532, 0.1493071746826172, 0.14865225219726563, 0.1485701141357422, 0.1485722198486328, 0.14908409118652344, 0.14803762817382812, 0.14895718383789064, 0.14777276611328125, 0.14878172302246093, 0.14904115295410156, 0.14849853515625, 0.14821775817871094, 0.1479959716796875, 0.14994912719726564, 0.14810691833496092, 0.15077375793457032, 0.1483290557861328, 0.14976792907714845, 0.14842880249023438, 0.14974771118164062, 0.14851005554199218, 0.14803190612792969, 0.15026406860351563, 0.14997039794921874, 0.15199215698242188, 0.14902342224121093, 0.15004876708984374, 0.14858610534667968, 0.14860736083984374, 0.14988082885742188, 0.14853324890136718, 0.1490370635986328, 0.14890748596191405, 0.15016397094726563, 0.14825065612792968, 0.14872111511230468, 0.14918428039550782, 0.14949862670898437, 0.14947123718261718, 0.15446220397949217, 0.14959820556640624, 0.1488709716796875, 0.14924205017089845, 0.14839558410644532, 0.14954745483398438, 0.14903923034667968, 0.14988047790527342, 0.1484022979736328, 0.14809097290039064, 0.14888755798339845, 0.14835842895507811, 0.1482406005859375, 0.148603271484375, 0.15128997802734376, 0.1540157470703125, 0.14901043701171876, 0.14885842895507811, 0.14881837463378905, 0.1508900146484375, 0.14998348999023436, 0.14970921325683595, 0.14914093017578126, 0.1487073974609375, 0.14893446350097655, 0.1482922821044922, 0.14963302612304688, 0.1492716827392578, 0.14811839294433593, 0.14913449096679687, 0.1487779541015625, 0.14959507751464843, 0.14836361694335937, 0.14952310180664063, 0.1492141418457031, 0.14838272094726562, 0.14861827087402343, 
0.14846620178222655, 0.1484640655517578, 0.14897500610351563, 0.15043394470214844, 0.14934780883789062, 0.15354365539550782, 0.14910258483886718, 0.14803506469726563, 0.14862736511230468, 0.14973193359375, 0.14824758911132813, 0.14825161743164061, 0.15297946166992188, 0.14886416625976562, 0.14847634887695313, 0.1480258483886719, 0.1501449279785156, 0.1487930908203125, 0.14885906982421876, 0.14982354736328124, 0.14834597778320313, 0.1485832977294922, 0.14977641296386718, 0.1501390380859375, 0.14935600280761718, 0.14840663146972657, 0.14973747253417968, 0.1487237091064453, 0.15372288513183593, 0.1542021179199219, 0.15008563232421876, 0.14929843139648438, 0.1494535675048828, 0.15041944885253905, 0.14890567016601564, 0.14943994140625, 0.15004547119140624, 0.14868695068359375, 0.14909645080566405, 0.15104730224609375, 0.14900314331054687, 0.14810520935058594, 0.14951190185546875, 0.15054006958007812, 0.15104666137695313, 0.148453369140625, 0.1553720703125, 0.15253619384765624, 0.15005477905273437, 0.15027244567871093, 0.1490392608642578, 0.14948953247070312, 0.14966348266601562, 0.14848284912109375, 0.14821693420410156, 0.14854351806640625, 0.14924864196777343, 0.147783935546875, 0.1483994903564453, 0.15148042297363282, 0.14947177124023436, 0.1485823974609375, 0.14839808654785155, 0.1488773193359375, 0.148494140625, 0.1496938934326172, 0.14873458862304687, 0.1487904052734375, 0.1484127960205078, 0.14929533386230467, 0.15060169982910157, 0.14935833740234375, 0.14962728881835938, 0.14855401611328126, 0.14854908752441406, 0.14969091796875, 0.1484633331298828, 0.14830589294433594, 0.14874237060546874, 0.14845526123046876, 0.14877926635742186, 0.1484035186767578, 0.14987881469726563, 0.1501620788574219, 0.14897970581054687, 0.14876022338867187, 0.14869334411621093, 0.1488504638671875, 0.14823440551757813, 0.14950778198242187, 0.14917610168457032, 0.1492875213623047, 0.14885682678222656, 0.1486267547607422, 0.14904124450683592, 0.15527792358398437, 0.1493828125, 0.1489998779296875, 0.15046054077148438, 0.1489331512451172, 0.1493519744873047, 0.14800035095214845, 0.14866111755371095, 0.14909823608398437, 0.14909788513183594, 0.15014556884765626, 0.15066879272460937, 0.14921014404296876, 0.14942384338378906, 0.1540068817138672, 0.1486313018798828, 0.1485221405029297, 0.14935162353515624, 0.15868313598632813, 0.14874176025390626, 0.14904620361328125, 0.1481169891357422, 0.14850204467773437, 0.14870013427734374, 0.15256480407714842, 0.14831610107421875, 0.1491425323486328, 0.14923887634277344, 0.14864175415039063, 0.1484666290283203, 0.14875852966308595, 0.14868829345703125, 0.14889418029785156, 0.14868826293945311, 0.14865306091308594, 0.14826060485839843, 0.1482936248779297, 0.1487441864013672, 0.14824038696289063, 0.14832640075683592, 0.14848410034179688, 0.15093898010253906, 0.14885136413574218, 0.14819244384765626, 0.14928790283203125, 0.14864918518066406, 0.14966636657714844, 0.14883955383300781, 0.14889248657226561, 0.14866152954101564, 0.14918687438964845, 0.1586386260986328, 0.14936473083496093, 0.14888490295410156, 0.14899606323242187, 0.14933056640625, 0.14898585510253906, 0.14900230407714843, 0.14952415466308594, 0.1493362579345703, 0.1493873291015625, 0.14871347045898436, 0.14941094970703125, 0.14929939270019532, 0.15034233093261717, 0.14849827575683594, 0.15302671813964844, 0.1494466552734375, 0.1496821746826172, 0.14883757019042967, 0.14960928344726562, 0.1500603790283203, 0.14789903259277343, 0.14862156677246094, 0.14918415832519533, 0.14916336059570312, 0.14897967529296874, 
0.14984083557128905, 0.14887225341796875, 0.1493144989013672, 0.1508905029296875, 0.14921522521972655, 0.1494970245361328, 0.15022572326660155, 0.1524625244140625, 0.14907379150390626, 0.14920109558105468, 0.14924057006835936, 0.1495421142578125, 0.14883033752441407, 0.14918313598632812, 0.14827705383300782, 0.14834063720703125, 0.14922323608398438, 0.14885699462890625, 0.14836143493652343, 0.14920089721679688, 0.14912666320800783, 0.1480955810546875, 0.1491406707763672, 0.14992057800292968, 0.14986650085449219, 0.1492906494140625, 0.1483206024169922, 0.1494958038330078, 0.1485148468017578, 0.14854960632324218, 0.1516559295654297, 0.15033599853515625, 0.14883602905273438, 0.15114454650878906, 0.1524001007080078, 0.15234979248046876, 0.150059326171875, 0.15003231811523438, 0.15018240356445312, 0.148861083984375, 0.15020191955566406, 0.14874790954589845, 0.14908294677734374, 0.14983331298828126, 0.1494727325439453, 0.1487631072998047, 0.14905596923828124, 0.15092326354980468, 0.14940570068359374, 0.1492699890136719, 0.15017015075683593, 0.15011744689941406, 0.14924691772460938, 0.149712890625, 0.15187557983398436, 0.1520639953613281, 0.14955929565429688, 0.14969241333007813, 0.15002032470703125, 0.15015298461914062, 0.1496303405761719, 0.14942672729492187, 0.14839517211914063, 0.14889573669433595, 0.14893875122070313, 0.1481029510498047, 0.1496693115234375, 0.15147906494140626, 0.15175680541992187, 0.14954202270507813, 0.14962690734863282, 0.14901539611816406, 0.1528070373535156, 0.16052671813964844, 0.14847999572753906, 0.14863999938964845, 0.14976092529296875, 0.14942604064941406, 0.1487962951660156, 0.14859068298339845, 0.14915769958496095, 0.14903091430664062, 0.14839776611328126, 0.1489679412841797, 0.14991973876953124, 0.14869094848632813, 0.14928076171875, 0.1490370635986328, 0.14832231140136717, 0.1493975067138672, 0.14924919128417968, 0.14871034240722655, 0.149972900390625, 0.1496285400390625, 0.14870506286621094, 0.14879356384277342, 0.15167718505859376, 0.1489633026123047, 0.14870906066894532, 0.14868663024902343, 0.14885574340820312, 0.14886431884765625, 0.14913363647460937, 0.148629638671875, 0.14878924560546875, 0.14863063049316405, 0.14892877197265625, 0.14867222595214843, 0.14891702270507812, 0.148428955078125, 0.14850047302246094, 0.14818211364746095, 0.14813043212890625, 0.1492626190185547, 0.14876783752441405, 0.14903146362304687, 0.14881011962890625, 0.14862336730957032, 0.14835507202148437, 0.14837126159667968, 0.14952806091308593, 0.15098358154296876, 0.14858348083496092, 0.14821871948242188, 0.14834471130371094, 0.1480642547607422, 0.1488249969482422, 0.14818304443359376, 0.14829708862304689, 0.14906431579589843, 0.1476807098388672, 0.14768730163574217, 0.14904800415039063, 0.1503406982421875, 0.15102864074707031, 0.1487728576660156, 0.14776934814453124, 0.14785536193847656, 0.1481543731689453, 0.15176237487792968, 0.14841299438476563, 0.14772621154785157, 0.1480316162109375, 0.14856192016601563, 0.14825196838378907, 0.14816712951660158, 0.14846357727050782, 0.14897523498535156, 0.1480587158203125, 0.1484247283935547, 0.1483857879638672, 0.14871347045898436, 0.14871347045898436, 0.14920115661621094, 0.14758476257324218, 0.14891935729980468, 0.14966426086425783, 0.14807699584960937, 0.14752748107910157, 0.14834707641601563, 0.14875033569335938, 0.148094970703125, 0.15096627807617188, 0.14874214172363281, 0.1487667236328125, 0.1492725067138672, 0.14851487731933594, 0.14845709228515624, 0.1480625, 0.14875596618652343, 0.149512451171875, 0.15319485473632813, 
0.14843597412109374, 0.14783999633789063, 0.14840179443359375, 0.14834521484375, 0.14817893981933594, 0.14816188049316406, 0.14835148620605468, 0.14889590454101562, 0.14799026489257813, 0.14823869323730468, 0.14845907592773439, 0.1491922607421875, 0.1486835479736328, 0.14825881958007814, 0.14903091430664062, 0.14790042114257812, 0.1525964813232422]",tokens/s,6.704649291861959,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,14672.134144,7846.428672,0.0,7451.181056,7445.507072,s,1,32.599328125,32.599328125,0.0,32.599328125,32.599328125,32.599328125,32.599328125,[32.599328125],,kWh,0.0007451603443958343,8.218951153430724e-05,0.00028046911326399995,0.0011078189691941414,,MB,1326.424064,7997.423616,0.0,7581.20448,7570.843648,s,10,1.2462345962524415,0.12462345962524415,0.0007616768434001599,0.12444008255004882,0.12562794189453125,0.12605982818603514,0.12640533721923827,"[0.12415731048583985, 0.12649171447753907, 0.12409785461425782, 0.12390544128417968, 0.12442038726806641, 0.12445977783203124, 0.12467171478271484, 0.123998046875, 0.12450038146972656, 0.12553196716308593]",tokens/s,2054.187877385357,kWh,3.679309774947909e-06,4.0550177105870324e-07,2.442557509600002e-06,6.527369055606614e-06,tokens/kWh,39219476.91621811,MB,1346.707456,8010.006528,0.0,7593.787392,7514.46784,s,10,73.59379541015625,7.359379541015626,0.0465667174478991,7.388698486328125,7.402563427734375,7.404894506835937,7.406759370117188,"[7.2944599609375, 7.31129345703125, 7.30241943359375, 7.30375048828125, 7.3937861328125, 7.3947548828125, 7.4072255859375, 7.40204541015625, 7.40044921875, 7.38361083984375]",tokens/s,8.560504271981838,kWh,0.0002156417623467185,2.3786567240443014e-05,0.00011003842136400003,0.0003494667509511616,tokens/kWh,180274.66083262474,,s,630,73.59000663757323,0.11680953434535435,0.0012684493239230965,0.11676417541503906,0.1180474578857422,0.11875861663818359,0.12120064575195315,"[0.1160970230102539, 0.11539778900146484, 0.11523977661132813, 0.11485711669921875, 0.11529408264160156, 0.1157266845703125, 0.11562582397460938, 0.11551209259033203, 0.11531801605224609, 0.11615103912353515, 0.1164362564086914, 0.11628546905517578, 0.11953740692138672, 0.11644000244140625, 0.11668479919433594, 0.11681110382080079, 0.11589862060546875, 0.11515872192382813, 0.11577827453613282, 0.11583798217773437, 0.11587273406982422, 0.11573862457275391, 0.11602070617675782, 0.11547907257080078, 0.11569273376464843, 0.1153257598876953, 0.11519363403320312, 0.11521027374267578, 0.11570604705810547, 0.11493981170654297, 0.1151468505859375, 0.11531263732910156, 0.11581439971923828, 0.11627315521240235, 0.11587789154052734, 0.11596800231933593, 0.11618099212646485, 0.11576319885253906, 0.11576064300537109, 0.11617740631103515, 0.11597824096679688, 0.11645868682861328, 0.11512300872802735, 0.11490211486816407, 0.1153647689819336, 0.11534035491943359, 0.11553478240966797, 0.11531673431396484, 0.1161872329711914, 0.11560540771484375, 0.11546009826660156, 0.11487436676025391, 0.1147883529663086, 0.11538636779785157, 0.11564236450195313, 0.115810302734375, 0.11586492919921874, 0.11569423675537109, 0.11648143768310547, 0.11596367645263672, 0.11632109069824219, 0.11588188934326171, 0.11652243041992187, 
0.11559958648681641, 0.11577126312255859, 0.11575103759765625, 0.12446310424804688, 0.11985100555419922, 0.11544985961914063, 0.11503164672851562, 0.11566941070556641, 0.11535715484619141, 0.11593782043457031, 0.11550243377685547, 0.11531740570068359, 0.11495833587646484, 0.11456511688232422, 0.11553997039794922, 0.11571405029296875, 0.11636121368408203, 0.11611545562744141, 0.11615792083740234, 0.11890128326416016, 0.11635667419433594, 0.115914306640625, 0.11584559631347656, 0.11539222717285157, 0.1154136962890625, 0.11470642852783203, 0.11508326721191406, 0.11558707427978515, 0.11582463836669922, 0.11537203216552734, 0.11501158142089844, 0.114957763671875, 0.11516326141357422, 0.11580381011962891, 0.11872745513916015, 0.1157734375, 0.11580210876464844, 0.11587789154052734, 0.11562159729003907, 0.1157573471069336, 0.11652294158935547, 0.11644486236572266, 0.11630630493164062, 0.11598841857910157, 0.11673197174072265, 0.11689564514160156, 0.11694646453857421, 0.11592787170410156, 0.11525615692138672, 0.1158153305053711, 0.11586764526367188, 0.11571360015869141, 0.11586399841308594, 0.11580210876464844, 0.11603148651123046, 0.11544287872314453, 0.11631903839111328, 0.11544976043701172, 0.11595785522460937, 0.11578163146972656, 0.11560047912597657, 0.11586809539794922, 0.11636579132080078, 0.12063958740234375, 0.11580620574951171, 0.11587993621826172, 0.11568675231933594, 0.11545043182373046, 0.1161503677368164, 0.11670527648925781, 0.11588813018798828, 0.1155047378540039, 0.11522908782958985, 0.11525059509277344, 0.11543126678466797, 0.11564723205566406, 0.11516722869873047, 0.11557859039306641, 0.11542556762695312, 0.11607596588134765, 0.11633312225341796, 0.11522870635986328, 0.1154017562866211, 0.11590316772460937, 0.11826006317138672, 0.11534429168701171, 0.11591500854492187, 0.11670809936523438, 0.11613763427734375, 0.12308223724365235, 0.11586771392822266, 0.1161674575805664, 0.11587753295898437, 0.11541129302978516, 0.11523619079589843, 0.11494185638427734, 0.11592985534667968, 0.11571609497070312, 0.11527718353271485, 0.11510848236083984, 0.11574009704589844, 0.115050048828125, 0.11455795288085938, 0.11506893157958985, 0.11540659332275391, 0.11612095642089844, 0.11551833343505859, 0.1152194595336914, 0.11522354888916016, 0.11610438537597656, 0.11565139007568359, 0.11557039642333984, 0.11688748931884765, 0.1160683822631836, 0.1158670425415039, 0.115687744140625, 0.11541951751708984, 0.11537961578369141, 0.11546502685546875, 0.11632434844970703, 0.11558697509765625, 0.11517961883544922, 0.11581132507324218, 0.11644620513916015, 0.11604358673095704, 0.11530825805664062, 0.115884033203125, 0.1155150375366211, 0.11602365112304687, 0.11723366546630859, 0.118742431640625, 0.11600518035888673, 0.11558505249023437, 0.11578755187988281, 0.11614051055908203, 0.11612979125976562, 0.1157201919555664, 0.11601510620117188, 0.11636326599121094, 0.11549696350097656, 0.11541407775878906, 0.11514329528808594, 0.11525971221923828, 0.11578777313232422, 0.11533513641357422, 0.11524918365478516, 0.11522563171386718, 0.11552867126464844, 0.11529183959960937, 0.11505286407470704, 0.1149296646118164, 0.11498700714111328, 0.11531407928466797, 0.11517193603515626, 0.11653289794921876, 0.1161397476196289, 0.11620825958251953, 0.11622185516357422, 0.11541228485107421, 0.11592985534667968, 0.11600665283203125, 0.115593505859375, 0.11508850860595703, 0.11580262756347656, 0.11598588562011719, 0.1156263656616211, 0.115259521484375, 0.11465155029296875, 0.1150750732421875, 0.11833920288085938, 0.11533916473388672, 
0.11480111694335937, 0.11530035400390624, 0.11538569641113282, 0.11636803436279297, 0.12136243438720704, 0.11543122863769531, 0.11616275024414062, 0.11640627288818359, 0.11667206573486329, 0.11683017730712891, 0.11662319946289063, 0.11666262054443359, 0.11630825805664062, 0.11626496124267578, 0.11599222564697266, 0.11607689666748047, 0.11547551727294922, 0.11573548889160157, 0.11709235382080078, 0.11671756744384766, 0.1162232666015625, 0.11608751678466797, 0.11692173004150391, 0.11735718536376953, 0.11658649444580078, 0.11714559936523437, 0.11715583801269532, 0.11665203094482422, 0.11717427062988281, 0.11775791931152343, 0.11707599639892578, 0.11885763549804687, 0.1222820816040039, 0.11848438262939454, 0.11823503875732422, 0.11810681915283203, 0.116959228515625, 0.1168015365600586, 0.11633663940429688, 0.11666429138183594, 0.11682614135742188, 0.11663900756835938, 0.11679337310791016, 0.11700838470458984, 0.11733414459228515, 0.11708377838134766, 0.11718873596191406, 0.1166890869140625, 0.11685855865478516, 0.11693148803710937, 0.1179054412841797, 0.11715167999267578, 0.11757353973388672, 0.11725977325439453, 0.11800556945800782, 0.1176514892578125, 0.11745696258544921, 0.11780982208251953, 0.11753852844238281, 0.11695346832275391, 0.11707494354248046, 0.11705862426757813, 0.11680329895019531, 0.11663916778564454, 0.11668316650390625, 0.11693408203125, 0.11776627349853516, 0.11778540802001954, 0.11876544189453125, 0.11841865539550782, 0.1168387222290039, 0.11676118469238281, 0.11687324523925781, 0.11694691467285157, 0.11694489288330077, 0.11751990509033203, 0.1189111328125, 0.11769586944580078, 0.1181910400390625, 0.11804672241210938, 0.11741923522949219, 0.12030483245849609, 0.1166917724609375, 0.1174398422241211, 0.11622467041015624, 0.11866512298583984, 0.1194415054321289, 0.12297529602050782, 0.11594802856445313, 0.11621830749511719, 0.11696947479248047, 0.11690188598632813, 0.11665135955810547, 0.11668956756591797, 0.11745689392089843, 0.11676191711425782, 0.1164909439086914, 0.11732768249511719, 0.11804179382324219, 0.11842662048339844, 0.11953094482421875, 0.11765408325195313, 0.11787264251708984, 0.11691622161865234, 0.117106689453125, 0.11670848083496094, 0.11662834930419921, 0.11696947479248047, 0.11744051361083985, 0.11699814605712891, 0.11681587219238282, 0.11670323181152344, 0.11663565063476562, 0.1160110092163086, 0.11685887908935547, 0.11691958618164063, 0.12249775695800781, 0.11722672271728515, 0.11702531433105469, 0.11724416351318359, 0.11734015655517578, 0.11726831817626954, 0.1171844482421875, 0.1175429458618164, 0.1173873291015625, 0.11728880310058594, 0.1178565444946289, 0.11738658905029296, 0.11741865539550782, 0.11638748931884765, 0.11703126525878907, 0.11667660522460938, 0.11683625793457031, 0.1166562271118164, 0.11718217468261719, 0.11685683441162109, 0.11740393829345704, 0.11711673736572266, 0.1167259521484375, 0.11688150024414062, 0.11669052886962891, 0.11746918487548828, 0.11708175659179687, 0.11732649230957032, 0.11793145751953125, 0.11750662231445312, 0.11720909118652344, 0.11738697814941407, 0.11788111877441407, 0.11718601226806641, 0.12376115417480468, 0.11826179504394531, 0.11685683441162109, 0.11694719696044922, 0.11667021179199219, 0.11736064147949218, 0.11687117004394532, 0.11719270324707032, 0.1178603515625, 0.11743231964111328, 0.11674419403076172, 0.11679129791259765, 0.11702272033691406, 0.11610470581054687, 0.11585692596435547, 0.12057494354248047, 0.11717779541015624, 0.11758649444580078, 0.11725823974609376, 0.1174835205078125, 0.1171981430053711, 
0.11946166229248047, 0.11959593963623047, 0.11737088012695313, 0.116662109375, 0.11677507019042968, 0.11657545471191406, 0.11682233428955079, 0.12017711639404297, 0.11693670654296875, 0.11661443328857422, 0.11666505432128907, 0.1173381118774414, 0.1170022430419922, 0.11962319946289063, 0.11792227172851563, 0.11783155059814453, 0.11711676788330078, 0.11718479919433594, 0.11718653106689453, 0.11725360107421876, 0.117279296875, 0.11751628875732421, 0.11868569946289062, 0.11850713348388672, 0.11791715240478516, 0.11707689666748047, 0.11686502075195312, 0.1164510726928711, 0.11749606323242187, 0.11872255706787109, 0.11727257537841797, 0.11672134399414062, 0.11801615905761718, 0.11646979522705078, 0.11642642974853516, 0.11918994903564453, 0.11587519836425782, 0.11606893157958985, 0.11765711975097656, 0.11686281585693359, 0.1177688980102539, 0.11875027465820312, 0.11722809600830078, 0.11769891357421874, 0.11735187530517578, 0.11708067321777343, 0.11793567657470704, 0.11757612609863281, 0.11745216369628907, 0.11718924713134765, 0.11638784027099609, 0.11650457763671875, 0.11673395538330078, 0.11695225524902343, 0.11682012939453125, 0.11732390594482422, 0.11703145599365235, 0.11689539337158203, 0.11748153686523438, 0.11619120025634766, 0.11635731506347656, 0.11675251007080079, 0.11703660583496094, 0.11725007629394531, 0.11968144226074219, 0.1180540771484375, 0.11764371490478516, 0.1177947540283203, 0.11672006225585937, 0.11757107543945312, 0.1176493148803711, 0.1170315170288086, 0.11792179107666016, 0.1172889633178711, 0.11765964508056641, 0.11678240203857422, 0.11712358093261718, 0.11653548431396485, 0.11950617980957032, 0.11724441528320312, 0.11785855865478516, 0.11759001922607422, 0.1176184310913086, 0.11697792053222657, 0.1198202896118164, 0.12078185272216797, 0.11705443572998046, 0.11721839904785156, 0.11765760040283203, 0.11830364990234375, 0.11834982299804687, 0.11831212615966796, 0.11783046722412109, 0.11733350372314454, 0.11734886169433593, 0.11864268493652344, 0.11715753936767578, 0.11757807922363281, 0.11783164978027344, 0.11652550506591797, 0.11598611450195312, 0.11654316711425781, 0.11622259521484375, 0.11635078430175781, 0.11686128234863281, 0.11669618988037109, 0.11802067565917969, 0.11777244567871094, 0.11728281402587891, 0.11799737548828125, 0.11738950347900391, 0.11746304321289062, 0.11924886322021484, 0.11832860565185546, 0.1176764144897461, 0.11802419281005859, 0.11768608093261719, 0.11771865844726563, 0.11719705963134766, 0.1166028823852539, 0.11690838623046874, 0.11774726104736329, 0.11714844512939453, 0.11816345977783203, 0.11805900573730468, 0.11757500457763671, 0.1172220458984375, 0.11679261016845703, 0.11747708892822266, 0.1173043212890625, 0.11807334136962891, 0.11791126251220703, 0.11809616088867188, 0.1183166732788086, 0.11856524658203126, 0.11776204681396485, 0.11726220703125, 0.11810623931884766, 0.11793612670898437, 0.1179625244140625, 0.11761824035644532, 0.11718927764892578, 0.1170749740600586, 0.116685791015625, 0.11656806182861328, 0.11691203308105469, 0.11700012969970704, 0.11725532531738281, 0.11878707122802734, 0.11782553863525391, 0.11744255828857422, 0.11743231964111328, 0.11689574432373047, 0.11685244750976563, 0.11709468841552734, 0.11779824066162109, 0.11746985626220703, 0.11810816192626954, 0.11797833251953126, 0.11742902374267578, 0.11738521575927735, 0.11725619506835938, 0.1168936996459961, 0.11785340881347656, 0.1165811538696289, 0.11707596588134765, 0.12080454254150391, 0.11922032165527344, 0.12016304016113281, 0.11584419250488281, 
0.11620854187011719, 0.11675852966308593, 0.11642211151123047, 0.11696380615234375, 0.11718252563476562, 0.11659839630126953, 0.11660313415527344, 0.11710671997070313, 0.11742813110351563, 0.11702828979492187, 0.1177910385131836, 0.11766419219970703, 0.11786201477050781, 0.11735798645019531, 0.11648480224609375, 0.11639427185058594, 0.11695906829833984, 0.11676643371582031, 0.11689183807373046, 0.1168939208984375, 0.11706781005859375, 0.11796275329589843, 0.11734754943847656, 0.11653814697265626, 0.11593523406982421, 0.11634194946289063, 0.11845305633544922, 0.11798732757568359, 0.11673395538330078, 0.11744051361083985, 0.1180014419555664, 0.1168016357421875, 0.11706380462646485, 0.11689075469970703, 0.11706813049316406, 0.11738365173339843, 0.11778054046630859, 0.11686707305908203, 0.11774732971191407, 0.116830078125, 0.11597465515136719, 0.11622783660888672, 0.11639389038085937, 0.11652928161621094, 0.11632867431640626, 0.11680915069580078, 0.11649827575683594, 0.11798191833496094, 0.11723571014404296, 0.11740723419189453, 0.11702092742919921, 0.11734239959716797, 0.11795257568359375, 0.11786649322509765, 0.11758592224121094]",tokens/s,8.560945008507955,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,7531.298816,3657.302016,0.0,3254.779904,3057.54368,s,1,17.415630859375,17.415630859375,0.0,17.415630859375,17.415630859375,17.415630859375,17.415630859375,[17.415630859375],,kWh,0.0002974271084167261,3.2801003584118835e-05,9.557896535200561e-05,0.0004258070773528505,,MB,4157.812736,3745.3824,0.0,3321.888768,3200.587264,s,10,1.1479020996093752,0.11479020996093751,0.0007186507990492991,0.11446708679199219,0.11591013793945312,0.11609761276245116,0.1162475926208496,"[0.11586847686767578, 0.11499568176269531, 0.11436105346679687, 0.11412995147705078, 0.11409606170654298, 0.11428329467773438, 0.11628508758544921, 0.11427900695800781, 0.11503036499023438, 0.1145731201171875]",tokens/s,2230.155342403463,kWh,3.363111316284422e-06,3.7089126613858554e-07,1.7374151830342436e-06,5.471417765457251e-06,tokens/kWh,46788604.155984394,MB,4162.060288,3789.422592,0.0,3363.831808,3181.191168,s,10,72.19289599609375,7.219289599609375,0.01002180787264135,7.215145751953125,7.23642548828125,7.238362158203125,7.239911494140625,"[7.2359951171875, 7.240298828125, 7.21896240234375, 7.21308984375, 7.21377734375, 7.21651416015625, 7.21215673828125, 7.20901318359375, 7.22077001953125, 7.212318359375]",tokens/s,8.726620414757821,kWh,0.00021138012724496115,2.331621251577546e-05,7.832459331096311e-05,0.0003130209330716997,tokens/kWh,201264.49493896752,,s,630,72.19060464477542,0.11458826134091332,0.001228669486692265,0.11432360076904297,0.11562472610473633,0.11670873718261718,0.11873630676269532,"[0.11381782531738281, 0.11385507202148437, 0.11406304168701172, 0.11394898986816407, 0.11433798217773437, 0.11497042846679688, 0.11523919677734375, 0.11516896057128906, 
0.11585964965820313, 0.1153016357421875, 0.11472752380371094, 0.1141760025024414, 0.11468800354003907, 0.11468787384033204, 0.11429695892333984, 0.11517132568359376, 0.11490303802490234, 0.11449494171142578, 0.11497055816650391, 0.11554192352294922, 0.11482387542724609, 0.11554541015625, 0.11471532440185547, 0.11491942596435546, 0.11484703826904297, 0.11526169586181641, 0.11542176055908203, 0.11670658874511719, 0.1145898208618164, 0.11433404541015625, 0.11506086730957031, 0.11559923553466797, 0.11747049713134766, 0.11744342041015625, 0.11585330963134766, 0.11518576049804688, 0.11527388763427734, 0.11484339141845704, 0.11489059448242188, 0.11501932525634766, 0.11465519714355468, 0.11415615844726562, 0.11508303833007813, 0.11380265808105469, 0.11363337707519532, 0.11460681915283204, 0.11429676818847656, 0.114228515625, 0.11428268432617188, 0.12361510467529296, 0.113908447265625, 0.11450892639160157, 0.11342323303222657, 0.11389952087402344, 0.11378598022460938, 0.1141871337890625, 0.11367628479003906, 0.11423744201660156, 0.11414118194580078, 0.11457465362548828, 0.11364752197265625, 0.11365805053710938, 0.1137239990234375, 0.11561392211914062, 0.11432733154296874, 0.114735107421875, 0.11426528167724609, 0.11473798370361328, 0.11457353973388672, 0.11466515350341797, 0.11429488372802735, 0.11391363525390626, 0.11446425628662109, 0.11364425659179687, 0.11374002838134765, 0.11563597106933594, 0.11422284698486328, 0.1142577896118164, 0.1133095703125, 0.11652130889892579, 0.11361705780029296, 0.11357708740234375, 0.11615519714355468, 0.11596102142333985, 0.11566780853271484, 0.11419827270507812, 0.11430652618408203, 0.11465606689453126, 0.11503788757324218, 0.11506105804443359, 0.11580825805664062, 0.11434803009033204, 0.11668460845947265, 0.11622223663330078, 0.11675638580322266, 0.11507727813720703, 0.11516035461425782, 0.11475164794921874, 0.11854684448242188, 0.11466780853271484, 0.11462131500244141, 0.1150913314819336, 0.11831542205810547, 0.11465580749511718, 0.11516928100585938, 0.11512847900390626, 0.11486192321777344, 0.11543059539794921, 0.11605046081542969, 0.11451360321044922, 0.1143721923828125, 0.11487744140625, 0.11366400146484375, 0.1142225570678711, 0.11370716857910156, 0.11398941040039062, 0.1147449951171875, 0.11464704132080078, 0.11910444641113281, 0.11474944305419922, 0.11576934051513672, 0.11375823974609375, 0.11381552124023438, 0.1135833282470703, 0.11438365173339844, 0.11366812896728516, 0.113512451171875, 0.1143333740234375, 0.11491484832763672, 0.1145864028930664, 0.11537612915039062, 0.11449549102783203, 0.11389750671386718, 0.11458668518066406, 0.11735340881347656, 0.1150702362060547, 0.11500605010986328, 0.11456642913818359, 0.11494646453857423, 0.11406172943115235, 0.11440121459960938, 0.11431536102294922, 0.11476374053955078, 0.1140467529296875, 0.11392646026611328, 0.1156230697631836, 0.11444902038574219, 0.11442803192138672, 0.11406336212158204, 0.11415907287597657, 0.1143792953491211, 0.11415699005126953, 0.11435794830322266, 0.11412364959716798, 0.11398770904541015, 0.11430079650878906, 0.11408780670166016, 0.11390764617919921, 0.11352822113037109, 0.11388143920898437, 0.11389488220214844, 0.11710137939453125, 0.11452185821533203, 0.11450819396972656, 0.11397097778320313, 0.11585734558105469, 0.1136661148071289, 0.1142294692993164, 0.11654975891113281, 0.11680883026123047, 0.11516944122314453, 0.11477053070068359, 0.11417212677001953, 0.11454032135009766, 0.11461222076416015, 0.11406658935546875, 0.11383689880371094, 0.11370291137695313, 0.11352607727050781, 
0.11368265533447265, 0.11404335784912109, 0.11445452880859375, 0.11514608001708984, 0.11489347076416015, 0.1150887680053711, 0.11651728057861328, 0.11499529266357422, 0.11487449645996094, 0.11394796752929688, 0.11409283447265625, 0.11427657318115235, 0.11389929962158203, 0.11417008209228516, 0.11426201629638671, 0.11396505737304688, 0.1139056625366211, 0.11386208343505859, 0.11350278472900391, 0.1139056625366211, 0.11466476440429688, 0.1137669448852539, 0.11389139556884766, 0.11511203002929687, 0.11815731048583984, 0.11469209289550782, 0.1146275863647461, 0.11470649719238281, 0.11429574584960937, 0.11454054260253907, 0.11396044921875, 0.11361126708984375, 0.11352278137207031, 0.1134775390625, 0.1143410873413086, 0.11382624053955079, 0.11370326232910156, 0.11443609619140625, 0.11441766357421874, 0.11412274932861328, 0.11409580993652343, 0.11697100830078125, 0.11795142364501954, 0.11436134338378906, 0.11402041625976563, 0.11366217803955078, 0.11411219024658203, 0.1141479034423828, 0.11419225311279296, 0.1150219497680664, 0.11546249389648437, 0.11495833587646484, 0.11510578918457032, 0.11450559997558593, 0.11457138824462891, 0.1148231658935547, 0.11430508422851562, 0.11415958404541016, 0.11393958282470704, 0.11450572967529297, 0.11479904174804688, 0.11497638702392578, 0.11456578826904297, 0.1145038070678711, 0.11409366607666016, 0.1140525131225586, 0.11356774139404296, 0.11330448150634766, 0.11309910583496094, 0.11857206726074218, 0.1151864013671875, 0.113925537109375, 0.11563180541992188, 0.11436441802978516, 0.113870849609375, 0.11649842834472657, 0.11445657348632812, 0.11477340698242187, 0.11428514862060547, 0.11449549102783203, 0.11470233917236328, 0.11428988647460937, 0.11471542358398437, 0.11435228729248047, 0.1140360336303711, 0.11393692779541016, 0.11443814086914063, 0.11410435485839844, 0.11429065704345703, 0.11531372833251953, 0.11422185516357422, 0.11479670715332031, 0.11477996826171875, 0.11447257232666015, 0.11437731170654297, 0.1142742691040039, 0.11391929626464843, 0.11450627136230469, 0.11480032348632813, 0.11481049346923829, 0.11995836639404298, 0.1145754852294922, 0.11565424346923828, 0.1146022720336914, 0.11488182067871094, 0.11484438323974609, 0.1145130844116211, 0.11463148498535156, 0.11432160186767579, 0.11483321380615234, 0.11619705963134766, 0.11441184234619141, 0.11497657775878906, 0.11382189178466796, 0.11369062042236328, 0.11487641906738282, 0.11363664245605469, 0.113859619140625, 0.11384595489501953, 0.11343052673339844, 0.11332790374755859, 0.11319667053222657, 0.11302713775634765, 0.11306441497802734, 0.11380332946777344, 0.11336294555664063, 0.11382300567626953, 0.11486003112792968, 0.11833622741699219, 0.11483251190185546, 0.11497695922851563, 0.11396985626220703, 0.11416140747070312, 0.11380147552490234, 0.11381129455566406, 0.11375183868408204, 0.11390086364746094, 0.11404354858398437, 0.11450345611572266, 0.11371132659912109, 0.11418374633789062, 0.11500383758544921, 0.1148821792602539, 0.11377696228027344, 0.11378889465332032, 0.11368048095703125, 0.11363533020019531, 0.11310249328613281, 0.1135, 0.1143362579345703, 0.113636962890625, 0.11409859466552734, 0.11389337921142578, 0.11393638610839844, 0.11586911773681641, 0.11547920227050781, 0.11412451171875, 0.11524729919433593, 0.11413420867919923, 0.11379385375976563, 0.11412822723388671, 0.12475981140136719, 0.11477891540527344, 0.11557081604003906, 0.1139978256225586, 0.11371059417724609, 0.11384233856201172, 0.11424748992919923, 0.11467420959472656, 0.11492662048339844, 0.11847984313964843, 
0.11489894104003906, 0.11457536315917968, 0.11562393951416015, 0.11398143768310547, 0.11470028686523437, 0.1140162582397461, 0.11449753570556641, 0.113797119140625, 0.1136246109008789, 0.11480892944335938, 0.11427468872070312, 0.11418358612060547, 0.11407740783691406, 0.11444012451171875, 0.11482758331298829, 0.11456781005859375, 0.11386016082763672, 0.11358972930908204, 0.11406873321533204, 0.11395568084716796, 0.11420307159423829, 0.11471507263183593, 0.11383939361572265, 0.11333910369873047, 0.11444019317626954, 0.11434819030761718, 0.11671049499511718, 0.11491580963134766, 0.11454118347167969, 0.11494982147216797, 0.11514707183837891, 0.11463065338134766, 0.11434627532958984, 0.11525279998779298, 0.11405923461914062, 0.11577747344970703, 0.11425939178466797, 0.11409814453125, 0.11461478424072266, 0.11438950347900391, 0.1148470687866211, 0.11389798736572265, 0.11350534057617187, 0.1172366714477539, 0.11387232208251953, 0.11397792053222656, 0.11369385528564453, 0.11390847778320312, 0.11364387512207032, 0.11351145935058594, 0.11354557037353516, 0.11318873596191406, 0.11311113739013672, 0.1138151397705078, 0.11462502288818359, 0.11387935638427735, 0.11405267333984374, 0.11382345581054687, 0.11382367706298828, 0.11385097503662109, 0.11321772766113282, 0.11315609741210937, 0.11320953369140625, 0.11372319793701172, 0.113870849609375, 0.11385651397705078, 0.11354930877685547, 0.11315200042724609, 0.11403199768066406, 0.11423731231689453, 0.11621891021728516, 0.11492291259765625, 0.11555257415771485, 0.11566835021972656, 0.11404147338867188, 0.1147391357421875, 0.11455452728271484, 0.12585958099365235, 0.11518019104003906, 0.11378038024902344, 0.11416815948486328, 0.11448934173583984, 0.11449120330810547, 0.11443218994140625, 0.11401795196533203, 0.11420297241210937, 0.11381337738037109, 0.11435024261474609, 0.11559113311767578, 0.11466556549072265, 0.11530435180664063, 0.11417769622802734, 0.11445059204101563, 0.11391645050048828, 0.11508716583251953, 0.11407686614990234, 0.11386367797851563, 0.11437664031982422, 0.11490720367431641, 0.1141739501953125, 0.11408313751220703, 0.11391030120849609, 0.11307433319091797, 0.11303321838378906, 0.11290790557861329, 0.11254972839355469, 0.11394483184814454, 0.11450556945800781, 0.11472940826416016, 0.11409410858154297, 0.1144566421508789, 0.11799494171142579, 0.11555875396728515, 0.11426016235351563, 0.11372537231445312, 0.11386463928222657, 0.1161805419921875, 0.11427276611328124, 0.11506797027587891, 0.11534162902832032, 0.1142872314453125, 0.11432454681396484, 0.11460873413085937, 0.11353510284423828, 0.1141803207397461, 0.11405926513671875, 0.11377180480957032, 0.11332208251953126, 0.11395507049560546, 0.1178884506225586, 0.11880339050292969, 0.11371456146240234, 0.11366194915771484, 0.11459442901611328, 0.11432256317138671, 0.1152152328491211, 0.11513760375976563, 0.11464390563964844, 0.11444838714599609, 0.11396505737304688, 0.11377458953857422, 0.114919677734375, 0.11551513671875, 0.11485388946533204, 0.11468800354003907, 0.11459363555908203, 0.11394064331054687, 0.1137807388305664, 0.11368978881835938, 0.11330009460449218, 0.11479206085205078, 0.11384435272216797, 0.11438057708740235, 0.11368675231933593, 0.11486214447021484, 0.11906041717529296, 0.11457331085205077, 0.11366729736328125, 0.11566508483886719, 0.11524681854248046, 0.11420575714111328, 0.11368975830078125, 0.11728288269042969, 0.11398841857910157, 0.11489600372314453, 0.11392915344238282, 0.11415440368652344, 0.11431619262695313, 0.11376409912109375, 0.11410038757324219, 
0.11405516815185547, 0.11354425811767578, 0.11357484436035156, 0.11477196502685547, 0.11390557098388672, 0.11371536254882812, 0.11431314849853516, 0.11358822631835938, 0.11482105255126954, 0.11454182434082032, 0.11512710571289063, 0.11536589050292968, 0.11417804718017578, 0.11394380950927735, 0.11422783660888672, 0.11432265472412109, 0.11411459350585937, 0.11533372497558594, 0.11429097747802734, 0.11399987030029297, 0.11380941009521485, 0.114155517578125, 0.11400601959228515, 0.11425555419921875, 0.11405343627929687, 0.11431334686279297, 0.11516886138916016, 0.11668009948730469, 0.11490188598632813, 0.11609088134765624, 0.1147014389038086, 0.11452057647705079, 0.11448553466796875, 0.11472496032714843, 0.11645951843261719, 0.11510928344726562, 0.11444652557373047, 0.11405977630615234, 0.11392604827880859, 0.11371238708496094, 0.11477273559570313, 0.11495184326171876, 0.11418863677978516, 0.1145059814453125, 0.11432665252685546, 0.11379776000976563, 0.11726563262939453, 0.11398348999023437, 0.11402854156494141, 0.11412073516845703, 0.11404898834228516, 0.1141673583984375, 0.11428076934814453, 0.11457753753662109, 0.11384025573730469, 0.11399491119384765, 0.11398422241210937, 0.11455104064941406, 0.1142118682861328, 0.11505967712402344, 0.11351628875732422, 0.1151562271118164, 0.11372374725341797, 0.11381990051269532, 0.1142232666015625, 0.11379420471191407, 0.11432637023925782, 0.11382514953613282, 0.11532713317871093, 0.11530899047851563, 0.1155338592529297, 0.11820188903808594, 0.11697792053222657, 0.11570607757568359, 0.11646771240234376, 0.11440716552734374, 0.11382195281982421, 0.11461837005615234, 0.11767990112304688, 0.11560982513427734, 0.11445833587646484, 0.11370320129394532, 0.1129489288330078, 0.1135248031616211, 0.11393373107910157, 0.11373465728759766, 0.11358963012695313, 0.11765340423583984, 0.11373734283447266, 0.11354032135009766, 0.1140277099609375, 0.11342848205566407, 0.11453308868408203, 0.11363426971435547, 0.11854524993896484, 0.11475558471679688, 0.11483036804199219, 0.11464182281494141, 0.11445180511474609, 0.11426806640625, 0.11392819213867188, 0.11404370880126953, 0.11416950225830078, 0.11403298950195312, 0.11410390472412109, 0.11328710174560547, 0.11368086242675782, 0.11350220489501953, 0.11311923217773437, 0.11339571380615235]",tokens/s,8.726897400292028,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,26318.962688,13989.96992,0.0,13587.447808,13583.186432,s,1,55.34729296875,55.34729296875,0.0,55.34729296875,55.34729296875,55.34729296875,55.34729296875,[55.34729296875],,kWh,0.0013920920720000216,0.0001535511198404562,0.0004511117497780076,0.0019967549416184853,,MB,1345.155072,14386.331648,0.0,13962.838016,13923.483136,s,10,1.9097643890380858,0.19097643890380858,0.0005021179267498162,0.19099681854248046,0.19163448181152345,0.19166204376220702,0.1916840933227539,"[0.1913205108642578, 0.19074166870117187, 0.19168960571289062, 
0.19035565185546874, 0.19025669860839844, 0.19131721496582033, 0.1904647979736328, 0.19125196838378905, 0.19162835693359376, 0.1907379150390625]",tokens/s,1340.4794930171602,kWh,5.646703023477182e-06,6.227286364467271e-07,3.7481867592306758e-06,1.0017618419154586e-05,tokens/kWh,25554976.17183192,MB,1382.252544,14390.525952,0.0,13964.935168,13852.182528,s,10,94.3077041015625,9.43077041015625,0.046619780699759285,9.42221435546875,9.50648251953125,9.514190966796875,9.520357724609374,"[9.4504501953125, 9.50476953125, 9.4047763671875, 9.38260546875, 9.4242626953125, 9.39108984375, 9.3774677734375, 9.5218994140625, 9.420166015625, 9.430216796875]",tokens/s,6.680260176003608,kWh,0.0002797728901386045,3.086052498503738e-05,0.00013952623341576846,0.0004501596485394103,tokens/kWh,139950.3491803631,,s,630,94.30469683837894,0.1496899949815538,0.0017965715031097007,0.14918150329589844,0.15230074462890625,0.15333840866088866,0.15517699142456054,"[0.15041740417480468, 0.14939251708984375, 0.14874057006835936, 0.14951260375976563, 0.1500806427001953, 0.14955955505371094, 0.14875433349609374, 0.14912937927246095, 0.14861158752441406, 0.14903205871582031, 0.14917318725585937, 0.15006924438476563, 0.14937408447265624, 0.14923660278320314, 0.1492684783935547, 0.14939292907714843, 0.14995411682128906, 0.15037327575683593, 0.15323280334472655, 0.15014274597167968, 0.14947164916992187, 0.1517935028076172, 0.14983123779296875, 0.15084402465820312, 0.15091139221191407, 0.15085154724121094, 0.15062864685058594, 0.14987583923339845, 0.14961929321289064, 0.15047853088378907, 0.15106297302246094, 0.14904415893554687, 0.1490771484375, 0.15087799072265626, 0.1490186309814453, 0.1543004150390625, 0.15008892822265624, 0.14938192749023438, 0.1495650634765625, 0.149604736328125, 0.14898371887207032, 0.14955078125, 0.14969509887695312, 0.14934608459472656, 0.1495920715332031, 0.15251580810546875, 0.14998565673828124, 0.14994473266601563, 0.14926438903808595, 0.14896563720703124, 0.1492618865966797, 0.14929037475585938, 0.15378720092773437, 0.1496821746826172, 0.15001190185546875, 0.14892031860351562, 0.14966989135742187, 0.1516820526123047, 0.14964752197265624, 0.1498675231933594, 0.150069091796875, 0.15037644958496094, 0.1502445068359375, 0.15060694885253906, 0.15039170837402344, 0.15003033447265626, 0.15060557556152343, 0.15013504028320312, 0.15048696899414063, 0.1493023681640625, 0.15017453002929687, 0.14941110229492188, 0.14986668395996094, 0.14960514831542968, 0.15005305480957032, 0.15386361694335937, 0.14994256591796876, 0.1499149475097656, 0.15000674438476563, 0.1501931915283203, 0.1500433349609375, 0.15010768127441407, 0.15045628356933594, 0.1551856689453125, 0.1521210632324219, 0.15109353637695314, 0.15050143432617188, 0.1504808349609375, 0.15113626098632812, 0.15174656677246093, 0.1510214080810547, 0.15026713562011718, 0.14986550903320311, 0.15153958129882814, 0.1504337921142578, 0.14937702941894532, 0.1491735382080078, 0.14949591064453124, 0.1494857940673828, 0.1490925750732422, 0.15398316955566407, 0.15135130310058595, 0.1503787841796875, 0.15001324462890625, 0.1517161560058594, 0.1507552032470703, 0.15109523010253906, 0.1519598388671875, 0.15120384216308594, 0.14991944885253905, 0.14999288940429686, 0.15097666931152343, 0.15158546447753907, 0.15017575073242187, 0.15130802917480468, 0.1499866485595703, 0.14971177673339844, 0.1532579803466797, 0.150138427734375, 0.1497788848876953, 0.15086181640625, 0.15266610717773438, 0.15689523315429688, 0.15189558410644532, 0.15316015625, 0.1524715576171875, 
0.1510941162109375, 0.15482266235351563, 0.1506376953125, 0.1497254333496094, 0.15248194885253907, 0.14907235717773437, 0.15015525817871095, 0.15190328979492188, 0.1490334014892578, 0.14889216613769532, 0.1498726348876953, 0.14942991638183595, 0.14896275329589845, 0.1483555908203125, 0.14955081176757812, 0.14849095153808595, 0.14852301025390624, 0.15027818298339843, 0.14829791259765626, 0.14873068237304687, 0.14923849487304688, 0.1487423095703125, 0.1507392578125, 0.14890570068359374, 0.14887327575683593, 0.14838169860839845, 0.1487984619140625, 0.14974429321289062, 0.14959767150878905, 0.14959910583496094, 0.15051593017578124, 0.15094761657714845, 0.149391357421875, 0.1484226531982422, 0.14891007995605468, 0.14793093872070312, 0.14807423400878905, 0.14794181823730468, 0.14851388549804687, 0.14835804748535156, 0.15091506958007814, 0.14911488342285156, 0.14819071960449218, 0.14852125549316406, 0.14939773559570313, 0.14909849548339843, 0.1486103973388672, 0.14780482482910157, 0.14841856384277344, 0.14761734008789062, 0.1478125762939453, 0.14985455322265626, 0.14848316955566407, 0.14846389770507812, 0.14870579528808595, 0.14898381042480469, 0.1494466552734375, 0.14943763732910156, 0.14858912658691406, 0.14864604187011718, 0.14789804077148438, 0.14887977600097657, 0.1516561279296875, 0.1496655731201172, 0.14868505859375, 0.1480625, 0.1486988525390625, 0.14800028991699218, 0.14897200012207032, 0.14861094665527344, 0.14813555908203124, 0.14856448364257813, 0.14798233032226563, 0.15117744445800782, 0.14847059631347656, 0.14839088439941406, 0.14754591369628905, 0.1476345977783203, 0.14796368408203125, 0.14781414794921874, 0.14772454833984375, 0.14810316467285156, 0.14836524963378905, 0.14785667419433593, 0.14779244995117188, 0.1477941436767578, 0.14768333435058595, 0.14849749755859376, 0.14988995361328125, 0.1483714599609375, 0.14785267639160157, 0.14786653137207031, 0.14818415832519533, 0.14851347351074218, 0.148712890625, 0.1478428192138672, 0.14913731384277343, 0.14876255798339844, 0.1485926971435547, 0.1476862335205078, 0.14912106323242189, 0.14858236694335938, 0.14831027221679688, 0.14859852600097656, 0.15301632690429687, 0.14877839660644532, 0.1514727325439453, 0.14919708251953126, 0.1493335418701172, 0.14878851318359376, 0.14892095947265624, 0.1486359405517578, 0.1491455993652344, 0.14884658813476562, 0.15128521728515626, 0.15332406616210936, 0.15029231262207032, 0.15020252990722657, 0.15007907104492188, 0.14929580688476562, 0.14931529235839844, 0.14877626037597655, 0.15185171508789064, 0.15024517822265626, 0.14805625915527343, 0.14923365783691406, 0.15221087646484374, 0.15082902526855468, 0.15045228576660155, 0.15085804748535156, 0.15015097045898437, 0.14855197143554688, 0.14993177795410156, 0.1493853759765625, 0.14894490051269532, 0.14928691101074218, 0.14920109558105468, 0.14848912048339843, 0.1486160888671875, 0.14872781372070312, 0.1491394805908203, 0.14923158264160155, 0.14847386169433593, 0.15228060913085936, 0.14951881408691406, 0.14923301696777344, 0.14949189758300782, 0.14934422302246095, 0.1498506622314453, 0.14920008850097657, 0.1496685791015625, 0.14902476501464842, 0.1494295654296875, 0.14864837646484375, 0.1535226287841797, 0.14906553649902343, 0.14930645751953125, 0.14958070373535157, 0.14907955932617187, 0.14893894958496093, 0.14965997314453125, 0.14938931274414063, 0.14938050842285155, 0.14886349487304687, 0.14933955383300782, 0.14934684753417968, 0.14889385986328124, 0.1485908203125, 0.1489261474609375, 0.1480602569580078, 0.14891148376464844, 0.14863145446777343, 
0.151737060546875, 0.14898162841796875, 0.14944677734375, 0.1498460235595703, 0.14962483215332031, 0.1498533172607422, 0.14966233825683595, 0.14969882202148438, 0.14878924560546875, 0.1487433624267578, 0.14994105529785157, 0.14998527526855468, 0.15072373962402344, 0.14935494995117188, 0.15007785034179688, 0.14954290771484374, 0.15229951477050782, 0.15072796630859375, 0.1495879364013672, 0.150682373046875, 0.1521214141845703, 0.14877920532226563, 0.14813487243652343, 0.14832109069824218, 0.1496697540283203, 0.14981333923339843, 0.14996791076660157, 0.14918345642089845, 0.14900633239746094, 0.14933197021484376, 0.14916812133789062, 0.15110543823242187, 0.1497170867919922, 0.15309773254394532, 0.15037869262695314, 0.14931292724609374, 0.14853007507324217, 0.14913270568847656, 0.1482225341796875, 0.14865000915527343, 0.14803762817382812, 0.14819941711425783, 0.14789018249511718, 0.14854045104980468, 0.14840066528320311, 0.14882450866699218, 0.148527099609375, 0.15026077270507812, 0.14838636779785155, 0.148550048828125, 0.14846322631835937, 0.14829708862304689, 0.14817587280273437, 0.14878924560546875, 0.14842469787597656, 0.1489574432373047, 0.148627197265625, 0.14907391357421876, 0.1483120574951172, 0.1482629089355469, 0.1479923858642578, 0.14774092102050781, 0.14795919799804688, 0.14772662353515625, 0.14799618530273437, 0.14859494018554686, 0.1522073974609375, 0.15031100463867186, 0.14871183776855468, 0.1486663055419922, 0.14873394775390625, 0.148748291015625, 0.1486499786376953, 0.148756103515625, 0.1487640380859375, 0.14851353454589844, 0.14912947082519531, 0.14991346740722655, 0.148671875, 0.1493672332763672, 0.15362057495117187, 0.14916188049316406, 0.14910237121582032, 0.15312042236328124, 0.15428256225585937, 0.15253855895996093, 0.15241830444335938, 0.14953939819335937, 0.14887554931640626, 0.1486719055175781, 0.14860115051269532, 0.14898611450195312, 0.1485452423095703, 0.14884048461914062, 0.14841651916503906, 0.14832783508300781, 0.1483495330810547, 0.14793113708496095, 0.14746316528320313, 0.14734617614746093, 0.14794326782226563, 0.14741909790039062, 0.14732316589355468, 0.14738243103027343, 0.148068359375, 0.14776524353027343, 0.14774041748046876, 0.14831245422363282, 0.14747430419921875, 0.14899842834472657, 0.14846266174316405, 0.14804261779785155, 0.14832211303710938, 0.1525882568359375, 0.14914274597167967, 0.14753257751464843, 0.147967041015625, 0.14799961853027344, 0.14832582092285157, 0.14746896362304687, 0.14701295471191406, 0.1493546600341797, 0.14927679443359376, 0.14900869750976561, 0.1483873291015625, 0.14896127319335936, 0.14878703308105468, 0.14828851318359376, 0.14846124267578126, 0.14751539611816405, 0.14768537902832032, 0.14774386596679687, 0.15487257385253905, 0.14752496337890625, 0.14805599975585937, 0.14860374450683594, 0.14745523071289063, 0.14799334716796875, 0.14809088134765624, 0.14885682678222656, 0.14851481628417967, 0.14895071411132813, 0.14934764099121095, 0.150212890625, 0.14880752563476562, 0.1482875213623047, 0.1506612548828125, 0.14926229858398438, 0.14812982177734374, 0.14818304443359376, 0.14763555908203124, 0.14810179138183593, 0.1514477081298828, 0.14813577270507813, 0.1487608642578125, 0.14852479553222656, 0.147900390625, 0.1484206085205078, 0.14845106506347655, 0.14819354248046876, 0.14777548217773437, 0.1522423095703125, 0.1521642303466797, 0.15164950561523438, 0.15309901428222655, 0.15420623779296874, 0.1561631622314453, 0.15507664489746092, 0.15294256591796876, 0.15263221740722657, 0.1530224609375, 0.15282290649414063, 
0.15211126708984374, 0.1522650604248047, 0.1525518035888672, 0.1528115234375, 0.15181805419921876, 0.15232838439941407, 0.15288525390625, 0.15628492736816407, 0.1533501434326172, 0.15231181335449218, 0.15267634582519532, 0.1521131591796875, 0.15292413330078125, 0.15203535461425782, 0.15328604125976564, 0.15204739379882812, 0.15364524841308594, 0.15366549682617187, 0.1566091766357422, 0.15393954467773438, 0.1519740447998047, 0.15225881958007811, 0.14916812133789062, 0.14988003540039063, 0.14891725158691407, 0.15192182922363281, 0.1481381072998047, 0.14821820068359376, 0.15096588134765626, 0.14909904479980468, 0.14982345581054687, 0.14924761962890626, 0.1485478057861328, 0.14886521911621095, 0.15184527587890626, 0.149544677734375, 0.14953868103027343, 0.14910067749023437, 0.14819033813476562, 0.15044284057617188, 0.14834902954101561, 0.14842681884765624, 0.14929049682617188, 0.14915008544921876, 0.15014198303222656, 0.15126217651367188, 0.15003648376464843, 0.1491573181152344, 0.14948101806640626, 0.14944937133789063, 0.1496763916015625, 0.15013478088378907, 0.14847795104980469, 0.14944869995117188, 0.1542800598144531, 0.15011007690429687, 0.149027099609375, 0.15328215026855468, 0.1494652099609375, 0.15017984008789062, 0.14879446411132813, 0.14917523193359375, 0.14855165100097656, 0.1497764434814453, 0.1510214080810547, 0.14893417358398436, 0.14858476257324219, 0.1503541717529297, 0.14921728515625, 0.1488297576904297, 0.1536324462890625, 0.14905596923828124, 0.15110086059570313, 0.14883106994628906, 0.1480925750732422, 0.14815676879882814, 0.14769357299804686, 0.1542615966796875, 0.14851437377929688, 0.14810258483886718, 0.1484866943359375, 0.14865606689453126, 0.14950387573242188, 0.14888960266113282, 0.15368978881835937, 0.14805657958984375, 0.14774432373046875, 0.14801295471191406, 0.14758390808105468, 0.14813372802734376, 0.14806822204589845, 0.15583807373046876, 0.1482472381591797, 0.14767411804199218, 0.1475161590576172, 0.1480972442626953, 0.14748854064941405, 0.15515574645996094, 0.14818803405761719, 0.14882611083984376, 0.14925628662109375, 0.14799241638183594, 0.1482520294189453, 0.14866275024414063, 0.15346697998046874, 0.15272169494628907, 0.1483221435546875, 0.14803353881835937, 0.14779318237304687, 0.14793714904785157, 0.15165965270996093, 0.15302201843261717, 0.148133056640625, 0.14851376342773437, 0.14847129821777344, 0.1484927978515625, 0.14870758056640626, 0.14894985961914062, 0.15428495788574217, 0.15027200317382813, 0.14938726806640626, 0.1496514587402344, 0.14917955017089843, 0.14946800231933594, 0.1486018829345703, 0.15311279296875, 0.14849920654296875, 0.14839811706542969, 0.14782051086425782, 0.14803558349609375, 0.14828944396972657, 0.14871498107910155, 0.15382797241210938, 0.14838902282714844, 0.148568359375, 0.14821434020996094, 0.1481338806152344, 0.15136505126953126, 0.15209516906738282, 0.15090290832519532, 0.1480396728515625, 0.1479905242919922, 0.14781234741210938, 0.14813909912109374, 0.14807504272460936, 0.1534730224609375, 0.14915420532226562, 0.14818038940429687, 0.14867308044433594, 0.1484206085205078, 0.14837954711914061, 0.14877705383300782, 0.1619058837890625, 0.14793603515625, 0.14815641784667968, 0.14785133361816405, 0.1512099151611328, 0.14869696044921876, 0.1489058837890625, 0.15437846374511718]",tokens/s,6.68047320145364,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,7490.351104,3372.089344,0.0,3024.093184,2936.943616,s,1,18.162431640625,18.162431640625,0.0,18.162431640625,18.162431640625,18.162431640625,18.162431640625,[18.162431640625],,kWh,0.00029560506117917,3.260017629614727e-05,9.87136900820057e-05,0.000426918927557323,,MB,4145.614848,3439.198208,0.0,3015.704576,3002.771456,s,10,1.003055046081543,0.10030550460815431,0.003330673186858748,0.09915217590332032,0.10150487823486327,0.1058387580871582,0.10930586196899415,"[0.09956095886230469, 0.09891613006591797, 0.09905382537841798, 0.10054179382324219, 0.09889997100830078, 0.09918758392333984, 0.099197021484375, 0.09911676788330079, 0.09840835571289062, 0.11017263793945313]",tokens/s,2552.2029025233433,kWh,2.901213017037995e-06,3.1995288087221023e-07,1.6526388358614999e-06,4.8738047337717046e-06,tokens/kWh,52525698.91159521,MB,4149.743616,3585.998848,0.0,3162.505216,3079.844864,s,10,60.11472705078125,6.011472705078125,0.01110856852859563,6.0147770996093755,6.02284208984375,6.024539697265625,6.025897783203125,"[6.0206083984375, 6.0155537109375, 6.01917041015625, 6.0262373046875, 5.99285888671875, 5.99465625, 6.0029501953125, 6.02246484375, 6.0062265625, 6.01400048828125]",tokens/s,10.479961082877653,kWh,0.000175370873971295,1.934409130904697e-05,6.995297400413906e-05,0.000264667939284481,tokens/kWh,238034.1199252087,,s,630,60.10912327575682,0.0954113067869156,0.0010658056268329282,0.0951659812927246,0.0963664047241211,0.09752816848754883,0.10005681846618653,"[0.09531801605224609, 0.09493497467041015, 0.09473987579345704, 0.09523017883300781, 0.09598973083496094, 0.0948513946533203, 0.09604841613769531, 0.0957328643798828, 0.09474201965332031, 0.09576678466796874, 0.09576758575439454, 0.09647209930419921, 0.09537075042724609, 0.09583980560302735, 0.09518524932861328, 0.09655350494384765, 0.09574748992919922, 0.09727855682373047, 0.09564569854736328, 0.09521561431884766, 0.09493913269042968, 0.09597481536865235, 0.09470217895507813, 0.09484233856201171, 0.09759737396240234, 0.09475132751464843, 0.09619251251220703, 0.09603072357177735, 0.09622732543945313, 0.09557933044433593, 0.09608889770507813, 0.09564979553222656, 0.09591321563720703, 0.09489692687988281, 0.09564157104492188, 0.09532358551025391, 0.09535321807861329, 0.09674745941162109, 0.09672525024414062, 0.09546300506591797, 0.09594239807128906, 0.09510569763183593, 0.09544060516357422, 0.09546575927734376, 0.09505792236328126, 0.09595494079589843, 0.09498726654052735, 0.0959816665649414, 0.09544796752929688, 0.09514189147949219, 0.09503734588623047, 0.0951153564453125, 0.09495967864990235, 0.09486255645751954, 0.09580934143066407, 0.0951223373413086, 0.09522994995117187, 0.0953318099975586, 0.09506665802001953, 0.0966328353881836, 0.09514713287353516, 0.09498508453369141, 0.09526608276367188, 0.09501910400390624, 0.09555398559570312, 0.09545913696289063, 0.09529363250732421, 
0.09555353546142578, 0.09556729888916016, 0.09506626892089844, 0.09526313781738281, 0.09523145294189453, 0.09497270202636719, 0.09495321655273438, 0.09540985870361328, 0.09537977600097657, 0.09555254364013673, 0.09774156951904298, 0.09549014282226563, 0.09543283081054688, 0.09529129791259766, 0.09801078033447265, 0.09683808135986328, 0.09496361541748047, 0.09502934265136719, 0.09534873962402343, 0.09539379119873047, 0.09494322967529296, 0.09566617584228515, 0.09554329681396484, 0.09529714965820313, 0.09459731292724609, 0.09581382751464844, 0.0946439666748047, 0.09504518127441407, 0.09472860717773438, 0.09768169403076171, 0.09472614288330078, 0.09488764953613281, 0.09442947387695312, 0.0952995834350586, 0.10008370971679688, 0.09457571411132812, 0.09496669006347656, 0.09496575927734376, 0.09454796600341797, 0.09512754821777344, 0.09548390197753906, 0.09483990478515625, 0.09660848236083984, 0.09571398162841797, 0.09497721862792968, 0.09518367767333985, 0.09496918487548828, 0.09491487884521485, 0.09474288177490234, 0.09528524780273437, 0.09605734252929687, 0.09721040344238281, 0.09533436584472656, 0.09528934478759765, 0.09553305816650391, 0.09544703674316406, 0.09521151733398438, 0.09566413116455078, 0.09526067352294922, 0.09563340759277343, 0.09538915252685547, 0.09552281951904297, 0.09544505310058594, 0.0956154556274414, 0.09547917175292969, 0.09513638305664063, 0.09560835266113281, 0.09530115509033203, 0.09558930969238282, 0.09536505889892578, 0.09564371490478515, 0.09568988800048828, 0.09738735961914062, 0.09605734252929687, 0.09581104278564453, 0.09514361572265626, 0.09545107269287109, 0.09554771423339843, 0.09487417602539062, 0.09468009948730469, 0.09539212799072265, 0.09500096130371094, 0.09537888336181641, 0.09610460662841797, 0.09601910400390624, 0.09546956634521485, 0.09556969451904297, 0.09549142456054688, 0.09494207763671875, 0.09542790222167968, 0.09507705688476563, 0.09569280242919923, 0.09546546936035157, 0.09590169525146484, 0.09590386962890625, 0.09521724700927735, 0.09545756530761719, 0.09527516937255859, 0.09495945739746094, 0.0952558364868164, 0.09462448120117188, 0.0953133773803711, 0.09517062377929687, 0.09516844940185547, 0.0953013153076172, 0.09499734497070313, 0.09543475341796875, 0.09536102294921875, 0.09517465972900391, 0.09476914978027344, 0.09578291320800782, 0.09557823944091796, 0.09531378936767577, 0.09541404724121094, 0.09923811340332031, 0.09708073425292969, 0.09593711853027344, 0.09516767883300781, 0.09718605041503907, 0.09511174774169921, 0.09495552062988281, 0.09525043487548829, 0.0951319351196289, 0.09571536254882812, 0.09533261108398437, 0.09576188659667968, 0.09540201568603515, 0.09504000091552735, 0.09556307220458984, 0.09535536193847656, 0.09638934326171875, 0.0958292465209961, 0.09817779541015625, 0.09588317108154297, 0.09602256011962891, 0.09649088287353516, 0.09688748931884765, 0.0955146255493164, 0.09555315399169922, 0.09536345672607421, 0.09523715209960938, 0.09508541107177734, 0.09622345733642577, 0.095567138671875, 0.09675603485107422, 0.09751347351074219, 0.09642329406738281, 0.09541673278808593, 0.09554179382324218, 0.09584844970703126, 0.09514598083496094, 0.0950203857421875, 0.09511360168457031, 0.09554351806640625, 0.09538742065429688, 0.09573948669433593, 0.09619526672363281, 0.09596479797363282, 0.09506473541259766, 0.09570275115966796, 0.09554329681396484, 0.09524160003662109, 0.09489788818359375, 0.0962278060913086, 0.09536761474609375, 0.09517257690429687, 0.09514595031738281, 0.09517062377929687, 0.09536921691894532, 
0.09498009490966797, 0.0950087661743164, 0.09520127868652344, 0.09509670257568359, 0.09516435241699218, 0.09535302734375, 0.09493504333496093, 0.09797529602050781, 0.095710205078125, 0.09473638153076172, 0.09668608093261719, 0.09554534149169921, 0.09584432220458984, 0.09513977813720703, 0.09486140441894532, 0.09550847625732421, 0.09570912170410156, 0.09512185668945312, 0.09517177581787109, 0.09517686462402344, 0.09509750366210938, 0.09819446563720703, 0.09455916595458984, 0.09487471771240234, 0.09535174560546875, 0.0952278060913086, 0.09458819580078125, 0.09474265289306641, 0.09653523254394532, 0.09501286315917969, 0.09470921325683594, 0.09481836700439453, 0.0945887680053711, 0.09440835571289062, 0.09454300689697266, 0.0953250274658203, 0.09484796905517578, 0.0951800308227539, 0.09489389038085938, 0.0943543701171875, 0.09472819519042969, 0.09477043151855469, 0.09434803009033203, 0.09513990020751953, 0.09429395294189453, 0.09452134704589844, 0.09548185729980468, 0.09566108703613281, 0.10011443328857422, 0.09604195404052734, 0.09477279663085937, 0.09519366455078125, 0.09732492828369141, 0.09501081848144531, 0.09462560272216797, 0.09631763458251953, 0.09457868957519532, 0.09501696014404297, 0.09477091217041016, 0.09446018981933593, 0.09473228454589844, 0.09451014709472656, 0.09430316925048828, 0.09516441345214843, 0.094814208984375, 0.09434086608886719, 0.09504179382324218, 0.09655500793457031, 0.09458688354492187, 0.09409513854980468, 0.0951789779663086, 0.09607497406005859, 0.09442755126953126, 0.0946794204711914, 0.09503453063964844, 0.09454009246826171, 0.09428787231445312, 0.09502304077148438, 0.09482265472412109, 0.09539993286132813, 0.094885986328125, 0.09506396484375, 0.0951880340576172, 0.09481712341308594, 0.09519631958007813, 0.09447110748291015, 0.09527295684814453, 0.09527455902099609, 0.09503942108154297, 0.09489663696289062, 0.09516754913330078, 0.0948725128173828, 0.09484083557128906, 0.09458595275878906, 0.09455094146728515, 0.09504496002197266, 0.09464080047607422, 0.0955699234008789, 0.09479481506347656, 0.094831298828125, 0.09455232238769531, 0.09480806732177735, 0.09530691528320312, 0.09470169830322266, 0.09444425964355468, 0.09529753875732422, 0.09472000122070312, 0.09498194885253906, 0.09459117126464844, 0.09463516998291016, 0.09653129577636718, 0.09523200225830078, 0.0962960662841797, 0.09484786987304687, 0.09505382537841797, 0.09480397033691407, 0.09541017913818359, 0.09526886749267578, 0.09534361267089844, 0.0951363525390625, 0.09515254211425782, 0.09542451477050781, 0.09439231872558594, 0.09477120208740235, 0.09475628662109375, 0.0944830093383789, 0.09537875366210938, 0.0947504653930664, 0.09484989166259766, 0.09499222564697266, 0.09538175964355469, 0.09550028991699219, 0.0957624282836914, 0.09530368041992188, 0.095006591796875, 0.09476668548583984, 0.09510966491699219, 0.09850265502929688, 0.0957501449584961, 0.09721183776855469, 0.0953144302368164, 0.09525254058837891, 0.09479167938232422, 0.0950753631591797, 0.09522598266601562, 0.09475084686279298, 0.09484349060058593, 0.09480614471435547, 0.09539302062988281, 0.09457119750976563, 0.0948919677734375, 0.09755648040771485, 0.09636876678466796, 0.09534464263916016, 0.09475389099121094, 0.09601116943359375, 0.09485024261474609, 0.09498041534423828, 0.09601216125488281, 0.09522444915771484, 0.09509478759765624, 0.09493500518798828, 0.09459305572509766, 0.09472819519042969, 0.09494127655029297, 0.09469123077392579, 0.09448242950439453, 0.09469337463378906, 0.09489612579345703, 0.09561251068115234, 0.09778422546386718, 
0.09504153442382812, 0.09478086090087891, 0.09510559844970704, 0.09625190734863281, 0.09632972717285156, 0.09646694183349609, 0.09475849914550781, 0.09521987152099609, 0.09470182037353515, 0.09542041778564453, 0.09564495849609375, 0.09511599731445312, 0.09636614227294922, 0.09552508544921876, 0.09487545776367187, 0.09489449310302735, 0.09485667419433594, 0.09503183746337891, 0.0949288330078125, 0.09499037170410156, 0.09496482849121093, 0.09601273345947266, 0.0950248031616211, 0.09614415740966797, 0.09475055694580078, 0.09495724487304688, 0.09479222106933594, 0.09451929473876954, 0.09503539276123046, 0.0950149154663086, 0.09565184020996094, 0.09522176361083984, 0.09610841369628906, 0.09607590484619141, 0.09486553955078125, 0.09503724670410156, 0.09517763519287109, 0.0948674545288086, 0.09514598083496094, 0.09499033355712891, 0.09527910614013672, 0.09555689239501954, 0.09921558380126953, 0.09515878295898438, 0.09592422485351562, 0.09487359619140626, 0.09516022491455078, 0.09462572479248046, 0.09496377563476563, 0.09501824188232422, 0.09478230285644532, 0.09506752014160157, 0.09518962860107422, 0.09545430755615235, 0.09480592346191406, 0.0948111343383789, 0.09508659362792969, 0.09515424346923829, 0.09483257293701172, 0.09537248229980469, 0.09560147094726562, 0.09484204864501954, 0.0952287368774414, 0.09534239959716796, 0.09668739318847656, 0.0952492446899414, 0.09504959869384766, 0.09498643493652344, 0.09503129577636718, 0.0952074203491211, 0.095604736328125, 0.09538735961914062, 0.09552515411376954, 0.09538118743896484, 0.09627798461914062, 0.0957573471069336, 0.09543843078613282, 0.09502333068847656, 0.09485091400146485, 0.09486531066894531, 0.09597772979736328, 0.09592243194580079, 0.09550605010986328, 0.09567449951171875, 0.09613926696777343, 0.10292633819580078, 0.09523574066162109, 0.09553539276123046, 0.09755052947998047, 0.09510489654541016, 0.09658707427978516, 0.09617273712158203, 0.09553510284423829, 0.09479084777832031, 0.09483347320556641, 0.09971916961669922, 0.09505503845214844, 0.09481625366210937, 0.09465446472167968, 0.09469491577148438, 0.09491007995605469, 0.09744473266601562, 0.09503072357177735, 0.09460384368896485, 0.09478102111816407, 0.10202665710449219, 0.09482291412353516, 0.09453209686279297, 0.09448242950439453, 0.0942589111328125, 0.09524457550048829, 0.09495491027832031, 0.09455840301513672, 0.09507606506347656, 0.09454252624511719, 0.09779170989990234, 0.09925564575195313, 0.09464908599853515, 0.09440070343017579, 0.0944043197631836, 0.09440898895263672, 0.0943301773071289, 0.09481817626953125, 0.09452047729492187, 0.0940512924194336, 0.0947504653930664, 0.09533132934570313, 0.1000136947631836, 0.09462556457519532, 0.09502365112304688, 0.09491251373291015, 0.09481215667724609, 0.09541120147705077, 0.09455513763427735, 0.09463603210449219, 0.09492479705810547, 0.09437907409667969, 0.09498844909667968, 0.10032157135009766, 0.09445356750488282, 0.09420217895507813, 0.09460979461669922, 0.09519660949707032, 0.09498886108398437, 0.09508659362792969, 0.09528524780273437, 0.096110595703125, 0.09504332733154297, 0.09468339538574219, 0.09950179290771484, 0.09443949127197265, 0.09442326354980468, 0.09422643280029297, 0.0949043197631836, 0.09443302154541015, 0.0942266845703125, 0.09499177551269532, 0.09451171112060547, 0.09427558135986328, 0.09838998413085938, 0.095010498046875, 0.09469983673095703, 0.09504268646240234, 0.09530159759521484, 0.09465302276611329, 0.09443689727783203, 0.09517750549316406, 0.09500672149658203, 0.09450457763671875, 0.0998792953491211, 
0.09467890930175782, 0.09465254211425782, 0.09421619415283203, 0.09479695892333985, 0.09486000061035156, 0.09523827362060547, 0.09511321258544922, 0.09452339172363282, 0.09424809265136719, 0.09438813018798828, 0.10007443237304688, 0.09428797149658204, 0.094467041015625, 0.09441916656494141, 0.09451126098632813, 0.09575917053222656, 0.09520626831054688, 0.09687948608398438, 0.09848934173583984, 0.09469849395751953, 0.09712950134277344, 0.09979917144775391, 0.09440137481689453, 0.09399488067626953, 0.09467209625244141, 0.09433385467529297, 0.0940579833984375, 0.09441331481933593, 0.09430556488037109, 0.09456304168701171, 0.09461145782470703, 0.09482444763183594, 0.09962496185302734, 0.09467903900146485, 0.09453510284423829, 0.09754019165039063, 0.09553167724609375, 0.09857769775390625, 0.09632339477539062, 0.0948532485961914, 0.09514454650878906, 0.0955902099609375, 0.10042182159423828, 0.09465446472167968, 0.09411174774169923, 0.09501286315917969, 0.09505382537841797, 0.09501273345947266, 0.09616169738769531, 0.095301025390625, 0.09466102600097656, 0.09541878509521484, 0.0949710693359375]",tokens/s,10.480938095034421,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,3971.54304,2050.883584,0.0,1648.361472,1630.36416,s,1,12.38266015625,12.38266015625,0.0,12.38266015625,12.38266015625,12.38266015625,12.38266015625,[12.38266015625],,kWh,0.00015169681132085012,1.6720719403092892e-05,4.815670519200954e-05,0.00021657423591595255,,MB,2412.904448,2172.5184,0.0,1749.024768,1728.72192,s,10,0.7740492858886718,0.07740492858886719,0.0004153977936237488,0.07726408004760742,0.0779494369506836,0.0780817741394043,0.07818764389038087,"[0.07736585235595703, 0.07749523162841797, 0.07779430389404297, 0.078214111328125, 0.0770567398071289, 0.07792002868652344, 0.07716230773925781, 0.07698329925537109, 0.07698873901367187, 0.07706867218017578]",tokens/s,3307.282942662896,kWh,2.27631507322327e-06,2.5103850163224166e-07,1.2079187096590055e-06,3.735272284514517e-06,tokens/kWh,68535833.66902341,MB,2425.397248,2216.558592,0.0,1790.967808,1717.705216,s,10,48.8072763671875,4.88072763671875,0.021919991143565975,4.8780751953124994,4.908030712890625,4.914505834960938,4.919685932617187,"[4.88252001953125, 4.906591796875, 4.92098095703125, 4.8976708984375, 4.892908203125, 4.87363037109375, 4.85806591796875, 4.85659619140625, 4.85917529296875, 4.85913671875]",tokens/s,12.90791142001812,kWh,0.00014218308731594036,1.5683219823796507e-05,5.146190120393875e-05,0.00020932820834367562,tokens/kWh,300962.7823143952,,s,630,48.804846084594764,0.07746800965808687,0.0009023424453233514,0.07729206466674805,0.07831865768432617,0.07904382514953612,0.08110149559020997,"[0.07672172546386719, 0.07699030303955078, 0.07668592071533203, 0.0768511962890625, 0.07748812866210937, 0.07702732849121094, 0.07699005126953125, 0.07693968200683594, 0.07710105895996094, 0.07734674835205078, 0.07730198669433594, 0.07718867492675781, 
0.07742041778564453, 0.077633056640625, 0.07741462707519531, 0.0771529312133789, 0.07696562957763672, 0.0770560302734375, 0.07691490936279297, 0.07718614196777343, 0.07726383972167969, 0.07777887725830078, 0.07825718688964843, 0.07831436920166016, 0.0772232666015625, 0.07716095733642578, 0.07745356750488282, 0.07736524963378906, 0.07707852935791015, 0.08479126739501953, 0.07762515258789063, 0.07777833557128906, 0.07807059478759766, 0.07779737854003907, 0.07754956817626953, 0.07728128051757813, 0.07744102478027344, 0.0769306869506836, 0.07709728240966797, 0.07748818969726562, 0.07694950103759765, 0.07707756805419921, 0.07705411529541016, 0.0770546875, 0.07762876892089844, 0.07770185852050782, 0.07766191864013672, 0.0776031036376953, 0.07814473724365234, 0.07804803466796875, 0.07746047973632812, 0.07761974334716797, 0.07769964599609375, 0.07738972473144531, 0.07707443237304687, 0.0770164794921875, 0.07704550170898437, 0.07733744049072265, 0.07759024047851562, 0.07743926239013672, 0.07747583770751953, 0.07801593780517578, 0.07806015777587891, 0.0790087661743164, 0.07731881713867188, 0.07798966217041016, 0.07750662231445313, 0.07736927795410156, 0.07761190032958984, 0.07736697387695313, 0.07811277008056641, 0.07761686706542968, 0.07767584228515625, 0.07795744323730469, 0.07794755554199219, 0.07755971527099609, 0.07800374603271484, 0.07757062530517578, 0.07801241302490235, 0.07779939270019531, 0.07756134033203126, 0.07779177856445313, 0.07764310455322265, 0.07744271850585938, 0.07738470458984376, 0.07793062591552734, 0.07815507507324218, 0.07777693176269532, 0.07759721374511719, 0.07790509033203125, 0.07750672149658203, 0.07786470031738281, 0.07813951873779297, 0.07886310577392579, 0.07917775726318359, 0.07865042877197266, 0.0778290557861328, 0.07773792266845703, 0.07771961975097656, 0.07874765014648437, 0.07810569763183593, 0.07814015960693359, 0.07820425415039063, 0.07783042907714843, 0.07940748596191406, 0.07814546966552735, 0.07761721801757812, 0.0773674545288086, 0.07737548828125, 0.0773038101196289, 0.0771534423828125, 0.07728009796142578, 0.07777040100097657, 0.07725910186767578, 0.07743260955810546, 0.07754895782470703, 0.07742924499511719, 0.07796153259277344, 0.0778608627319336, 0.07771135711669921, 0.07811257934570312, 0.07950672149658203, 0.07841603088378907, 0.07763590240478516, 0.07820252990722656, 0.07775660705566406, 0.07733395385742188, 0.07754621124267579, 0.07782806396484375, 0.0774268798828125, 0.07832559967041015, 0.07837696075439453, 0.07815974426269531, 0.07844467163085937, 0.07831673431396484, 0.0780068130493164, 0.07847555541992188, 0.07874765014648437, 0.07947058868408204, 0.07865753936767578, 0.07862067413330077, 0.0783073272705078, 0.07799327850341797, 0.07838380432128907, 0.07842124938964844, 0.07762406158447266, 0.07808204650878907, 0.07811676788330078, 0.07804431915283203, 0.0786192626953125, 0.07831788635253906, 0.07813734436035157, 0.07815782165527344, 0.07762124633789062, 0.07740211486816406, 0.08027718353271485, 0.0830794906616211, 0.07825730895996094, 0.07795798492431641, 0.07767040252685548, 0.0774103012084961, 0.07742655944824219, 0.07760294342041016, 0.079072509765625, 0.07733750152587891, 0.0772544937133789, 0.07917362976074219, 0.07950713348388672, 0.07874559783935547, 0.07803321838378906, 0.07819209289550781, 0.07798790740966798, 0.07691661071777343, 0.07693577575683594, 0.07796326446533203, 0.07819459533691406, 0.07801042938232422, 0.07772150421142578, 0.07708070373535156, 0.07729097747802735, 0.07695999908447265, 0.07698384094238281, 
0.07675129699707031, 0.0803076171875, 0.07781798553466797, 0.07717536163330078, 0.07707670593261719, 0.07771238708496093, 0.07781068420410156, 0.07802483367919921, 0.07739068603515625, 0.07770140838623046, 0.07737913513183593, 0.0773050537109375, 0.07737401580810546, 0.07759638214111328, 0.07859677124023437, 0.07791401672363281, 0.07742620849609375, 0.07781785583496094, 0.07761325073242188, 0.07777113342285157, 0.07838719940185547, 0.07818406677246094, 0.07809702301025391, 0.078255615234375, 0.07769728088378906, 0.07819058990478515, 0.07845606231689453, 0.07823027038574219, 0.07767056274414062, 0.07836188507080079, 0.0781379165649414, 0.0780574722290039, 0.07842012786865235, 0.07824777221679688, 0.07829910278320312, 0.07798377227783203, 0.07787315368652344, 0.08071987152099609, 0.08194048309326171, 0.07911833953857422, 0.07814329528808593, 0.07762294769287109, 0.07742518615722656, 0.07795507049560547, 0.07700096130371094, 0.07700863647460937, 0.07770317077636718, 0.07693641662597656, 0.08019795227050781, 0.07731452941894532, 0.07665869140625, 0.07749632263183594, 0.07720063781738282, 0.07734137725830079, 0.07707654571533203, 0.07724221038818359, 0.07732649230957031, 0.07709254455566407, 0.0771259536743164, 0.07695359802246093, 0.07648188781738281, 0.07670579528808594, 0.07644226837158204, 0.07661481475830079, 0.07650415802001953, 0.0773834228515625, 0.07671807861328125, 0.07739801788330078, 0.07699842834472656, 0.07717295837402344, 0.07694502258300781, 0.07737753295898438, 0.07667145538330078, 0.07683318328857422, 0.0766746597290039, 0.07776000213623047, 0.0770323486328125, 0.07709081268310547, 0.07662963104248047, 0.077046142578125, 0.076906494140625, 0.07675885009765625, 0.0767544937133789, 0.07698905944824219, 0.07842809295654297, 0.07716767883300782, 0.07673753356933594, 0.07664383697509766, 0.07664486694335937, 0.07678157043457032, 0.07824713897705078, 0.07697830200195313, 0.07695609283447266, 0.07766242980957032, 0.0775633316040039, 0.07720178985595703, 0.0779306869506836, 0.07788246154785156, 0.07743990325927734, 0.07744102478027344, 0.07949440002441406, 0.08133817291259765, 0.07829395294189453, 0.07837628936767578, 0.07809827423095703, 0.0778551025390625, 0.07811122894287109, 0.07796717071533203, 0.07809574127197266, 0.07830528259277343, 0.07808035278320312, 0.07847103881835937, 0.0779876480102539, 0.07780556488037109, 0.07763017272949219, 0.07801219177246094, 0.07894979095458984, 0.07754163360595703, 0.07767916870117188, 0.07824998474121093, 0.07776416015625, 0.07783190155029297, 0.07841574096679688, 0.07769789123535156, 0.07755689239501953, 0.07748707580566407, 0.07731391906738282, 0.07743475341796875, 0.07745753479003906, 0.0777706527709961, 0.07776470184326172, 0.0782576675415039, 0.07823206329345703, 0.07778262329101562, 0.07949737548828124, 0.07774153900146484, 0.07769558715820313, 0.07736956787109375, 0.07747789001464844, 0.07740815734863281, 0.07872726440429688, 0.07752044677734375, 0.07763398742675781, 0.07907328033447265, 0.07916738891601563, 0.07839334106445313, 0.07803263854980469, 0.07787551879882812, 0.07827049255371094, 0.07685075378417969, 0.07674463653564453, 0.07693363189697265, 0.07713587188720702, 0.07674278259277344, 0.07696575927734375, 0.0764986572265625, 0.07694159698486328, 0.07736319732666015, 0.07700889587402343, 0.07681606292724609, 0.07702559661865234, 0.07686985778808594, 0.07665641784667969, 0.07633920288085938, 0.07867711639404297, 0.08023683166503906, 0.07826902770996094, 0.07727536010742188, 0.07688169860839844, 0.0769269790649414, 
0.07719232177734375, 0.07690287780761719, 0.07709523010253906, 0.07705404663085938, 0.07804108428955078, 0.07943730926513672, 0.0774966049194336, 0.07676137542724609, 0.07697606658935546, 0.07671017456054688, 0.07681100463867188, 0.07636688232421875, 0.0766258544921875, 0.07656243133544922, 0.07661727905273437, 0.07701753234863282, 0.07697612762451173, 0.07663410949707031, 0.07625894165039063, 0.07638668823242188, 0.07643135833740235, 0.07642012786865235, 0.0769730224609375, 0.07966515350341796, 0.07668511962890626, 0.07648480224609375, 0.07655558776855469, 0.07679385375976562, 0.07933203125, 0.07733468627929688, 0.0764823989868164, 0.07646150207519531, 0.07729315185546876, 0.07685628509521485, 0.07648159790039062, 0.07665660858154297, 0.07673136138916016, 0.07678959655761719, 0.07679606628417969, 0.0765810546875, 0.07662982177734375, 0.07783558654785157, 0.07751052856445312, 0.07728591918945313, 0.07678594970703125, 0.07665254211425782, 0.0765296630859375, 0.0768511962890625, 0.07677977752685547, 0.07648844909667969, 0.07640262603759766, 0.07724038696289062, 0.07671523284912109, 0.07667750549316406, 0.07708499145507812, 0.07738902282714843, 0.07646092987060547, 0.07986790466308594, 0.0816448974609375, 0.07943644714355469, 0.07730397033691407, 0.07722940826416015, 0.07692671966552735, 0.07773014068603516, 0.07672259521484374, 0.07669554901123046, 0.07673651123046875, 0.07696383666992188, 0.07745484924316406, 0.07711180877685547, 0.07705107116699218, 0.07654291534423828, 0.07703334045410157, 0.07709900665283204, 0.0772011489868164, 0.07697427368164063, 0.07720118713378907, 0.07697555541992188, 0.07662287902832031, 0.07696688079833984, 0.07665110778808594, 0.07723442840576172, 0.07689942169189454, 0.07680445098876953, 0.07657529449462891, 0.07700886535644531, 0.0765522232055664, 0.07721385955810547, 0.07697369384765625, 0.07697526550292969, 0.07699935913085937, 0.0773570556640625, 0.07681024169921875, 0.07730995178222656, 0.07705599975585938, 0.07677133178710938, 0.0767119369506836, 0.07668326568603516, 0.07698611450195313, 0.07706034851074219, 0.07672124481201172, 0.0768602523803711, 0.07674018859863281, 0.07679641723632813, 0.07707177734375, 0.07669014739990235, 0.07723811340332032, 0.07713174438476562, 0.07721372985839844, 0.07675414276123046, 0.07685199737548828, 0.07673571014404297, 0.07704450988769532, 0.07674374389648438, 0.07713702392578126, 0.07660431671142579, 0.07720819091796875, 0.07760514831542968, 0.07741667175292968, 0.07754524993896485, 0.07726070404052734, 0.07818659210205078, 0.08113692474365235, 0.07731478118896484, 0.077198974609375, 0.07689183807373047, 0.07662767791748047, 0.07685526275634766, 0.07734569549560547, 0.07642736053466796, 0.07655014038085937, 0.07650099182128907, 0.076787841796875, 0.07969296264648437, 0.07724703979492188, 0.07682848358154297, 0.07683516693115235, 0.07659315490722657, 0.07668940734863282, 0.07656559753417969, 0.077107421875, 0.07646841430664063, 0.07701964569091797, 0.07718009948730468, 0.07696476745605468, 0.07699417877197266, 0.07680569458007812, 0.07688233947753906, 0.07708515167236328, 0.07700441741943359, 0.07745123291015625, 0.07705216217041015, 0.07716083526611328, 0.07718211364746094, 0.07672396850585937, 0.07701920318603515, 0.07665535736083984, 0.0770396499633789, 0.07686863708496093, 0.07705244445800781, 0.07777222442626953, 0.07702627563476562, 0.07670988464355469, 0.07772160339355469, 0.07680960083007812, 0.07711321258544922, 0.07685968017578125, 0.07715484619140625, 0.07705757141113281, 0.0767451171875, 0.07659315490722657, 
0.07675270080566406, 0.07671826934814453, 0.07700220489501954, 0.07669971466064453, 0.07770979309082031, 0.07662825775146484, 0.07670140838623046, 0.07691366577148437, 0.07699353790283203, 0.07669760131835937, 0.0766402587890625, 0.07648883056640625, 0.07679373168945312, 0.07670374298095703, 0.07889622497558593, 0.07828572845458984, 0.08205052947998047, 0.07768934631347656, 0.07745462036132812, 0.07689059448242187, 0.07732173156738281, 0.07713072204589844, 0.0769024658203125, 0.0769596176147461, 0.07664832305908204, 0.0765606689453125, 0.0769676513671875, 0.07648665618896484, 0.07657062530517578, 0.07691264343261718, 0.07754505920410157, 0.07713545227050782, 0.0770179214477539, 0.07702912139892579, 0.07653606414794922, 0.07676518249511718, 0.0766890869140625, 0.07672659301757813, 0.07665180969238282, 0.07732093048095703, 0.07653734588623047, 0.0765549087524414, 0.07702073669433594, 0.08101475524902343, 0.07688803100585938, 0.07694064331054687, 0.07647622680664062, 0.07671250915527343, 0.07725049591064453, 0.0765116195678711, 0.0764537582397461, 0.07738780975341797, 0.07669779205322266, 0.07640259552001953, 0.07636787414550782, 0.07725055694580078, 0.07685104370117188, 0.07681644439697266, 0.07675094604492187, 0.07669289398193359, 0.07634355163574219, 0.07697808074951172, 0.07651773071289063, 0.07664240264892579, 0.07675084686279297, 0.07683277130126953, 0.0764881591796875, 0.0764332504272461, 0.07679369354248047, 0.07684284973144531, 0.07677439880371094, 0.07696383666992188, 0.07652352142333985, 0.07669350433349609, 0.07717193603515625, 0.07642729949951171, 0.07669942474365235, 0.07655113220214843, 0.07824352264404297, 0.07890975952148438, 0.07762342071533203, 0.0774224624633789, 0.07800045013427734, 0.07726041412353515, 0.07773388671875, 0.0771580810546875, 0.07718073272705078, 0.07720575714111329, 0.07700921630859375, 0.07728537750244141, 0.07981625366210937, 0.07771564483642578, 0.07733683013916015, 0.07751427459716796, 0.07756976318359375, 0.07776080322265624, 0.07770137786865235, 0.07753750610351562, 0.0774103012084961, 0.07737318420410157, 0.07791750335693359, 0.0771655044555664, 0.07708988952636718, 0.07731705474853516, 0.07720566558837891, 0.07740799713134766, 0.07713593292236329]",tokens/s,12.908554181443465,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,5782.605824,3908.960256,0.0,3548.381184,3425.068032,s,1,14.8977392578125,14.8977392578125,0.0,14.8977392578125,14.8977392578125,14.8977392578125,14.8977392578125,[14.8977392578125],,kWh,0.00023595894329581974,2.6020830533045144e-05,7.750200644601746e-05,0.00033948178027488234,,MB,2787.520512,4617.797632,0.0,4190.109696,3823.07072,s,10,4.310599395751954,0.4310599395751954,0.0030060991912692374,0.4302279663085937,0.43590193481445316,0.4363443542480469,0.4366982897949219,"[0.43678677368164065, 0.4320989074707031, 0.43580361938476564, 0.4319052429199219, 0.431262451171875, 
0.429104736328125, 0.42868020629882814, 0.4280606384277344, 0.4291934814453125, 0.4277033386230469]",tokens/s,593.8849252665071,kWh,1.2603233270833886e-05,1.3897764142180062e-06,5.192816654248783e-06,1.9185826339300673e-05,tokens/kWh,13343183.424714103,MB,2791.747584,4619.894784,0.0,4192.206848,3823.07328,s,10,65.14712988281251,6.514712988281251,0.018106380647313524,6.52373291015625,6.53283330078125,6.536978662109375,6.540294951171875,"[6.5247216796875, 6.52346875, 6.531912109375, 6.52428271484375, 6.5411240234375, 6.5239970703125, 6.495091796875, 6.5053505859375, 6.4954111328125, 6.48177001953125]",tokens/s,9.670418345877279,kWh,0.00019122792198124802,2.1093627574139584e-05,7.764299961435448e-05,0.0002899645491697421,tokens/kWh,217267.93906492507,,s,630,65.14537198638915,0.10340535235934786,0.0008743592209517402,0.10322900772094726,0.10426836166381835,0.10501304512023925,0.10678032257080079,"[0.10407603454589844, 0.10361033630371094, 0.10396876525878906, 0.10392985534667969, 0.10343987274169922, 0.10329497528076172, 0.10282630157470703, 0.1034405746459961, 0.10287657928466797, 0.10358844757080078, 0.10374575805664063, 0.10304390716552735, 0.10335536193847657, 0.10321295928955078, 0.10304521942138672, 0.10472038269042969, 0.103642333984375, 0.10374018859863281, 0.10354688262939453, 0.10458834838867187, 0.10324063873291016, 0.10322857666015625, 0.103070556640625, 0.10289548492431641, 0.10318041229248047, 0.10353887939453126, 0.10352620697021485, 0.1038397445678711, 0.10307174682617187, 0.10372096252441407, 0.10320889282226563, 0.10414495849609375, 0.10287923431396484, 0.10313129425048828, 0.10340879821777343, 0.10385238647460937, 0.10426383972167969, 0.10662095642089844, 0.10445225524902343, 0.10345407867431641, 0.10541257476806641, 0.10317462158203125, 0.10251487731933594, 0.10312710571289062, 0.10302662658691407, 0.10319644927978516, 0.10342626953125, 0.10325599670410156, 0.10354080200195312, 0.10670883178710938, 0.10356342315673828, 0.10458089447021485, 0.10282825469970704, 0.10266934204101562, 0.1027921600341797, 0.10286646270751953, 0.10297187042236328, 0.10414297485351562, 0.10329833221435547, 0.10328227233886719, 0.10321408081054688, 0.10334130859375, 0.10319120025634766, 0.10438451385498047, 0.10335027313232421, 0.10349158477783203, 0.10374553680419922, 0.10384384155273438, 0.10423091125488282, 0.10380425262451172, 0.10360844421386718, 0.10590803527832031, 0.10429862213134766, 0.10372509002685547, 0.10350649261474609, 0.10519551849365234, 0.10317414093017578, 0.10288662719726563, 0.10302953338623047, 0.10332569885253906, 0.10351411437988281, 0.10372089385986329, 0.10328275299072266, 0.10291609954833984, 0.10301235198974609, 0.10315516662597657, 0.10273619079589844, 0.10282211303710938, 0.103225341796875, 0.10472652435302734, 0.10729881286621094, 0.10410761260986329, 0.1043071060180664, 0.10297663879394531, 0.10246640014648438, 0.10292332458496094, 0.10246623992919922, 0.10250640106201171, 0.10281817626953126, 0.10235903930664063, 0.10330726623535157, 0.10315555572509766, 0.10321731567382812, 0.10340147399902344, 0.103017822265625, 0.10321167755126953, 0.102940673828125, 0.10395635223388672, 0.1032152328491211, 0.10342998504638672, 0.10316793823242187, 0.10474518585205078, 0.10341990661621094, 0.10331938934326172, 0.10283638763427734, 0.10308566284179688, 0.10307807922363281, 0.10574006652832031, 0.10343430328369141, 0.10345938873291016, 0.10328028869628907, 0.10377846527099609, 0.10351001739501953, 0.10409923553466797, 0.10317289733886718, 0.1034644775390625, 0.10578495788574219, 
0.10381174468994141, 0.1067512664794922, 0.10346076965332031, 0.10307158660888673, 0.1031091537475586, 0.10332819366455079, 0.10342400360107422, 0.10336569976806641, 0.1032625274658203, 0.10414345550537109, 0.10409760284423829, 0.10324803161621093, 0.10477779388427734, 0.10369638061523437, 0.10321715545654297, 0.1038086395263672, 0.1031520004272461, 0.10303692626953125, 0.10468761444091797, 0.10348316955566406, 0.10338326263427734, 0.10310393524169922, 0.1035976333618164, 0.10295603179931641, 0.10326175689697266, 0.10375820922851563, 0.10356511688232421, 0.10324198150634765, 0.10290585327148437, 0.10298473358154298, 0.10328371429443359, 0.10306966400146485, 0.10315161895751954, 0.10293212890625, 0.10304956817626953, 0.10333152008056641, 0.1031618881225586, 0.10312313842773438, 0.10430630493164063, 0.10387670135498046, 0.10606781005859375, 0.10341993713378907, 0.10277113342285156, 0.10278304290771484, 0.10313343811035157, 0.10332569885253906, 0.10296498870849609, 0.1029775390625, 0.10370169830322265, 0.10358822631835937, 0.10487586975097657, 0.10339295959472657, 0.10334300994873047, 0.10325110626220703, 0.10579647827148438, 0.10405270385742188, 0.10351837158203125, 0.1043160629272461, 0.10641069030761718, 0.10334003448486329, 0.10441311645507813, 0.10353056335449219, 0.1043642578125, 0.10399472045898438, 0.10274063873291016, 0.10284198760986328, 0.10308844757080078, 0.10290582275390625, 0.10358383941650391, 0.10370591735839843, 0.10338511657714844, 0.1035414047241211, 0.1044684829711914, 0.10406502532958985, 0.10282086181640625, 0.10326937866210938, 0.10311475372314453, 0.1033175048828125, 0.1034629135131836, 0.103299072265625, 0.10311824035644532, 0.10338774108886718, 0.1040345916748047, 0.10372476959228516, 0.10340755462646484, 0.1041259536743164, 0.10321756744384766, 0.10362691497802734, 0.10391667175292969, 0.10304137420654297, 0.10655382537841797, 0.10488838195800781, 0.10400768280029297, 0.10352435302734375, 0.10639676666259766, 0.10346585845947266, 0.1033380126953125, 0.1032162857055664, 0.10364559936523438, 0.10327289581298828, 0.10336870574951172, 0.10386665344238281, 0.10348528289794921, 0.10304908752441407, 0.10303282928466796, 0.10385100555419922, 0.10315395355224609, 0.10476617431640625, 0.10346701049804688, 0.10330931091308594, 0.10331938934326172, 0.10467753601074219, 0.10382028961181641, 0.10292530822753906, 0.10295629119873047, 0.1034512939453125, 0.10301449584960938, 0.10316799926757812, 0.10292633819580078, 0.10278015899658204, 0.10308480072021485, 0.10392704010009765, 0.10293325042724609, 0.10308963012695313, 0.10280400085449219, 0.10342240142822266, 0.10430815887451172, 0.10400406646728516, 0.10504115295410156, 0.10433001708984375, 0.10326432037353515, 0.1037107162475586, 0.10370374298095703, 0.1035654067993164, 0.10296572875976563, 0.10322943878173828, 0.10415507507324219, 0.10338719940185546, 0.10320521545410156, 0.10356880187988281, 0.103293212890625, 0.10304118347167969, 0.105289794921875, 0.10373939514160156, 0.10292633819580078, 0.10388275146484376, 0.10422067260742188, 0.10338224029541015, 0.10306639862060547, 0.10368732452392578, 0.10337161254882812, 0.10364723205566406, 0.10329702758789062, 0.10335958099365235, 0.10292691040039062, 0.10355133056640625, 0.10431465911865234, 0.10303228759765624, 0.10346697235107422, 0.10320976257324219, 0.10409101104736328, 0.10396320343017579, 0.10388470458984375, 0.10338726043701171, 0.10399906921386719, 0.1041146240234375, 0.10426777648925781, 0.10319033813476562, 0.1035487060546875, 0.10297936248779296, 0.10280524444580078, 
0.10392655944824218, 0.10321520233154297, 0.10331292724609376, 0.10343241882324218, 0.10497869110107422, 0.10399948883056641, 0.10371654510498046, 0.10379446411132813, 0.10369468688964843, 0.10402835083007812, 0.10390684509277344, 0.10447714996337891, 0.10725312042236328, 0.10371033477783204, 0.1049730224609375, 0.10836758422851563, 0.10336109161376954, 0.10389004516601562, 0.10348633575439453, 0.10740924835205078, 0.10414710235595703, 0.10348544311523437, 0.10346495819091797, 0.10313465881347657, 0.10355731201171875, 0.10395065307617188, 0.10388851165771484, 0.10348550415039062, 0.10492054748535157, 0.10373619079589844, 0.10327565002441406, 0.10363772583007813, 0.10356451416015625, 0.10316489410400391, 0.10340249633789063, 0.10298579406738281, 0.10312185668945313, 0.10293443298339844, 0.10265961456298828, 0.10327916717529297, 0.1028505630493164, 0.10264575958251954, 0.1028853759765625, 0.10309123229980469, 0.10359471893310547, 0.10339933013916015, 0.10358604431152343, 0.10331145477294922, 0.10352973175048828, 0.1038814697265625, 0.10665184020996094, 0.10367161560058594, 0.10381094360351563, 0.10386342620849609, 0.10291506958007812, 0.10342400360107422, 0.10323763275146484, 0.1029939193725586, 0.1036042251586914, 0.10341756439208985, 0.10375580596923828, 0.1028958740234375, 0.10301644897460938, 0.10689286041259766, 0.10406934356689453, 0.10290166473388672, 0.10421177673339843, 0.10279987335205078, 0.1036657943725586, 0.10275615692138672, 0.10282028961181641, 0.10319181060791016, 0.10314636993408204, 0.10329087829589843, 0.10296524810791016, 0.1030492172241211, 0.10398515319824218, 0.10299801635742188, 0.10391756439208985, 0.10253311920166015, 0.10287104034423829, 0.10373529815673828, 0.10346601867675781, 0.10348668670654297, 0.10349107360839843, 0.10286093139648438, 0.10302067565917969, 0.1025802230834961, 0.10323353576660156, 0.10253517150878906, 0.10285881805419922, 0.10266009521484375, 0.10280892944335937, 0.10292301177978516, 0.103397216796875, 0.10254940795898437, 0.10276643371582031, 0.10302057647705078, 0.10278841400146485, 0.102542236328125, 0.10470604705810548, 0.10360755157470704, 0.10302130889892579, 0.10334803009033203, 0.10517113494873047, 0.10249190521240234, 0.10260915374755859, 0.10272509002685547, 0.1020278091430664, 0.1027972183227539, 0.10258748626708984, 0.10257510375976563, 0.10338713836669922, 0.10347731018066406, 0.10321094512939454, 0.10296924591064453, 0.10313314819335938, 0.10329414367675781, 0.10283309173583985, 0.10314342498779297, 0.10392781066894531, 0.10329293060302734, 0.10376383972167968, 0.1035832977294922, 0.10295967864990234, 0.10528358459472656, 0.10307778930664062, 0.10287068939208985, 0.10276032257080078, 0.1029017562866211, 0.10262995147705078, 0.102753662109375, 0.10298339080810547, 0.10328089904785157, 0.10275516510009766, 0.10258735656738281, 0.10277359771728516, 0.10264530944824218, 0.10274598693847656, 0.1026497573852539, 0.10327107238769531, 0.10310028839111328, 0.10358409881591797, 0.103395263671875, 0.10281187438964844, 0.10266815948486328, 0.10271536254882813, 0.10292201232910156, 0.10292041778564454, 0.1028485107421875, 0.10262118530273437, 0.10266377258300781, 0.10651689910888672, 0.10293247985839844, 0.10272767639160156, 0.10427362823486327, 0.1055684814453125, 0.10241849517822266, 0.10268988800048828, 0.10298429107666016, 0.10261945343017578, 0.10289766693115235, 0.10259645080566407, 0.10270326232910157, 0.10289504241943359, 0.10355961608886718, 0.10278256225585937, 0.10254195404052735, 0.10354166412353516, 0.10297650909423828, 
0.10472447967529297, 0.10274208068847657, 0.10257373046875, 0.1029420166015625, 0.10326627349853515, 0.1037583999633789, 0.10322742462158203, 0.10277251434326172, 0.10358029174804688, 0.10272310638427734, 0.1028603515625, 0.10350006103515624, 0.10277542114257812, 0.10273772430419922, 0.10291423797607421, 0.10316595458984375, 0.10334320068359375, 0.10307062530517579, 0.10359808349609376, 0.10319446563720704, 0.10335453033447266, 0.10322124481201173, 0.10289766693115235, 0.10565837097167968, 0.10355302429199219, 0.10294671630859376, 0.10279331207275391, 0.10899251556396485, 0.10317164611816407, 0.10319264221191406, 0.10302281951904296, 0.10288963317871094, 0.10275004577636719, 0.1028812484741211, 0.10268498992919922, 0.10290060424804688, 0.10317215728759765, 0.10283465576171875, 0.10252845001220703, 0.10359795379638671, 0.10256044769287109, 0.10256140899658203, 0.10323375701904297, 0.10336876678466797, 0.10287932586669922, 0.10282393646240234, 0.1035726089477539, 0.1031928939819336, 0.10314803314208984, 0.10312710571289062, 0.10335775756835938, 0.10261369323730468, 0.10277273559570313, 0.10329702758789062, 0.10392339324951172, 0.1036987533569336, 0.10378822326660156, 0.10347551727294922, 0.10406710052490234, 0.10381718444824219, 0.10365328216552734, 0.10532086181640625, 0.10325164794921875, 0.10359808349609376, 0.10338098907470702, 0.10297049713134766, 0.10258521270751954, 0.10244252777099609, 0.10241264343261719, 0.10208393859863281, 0.10231068420410157, 0.10320182037353516, 0.10234268951416016, 0.10293958282470703, 0.10356729888916015, 0.1063620834350586, 0.10302281951904296, 0.10293106842041015, 0.10321100616455078, 0.10279052734375, 0.10241244506835938, 0.1020379867553711, 0.10258236694335937, 0.10244905853271484, 0.10260070037841797, 0.10266214752197265, 0.10269286346435547, 0.10277273559570313, 0.10300409698486328, 0.10292140960693359, 0.10279821014404297, 0.10257196807861328, 0.10286265563964844, 0.10353465270996094, 0.10307603454589843, 0.10275833892822266, 0.10277251434326172, 0.1044319076538086, 0.10250879669189453, 0.10284031677246094, 0.10306800079345703, 0.10288288116455079, 0.10312713623046875, 0.10278912353515625, 0.10264752197265625, 0.10225897979736329, 0.10235004425048828, 0.10233936309814454, 0.10242457580566407, 0.10271334075927735, 0.10254541015625, 0.10307692718505859, 0.10415337371826172, 0.10282460784912109, 0.10293977355957032, 0.1022820816040039, 0.10534300994873047, 0.10280960083007812, 0.10230918121337891, 0.1024703369140625, 0.10273996734619141, 0.10238566589355469, 0.10256588745117187, 0.1024368667602539, 0.10679219055175782, 0.104010498046875, 0.10362989044189454, 0.10270159912109375, 0.10249030303955078, 0.10304653167724609, 0.10358975982666016, 0.10351305389404297, 0.10421234893798828, 0.10300224304199218, 0.10273945617675781, 0.10251315307617187, 0.10250649261474609, 0.10260070037841797, 0.10212290954589844, 0.10216304016113281, 0.10248393249511718, 0.10221981048583985, 0.10220543670654297, 0.10230188751220703, 0.10205779266357422, 0.10221743774414062, 0.10236473846435547, 0.10251123046875, 0.10264995574951172, 0.10218109130859375, 0.10259433746337891, 0.10271488189697266, 0.10295270538330079, 0.10279350280761719, 0.10560320281982422, 0.10310896301269531, 0.10316390228271484, 0.1029222412109375, 0.10285014343261718, 0.10265436553955078, 0.10247190093994141, 0.10262710571289063]",tokens/s,9.670679294480445,, 
8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,12395.528192,6990.790656,0.0,6595.54304,6586.72896,s,1,28.8501015625,28.8501015625,0.0,28.8501015625,28.8501015625,28.8501015625,28.8501015625,[28.8501015625],,kWh,0.0006341500994791687,6.99444564694861e-05,0.00023871046874599997,0.0009428050246946548,,MB,1252.286464,7332.626432,0.0,6916.407296,6839.599104,s,10,1.1810994644165038,0.11810994644165038,0.000396576550289102,0.1181243667602539,0.11869516143798828,0.11872881393432616,0.11875573593139647,"[0.11758112335205079, 0.11809552001953125, 0.11868768310546875, 0.11876246643066406, 0.11821939086914063, 0.11815321350097656, 0.11805347442626953, 0.11794924926757812, 0.11741353607177735, 0.11818380737304687]",tokens/s,2167.471984473985,kWh,3.5150750766566676e-06,3.876503588161218e-07,2.334165187277139e-06,6.236890622749929e-06,tokens/kWh,41046094.197356015,MB,1274.49088,7374.569472,0.0,6958.350336,6910.272512,s,10,62.07667578125,6.207667578125,0.159087421132171,6.15906494140625,6.231176708984375,6.457334448242187,6.638260639648437,"[6.14124267578125, 6.14519091796875, 6.18091943359375, 6.1425810546875, 6.13955224609375, 6.1597587890625, 6.15880078125, 6.16580859375, 6.1593291015625, 6.6834921875]",tokens/s,10.148739314264132,kWh,0.0001798534508991761,1.9838553442701504e-05,9.415996756352254e-05,0.0002938519719054002,tokens/kWh,214393.66083369893,,s,630,62.07341981506347,0.09852923780168805,0.003793224256528723,0.09755729675292968,0.0990539306640625,0.10562251091003416,0.116593251953125,"[0.09696540832519532, 0.09753353881835937, 0.09761580657958985, 0.09757484436035156, 0.09748332977294921, 0.09760905456542969, 0.09744038391113281, 0.09725856018066406, 0.097170654296875, 0.09735456085205078, 0.09710304260253906, 0.09688652801513672, 0.0971335678100586, 0.09762723541259766, 0.09740480041503906, 0.09724518585205078, 0.09757657623291016, 0.09806499481201172, 0.09752127838134765, 0.09764249420166016, 0.09720438385009765, 0.09697257232666015, 0.09728841400146485, 0.09741276550292968, 0.09663056182861328, 0.09733177947998047, 0.09661440277099609, 0.09646396636962891, 0.09682425689697266, 0.09659200286865234, 0.09654051208496094, 0.09650176239013672, 0.09670009613037109, 0.09744620513916015, 0.09676799774169922, 0.09645875549316406, 0.09689529418945313, 0.09690694427490235, 0.09789158630371093, 0.09698783874511718, 0.0973210220336914, 0.09927062225341797, 0.09761312103271484, 0.09732755279541015, 0.09772061157226562, 0.097451904296875, 0.09886732482910156, 0.09790585327148438, 0.10541343688964844, 0.09823846435546875, 0.09815039825439453, 0.09759539031982421, 0.09692160034179688, 0.09750323486328125, 0.09784925079345703, 0.09721785736083985, 0.09688758087158203, 0.0976527328491211, 0.09726383972167969, 0.09765865325927735, 0.09721855926513671, 0.09758924865722657, 0.09764864349365235, 0.09775222778320312, 0.09713782501220704, 0.09729853057861328, 0.09719583892822266, 
0.09734236907958985, 0.0974919662475586, 0.09747561645507813, 0.09722956848144532, 0.09821324920654297, 0.09762246704101563, 0.09892291259765625, 0.09761177825927735, 0.09733257293701172, 0.09767993927001953, 0.09804338836669922, 0.09764717102050781, 0.10099686431884766, 0.0972314224243164, 0.09719779205322265, 0.10010214233398437, 0.09786777496337891, 0.09772646331787109, 0.09727180480957032, 0.097185791015625, 0.09700527954101562, 0.09739933013916016, 0.09713638305664063, 0.09708863830566407, 0.09743577575683594, 0.09724620819091796, 0.09714002990722656, 0.09787232208251953, 0.09708338928222657, 0.09740902709960937, 0.09733020782470703, 0.09760355377197266, 0.09761894226074219, 0.0975370864868164, 0.09771295928955079, 0.09750556945800781, 0.09761366271972656, 0.09775513458251953, 0.09787548828125, 0.09803414154052735, 0.09738854217529297, 0.09712870025634765, 0.09830368041992188, 0.09701923370361328, 0.09688764953613281, 0.09711949157714844, 0.09703865814208984, 0.09763641357421875, 0.09771164703369141, 0.0969039077758789, 0.09733529663085938, 0.09695795440673828, 0.09746073913574219, 0.09702188873291015, 0.09694384002685547, 0.09717330932617188, 0.0969191665649414, 0.09732189178466796, 0.09760704040527343, 0.09765779113769531, 0.09844048309326171, 0.0981943359375, 0.09768531036376953, 0.09751769256591797, 0.09817862701416015, 0.0979967041015625, 0.09850511932373048, 0.09821593475341797, 0.09710329437255859, 0.09674195098876953, 0.09764383697509765, 0.09743756866455078, 0.09730134582519531, 0.09687350463867188, 0.09899922943115234, 0.09752371215820313, 0.09747865295410156, 0.09722438049316406, 0.09833299255371093, 0.097112060546875, 0.09964134216308594, 0.1003724822998047, 0.09729853057861328, 0.09683328247070312, 0.09722077178955078, 0.09880166625976562, 0.09725782775878906, 0.098868896484375, 0.09785753631591797, 0.0980316162109375, 0.09792511749267578, 0.09793276977539063, 0.09842742156982422, 0.09770598602294922, 0.09837363433837891, 0.09827532958984375, 0.0976135711669922, 0.09820595550537109, 0.09800498962402343, 0.09787702178955078, 0.09729337310791016, 0.09738182067871094, 0.09770646667480469, 0.09713868713378906, 0.09694617462158203, 0.09710694122314453, 0.09760387420654297, 0.09892556762695312, 0.09805385589599609, 0.09802690887451172, 0.09739119720458984, 0.0972484130859375, 0.09805852508544922, 0.10770079803466796, 0.09811558532714844, 0.09762108612060547, 0.10020956420898437, 0.0994978256225586, 0.09857405090332032, 0.09973600006103515, 0.09774479675292969, 0.09780233764648437, 0.09769004821777344, 0.097438720703125, 0.0969208984375, 0.0973267822265625, 0.0979330596923828, 0.09740489959716797, 0.09716121673583984, 0.09950246429443359, 0.09692176055908203, 0.09721011352539062, 0.09713890838623047, 0.09691484832763672, 0.09690313720703125, 0.09720054626464844, 0.09715020751953125, 0.0972869415283203, 0.0971816635131836, 0.09680076599121094, 0.0975257568359375, 0.09709795379638672, 0.09729334259033204, 0.09740742492675782, 0.09744802856445313, 0.09746454620361328, 0.09754214477539062, 0.09806438446044922, 0.09785958099365234, 0.09730662536621094, 0.09780633544921875, 0.09724301147460937, 0.09713062286376953, 0.09703740692138672, 0.09664604949951172, 0.09708338928222657, 0.0976579818725586, 0.09756556701660156, 0.09717884826660156, 0.09952873229980469, 0.09715583801269531, 0.0975189437866211, 0.0973395233154297, 0.09704633331298829, 0.09690799713134765, 0.09740902709960937, 0.09670861053466796, 0.09743666839599609, 0.09726387023925781, 0.09681990051269532, 0.0971014404296875, 
0.0970101089477539, 0.09750313568115235, 0.09762006378173828, 0.09734070587158203, 0.09948614501953125, 0.09815289306640625, 0.09773554992675781, 0.09768141174316407, 0.09778240203857422, 0.09719229125976563, 0.09804185485839843, 0.1019775390625, 0.09741779327392579, 0.09667378997802735, 0.0969466552734375, 0.09692364501953125, 0.09724313354492188, 0.09719113922119141, 0.09638582611083985, 0.09623481750488282, 0.09680464172363282, 0.0969896011352539, 0.09684019470214844, 0.09712406158447266, 0.0971103057861328, 0.09711001586914063, 0.09803775787353515, 0.09795513916015625, 0.0974691162109375, 0.09751353454589844, 0.09759532928466796, 0.09802457427978516, 0.09786009979248046, 0.09791356658935547, 0.09768524932861328, 0.09764774322509766, 0.09797238159179687, 0.09713318634033204, 0.09861865234375, 0.09699827575683594, 0.09690438079833984, 0.09711382293701172, 0.09693484497070312, 0.1039974365234375, 0.09687446594238282, 0.09873923492431641, 0.09806758117675782, 0.09722662353515625, 0.09725660705566407, 0.09707810974121094, 0.09686573028564453, 0.09697494506835938, 0.09711443328857422, 0.09710610961914062, 0.09700361633300782, 0.0981226577758789, 0.09779225921630859, 0.09829759979248047, 0.09787696075439453, 0.09751328277587891, 0.09760377502441406, 0.09738854217529297, 0.09802540588378907, 0.09724630737304688, 0.09685091400146484, 0.09697676849365235, 0.09692111968994141, 0.09751721954345703, 0.09731372833251953, 0.09736761474609375, 0.0973193588256836, 0.09678265380859374, 0.09777097320556641, 0.09704255676269531, 0.09688492584228516, 0.0970462417602539, 0.09698934173583984, 0.09723577880859376, 0.09714278411865235, 0.09729804992675781, 0.0973700180053711, 0.09746070098876954, 0.09781145477294922, 0.09768991851806641, 0.09772723388671875, 0.09787795257568359, 0.09865408325195313, 0.09784333038330079, 0.09878323364257813, 0.0981909408569336, 0.09758761596679688, 0.09720182037353516, 0.09721510314941406, 0.09709500885009766, 0.09707151794433594, 0.0971263656616211, 0.09769107055664063, 0.09775312042236328, 0.09732150268554687, 0.09725746917724609, 0.09755580902099609, 0.09720694732666016, 0.09728975677490234, 0.09787836456298828, 0.10003657531738282, 0.09798796844482421, 0.09771190643310547, 0.0989665298461914, 0.09749497222900391, 0.09758313751220703, 0.10172937774658203, 0.10127388763427735, 0.09799648284912109, 0.09752470397949219, 0.0977427215576172, 0.09757497406005859, 0.09768931579589844, 0.09880409240722657, 0.09783599853515625, 0.09711833953857422, 0.09710454559326172, 0.09730687713623047, 0.09798652648925782, 0.09780873870849609, 0.09764173126220703, 0.09797878265380859, 0.09701187133789063, 0.09833360290527343, 0.09803469085693359, 0.09723670196533203, 0.09739469146728516, 0.09696192169189453, 0.09821222686767578, 0.09682377624511719, 0.09702604675292968, 0.09672704315185547, 0.09736985778808593, 0.09754425811767578, 0.0981542739868164, 0.09732937622070313, 0.09745391845703125, 0.09815830230712891, 0.09772486114501953, 0.10134687805175781, 0.09757663726806641, 0.10163890838623046, 0.09768048095703125, 0.0981083221435547, 0.09684278106689453, 0.09684207916259766, 0.096482177734375, 0.09705171203613282, 0.09717420959472656, 0.0966328353881836, 0.09660620880126954, 0.0963864974975586, 0.09668665313720703, 0.09787564849853515, 0.0972886734008789, 0.0972470703125, 0.0973469467163086, 0.09739110565185546, 0.09739791870117187, 0.09690415954589844, 0.09707315063476563, 0.09727798461914063, 0.09725510406494141, 0.09806025695800781, 0.09778336334228516, 0.09781916809082031, 
0.09760307312011719, 0.09780713653564453, 0.09807046508789062, 0.09691779327392579, 0.09668704223632812, 0.09682358551025391, 0.09685453033447265, 0.09755238342285157, 0.09807001495361328, 0.09723481750488282, 0.09682393646240234, 0.09752722930908203, 0.09700409698486329, 0.09722374725341797, 0.09861209869384766, 0.09739065551757813, 0.09658367919921874, 0.09743974304199218, 0.09788825225830078, 0.10003865814208984, 0.10473881530761718, 0.09739376068115234, 0.09801821136474609, 0.09805209350585938, 0.09806556701660156, 0.09792969512939453, 0.09768131256103516, 0.09796246337890625, 0.09859212493896484, 0.09865484619140626, 0.0982999038696289, 0.09842073822021484, 0.09739878082275391, 0.0968852767944336, 0.09936201477050781, 0.09741734313964844, 0.09740486145019531, 0.09800291442871094, 0.097870849609375, 0.09736934661865235, 0.09847977447509766, 0.09902985382080078, 0.09806134033203125, 0.09730147552490234, 0.09785472106933593, 0.09725414276123047, 0.097091552734375, 0.09753600311279297, 0.09814617919921875, 0.09776860809326172, 0.10032434844970703, 0.09769481658935547, 0.09880636596679687, 0.09799817657470702, 0.09776947021484375, 0.09758342742919922, 0.09723356628417969, 0.09784268951416016, 0.09766553497314454, 0.09726771545410157, 0.09755878448486328, 0.09683299255371093, 0.09747833251953125, 0.09764243316650391, 0.09706358337402343, 0.09719602966308594, 0.0972220458984375, 0.09772198486328125, 0.09770902252197265, 0.09726528167724609, 0.09727616119384766, 0.09692787170410157, 0.09791849517822265, 0.09764201354980469, 0.10104307556152343, 0.09757907104492188, 0.0989675521850586, 0.09807574462890625, 0.09794652557373047, 0.09851273345947266, 0.09767542266845704, 0.09798006439208984, 0.09772271728515625, 0.09822777557373047, 0.09808531188964843, 0.09702642822265625, 0.09720178985595704, 0.0970478744506836, 0.09742540740966797, 0.09791522979736328, 0.0975722885131836, 0.09781954956054688, 0.09729363250732422, 0.09813676452636719, 0.10174262237548828, 0.09803363037109375, 0.09718479919433594, 0.09807341003417969, 0.09729561614990234, 0.09723177337646484, 0.0971277084350586, 0.09723363494873047, 0.09743875122070313, 0.09733382415771484, 0.097766845703125, 0.09807357025146485, 0.09788416290283203, 0.09859609222412109, 0.09808358764648438, 0.09822550201416015, 0.09805891418457031, 0.09818045043945313, 0.09758310699462891, 0.09741145324707032, 0.09704627227783204, 0.09711260986328125, 0.09714911651611328, 0.09721222686767578, 0.09799449920654296, 0.0993828125, 0.09784390258789062, 0.09732713317871093, 0.09782179260253906, 0.097272705078125, 0.09686656188964844, 0.09696803283691406, 0.09692316436767578, 0.09766806030273438, 0.09799852752685546, 0.09726496124267578, 0.09716806030273438, 0.09677619171142578, 0.09778201293945313, 0.09802137756347656, 0.09787171173095703, 0.10172022247314454, 0.09834223937988282, 0.09880671691894531, 0.09861090850830079, 0.09843917083740235, 0.09785529327392578, 0.0986272964477539, 0.09977903747558593, 0.0978634262084961, 0.09729856109619141, 0.09728383636474609, 0.09699686431884766, 0.09702285003662109, 0.09791808319091796, 0.0982847671508789, 0.0976767349243164, 0.09757308959960938, 0.09723075103759765, 0.09739273834228515, 0.09748220825195313, 0.09742115020751953, 0.09739539337158203, 0.09715312194824219, 0.09855084991455078, 0.09786924743652343, 0.09778387451171874, 0.09765715026855469, 0.097974365234375, 0.09777974700927734, 0.0975255355834961, 0.09798246765136719, 0.09776438140869141, 0.09765174102783203, 0.09752336120605469, 0.09723632049560547, 
0.09812700653076172, 0.0973629150390625, 0.09778275299072266, 0.09796387481689453, 0.10113433837890624, 0.09850675201416016, 0.0997266845703125, 0.09761039733886719, 0.09785753631591797, 0.0974457244873047, 0.0977143325805664, 0.09708544158935548, 0.09742054748535156, 0.097970947265625, 0.09726726531982421, 0.09879801940917969, 0.11686080169677734, 0.11873017883300781, 0.1197339859008789, 0.11571094512939453, 0.11624038696289063, 0.11676831817626954, 0.1166094741821289, 0.11470642852783203, 0.1151338882446289, 0.11478438568115235, 0.11532742309570312, 0.11550694274902344, 0.11477340698242187, 0.11440624237060547, 0.11785215759277344, 0.10521702575683593, 0.10968109130859376, 0.10991468811035156, 0.1159331817626953, 0.1149908447265625, 0.11611714935302735, 0.11606486511230468, 0.11169586944580077, 0.11484979248046875, 0.1167831039428711, 0.10999603271484375, 0.09804595184326172, 0.10643222045898437, 0.11655353546142579, 0.10579357147216797, 0.10686508941650391, 0.09892390441894532, 0.09874905395507813, 0.09810329437255859, 0.1093570556640625, 0.11542521667480468]",tokens/s,10.149271650844613,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,4833.247232,2562.588672,0.0,2160.06656,2133.479936,s,1,12.2845693359375,12.2845693359375,0.0,12.2845693359375,12.2845693359375,12.2845693359375,12.2845693359375,[12.2845693359375],,kWh,0.000149020971137467,1.6424179350817344e-05,4.730170450800708e-05,0.00021274685499629142,,MB,4888.51456,2667.446272,0.0,2243.95264,2227.365376,s,10,0.8796755218505858,0.08796755218505858,0.0006577396396732701,0.08811176300048829,0.08874981613159179,0.0887866512298584,0.08881611930847169,"[0.08720588684082031, 0.08702646636962891, 0.08704118347167969, 0.08874163055419922, 0.08796272277832032, 0.08862083435058593, 0.088823486328125, 0.08771695709228515, 0.08826080322265625, 0.08827555084228515]",tokens/s,2910.163959791096,kWh,2.584164053798041e-06,2.8498811394102814e-07,1.2277369703894899e-06,4.096889138128559e-06,tokens/kWh,62486435.77329985,MB,4892.643328,2711.486464,0.0,2285.89568,2216.086528,s,10,55.10044921875,5.510044921875,0.027450188222528215,5.514744873046874,5.5446395019531245,5.546572436523437,5.548118784179687,"[5.46329833984375, 5.46391064453125, 5.507693359375, 5.5442099609375, 5.51824658203125, 5.51346044921875, 5.516029296875, 5.49767724609375, 5.52741796875, 5.54850537109375]",tokens/s,11.43366358954509,kWh,0.00016151410190953804,1.7815588403322598e-05,5.8027115989008314e-05,0.00023735680630186895,tokens/kWh,265423.1870641071,,s,630,55.098264793396,0.08745756316412064,0.0010247304598753836,0.0872806396484375,0.08844811096191407,0.08938607177734376,0.09121196327209473,"[0.08946812438964843, 0.08683302307128907, 0.08707164764404297, 0.08686099243164062, 0.08669612884521484, 0.0864527359008789, 0.08620995330810546, 0.08631756591796876, 0.08627362823486329, 0.08717699432373047, 0.0865074234008789, 
0.08672557067871094, 0.08710694122314454, 0.08705702209472656, 0.08630384063720703, 0.0863543701171875, 0.0859504623413086, 0.08633392333984374, 0.08602166748046874, 0.08595913696289062, 0.08647475433349609, 0.08734925079345703, 0.09095577239990234, 0.08678604888916015, 0.0863229751586914, 0.08631318664550781, 0.08584982299804687, 0.08595062255859375, 0.08618816375732422, 0.08604022216796875, 0.08597948455810547, 0.0866275177001953, 0.08603731536865235, 0.0864644775390625, 0.08644815826416016, 0.0863477783203125, 0.08617068481445313, 0.08639785766601563, 0.08626383972167968, 0.08671459197998047, 0.08688822174072265, 0.08713801574707031, 0.08620396423339843, 0.08651542663574219, 0.0862955551147461, 0.0865423355102539, 0.08638668823242188, 0.08694169616699218, 0.08671142578125, 0.09061260986328125, 0.08672608184814454, 0.0866412124633789, 0.0865259552001953, 0.08633753967285156, 0.0865054702758789, 0.08632319641113281, 0.08636937713623047, 0.08699791717529297, 0.08662834930419921, 0.08831385803222656, 0.08739635467529297, 0.08647398376464843, 0.08625638580322266, 0.08769737243652344, 0.08614466857910157, 0.0858054428100586, 0.08599263763427735, 0.08946294403076172, 0.08725571441650391, 0.0880373764038086, 0.08715865325927734, 0.08722230529785156, 0.08650870513916016, 0.08659635162353516, 0.08652777862548829, 0.08624944305419922, 0.0859959716796875, 0.08613817596435547, 0.08582624053955078, 0.08645136260986327, 0.08691593933105468, 0.08617164611816407, 0.08654643249511719, 0.08617369842529297, 0.08641535949707031, 0.08622284698486328, 0.08627324676513672, 0.08609436798095703, 0.08622652435302734, 0.08619280242919922, 0.08611634826660156, 0.08669593811035156, 0.08740643310546875, 0.08668399810791015, 0.08643360137939453, 0.08628364562988282, 0.08625532531738281, 0.08622991943359375, 0.08629043579101563, 0.08611138916015625, 0.09041785430908203, 0.08724262237548829, 0.08701744079589843, 0.08692768096923828, 0.08710963439941406, 0.08681072235107422, 0.08627129364013672, 0.08673535919189453, 0.08694544219970703, 0.08652617645263672, 0.08638690948486329, 0.08639692687988282, 0.087193603515625, 0.08731238555908204, 0.08977613067626954, 0.08734099578857422, 0.08730630493164063, 0.08683519744873047, 0.08642559814453125, 0.08664166259765625, 0.0862402572631836, 0.08626153564453125, 0.08621202850341797, 0.0859999008178711, 0.08599603271484375, 0.08655773162841797, 0.08677629089355468, 0.08652352142333984, 0.08624598693847656, 0.08599346923828124, 0.0885791015625, 0.08638972473144531, 0.08644812774658203, 0.08648294067382813, 0.08595005035400391, 0.08646492767333984, 0.0866693115234375, 0.08653823852539062, 0.08730009460449219, 0.08689180755615235, 0.08638662719726563, 0.08710749053955077, 0.0875815658569336, 0.08777507019042968, 0.08778089904785157, 0.08749120330810548, 0.08739430236816406, 0.0870830078125, 0.08666726684570313, 0.08647468566894531, 0.08657647705078125, 0.08639766693115235, 0.08614502716064452, 0.08848348999023438, 0.0863985595703125, 0.08697440338134765, 0.08776172637939453, 0.08706253051757812, 0.08650752258300781, 0.08656665802001953, 0.08641970825195312, 0.08650962829589844, 0.0865156478881836, 0.09605308532714844, 0.08661619567871094, 0.08675945281982422, 0.08679154968261718, 0.08741069030761718, 0.08744569396972657, 0.08772223663330078, 0.08687020874023438, 0.08680413055419922, 0.0892760009765625, 0.0870836181640625, 0.08744156646728515, 0.08728166198730469, 0.08755773162841797, 0.08978189086914062, 0.08796221160888672, 0.08764332580566406, 0.08844758605957032, 
0.09036431884765625, 0.08862515258789062, 0.08902655792236328, 0.08861081695556641, 0.08809811401367187, 0.08804627227783203, 0.08814335632324219, 0.08828774261474609, 0.0883978271484375, 0.08821107482910157, 0.09131660461425781, 0.0879310073852539, 0.08806089782714843, 0.0884130859375, 0.08894873809814453, 0.08834201812744141, 0.0886666259765625, 0.08822541046142578, 0.08801023864746094, 0.08838233947753907, 0.08816242980957031, 0.08814169311523437, 0.08789148712158203, 0.08807881927490234, 0.08798127746582031, 0.08784544372558593, 0.08788771057128907, 0.08798604583740234, 0.08789823913574218, 0.0881053466796875, 0.08797154998779297, 0.08819692993164062, 0.08797641754150391, 0.08796979522705078, 0.08770150756835937, 0.08919245147705078, 0.08845283508300782, 0.08807247924804687, 0.08750694274902343, 0.0878571548461914, 0.08772608184814454, 0.0871457290649414, 0.08749951934814452, 0.08876755523681641, 0.08814892578125, 0.09312627410888671, 0.08887042999267578, 0.08916668701171875, 0.08831702423095703, 0.08768605041503906, 0.08746329498291015, 0.0874698257446289, 0.08722930908203125, 0.08713215637207031, 0.08707695770263672, 0.08707472229003907, 0.08719769287109375, 0.08719926452636718, 0.08800624084472657, 0.08787443542480469, 0.08722227478027343, 0.08714444732666016, 0.08776211547851563, 0.08784297943115234, 0.08769779205322266, 0.08681910705566406, 0.08683110046386719, 0.0867799072265625, 0.08710585784912109, 0.0875660171508789, 0.08726515197753906, 0.086974365234375, 0.08745462036132813, 0.08756153869628906, 0.08739485168457031, 0.08775027465820312, 0.08711395263671876, 0.08690105438232422, 0.08697586822509766, 0.08676809692382813, 0.0872531509399414, 0.08756633758544922, 0.08707398223876953, 0.08765952301025391, 0.08745331573486329, 0.08741632080078125, 0.08769123077392578, 0.08712265777587891, 0.08721769714355469, 0.08673942565917969, 0.08780595397949219, 0.0880814437866211, 0.0888022689819336, 0.08811837005615235, 0.09137833404541015, 0.08811158752441406, 0.08730335998535156, 0.08763609313964844, 0.0875074234008789, 0.08704790496826172, 0.086955810546875, 0.08709171295166016, 0.08712191772460938, 0.08972112274169922, 0.08789955139160156, 0.0872451171875, 0.08807218933105469, 0.08866604614257813, 0.09001785278320312, 0.08777523040771484, 0.08709069061279297, 0.08712652587890625, 0.0898623046875, 0.08757798767089844, 0.08762416076660157, 0.08712569427490234, 0.08725536346435547, 0.0872099838256836, 0.08739389038085937, 0.0871223373413086, 0.08734310150146485, 0.08702918243408203, 0.08700752258300781, 0.08677814483642578, 0.08686972808837891, 0.08710377502441406, 0.08749667358398437, 0.08804969787597657, 0.08719680023193359, 0.08680847930908203, 0.08715119934082032, 0.08751094055175782, 0.08761974334716797, 0.0882092514038086, 0.08816639709472657, 0.08835836791992188, 0.08828368377685547, 0.08775679779052735, 0.08763391876220702, 0.08731033325195313, 0.08709734344482421, 0.08755184173583984, 0.088318115234375, 0.08757673645019531, 0.08732633972167969, 0.0871569595336914, 0.08718335723876953, 0.08757657623291015, 0.08810896301269532, 0.09010550689697265, 0.0881731185913086, 0.08759894561767578, 0.08771177673339844, 0.08701542663574219, 0.08749056243896484, 0.08798207855224609, 0.08795247650146484, 0.08861174774169922, 0.08761138916015625, 0.0880432357788086, 0.0874969940185547, 0.08706636810302734, 0.08682521820068359, 0.086744384765625, 0.08687891387939453, 0.08699903869628907, 0.08727961730957032, 0.08713606262207031, 0.08728614044189453, 0.08706208038330078, 0.08732899475097657, 
0.08718339538574219, 0.08678150177001953, 0.0867127685546875, 0.086764892578125, 0.08700994873046874, 0.08690278625488282, 0.08686370849609375, 0.0870148468017578, 0.09067183685302735, 0.08720387268066407, 0.08703766632080077, 0.08697679901123047, 0.08717072296142578, 0.08729631805419921, 0.08782582092285156, 0.08767549133300781, 0.08720178985595703, 0.08730009460449219, 0.08760044860839844, 0.08764281463623047, 0.0873164825439453, 0.08705843353271485, 0.08718524932861328, 0.08822185516357423, 0.08721762847900391, 0.08758531188964844, 0.08821820831298828, 0.08787331390380859, 0.08771401977539063, 0.08770902252197266, 0.08756291198730469, 0.08706185913085937, 0.08707753753662109, 0.08703590393066406, 0.08750899505615234, 0.09024018859863281, 0.08740252685546875, 0.08719439697265625, 0.08689049530029297, 0.0871192626953125, 0.08798678588867187, 0.08751103973388671, 0.08832784271240235, 0.08833987426757813, 0.09000214385986328, 0.0884902114868164, 0.0875162582397461, 0.08715277099609375, 0.08742979431152344, 0.08860006713867187, 0.0869464340209961, 0.08691712188720703, 0.08694377899169922, 0.09140838623046875, 0.08841417694091797, 0.08820735931396484, 0.08780134582519532, 0.0876138916015625, 0.0879043197631836, 0.0875101089477539, 0.08730659484863282, 0.0876358413696289, 0.0877198715209961, 0.0882325439453125, 0.08750505828857422, 0.08742092895507812, 0.08767488098144531, 0.08758220672607422, 0.08823040008544922, 0.0867301788330078, 0.0869135971069336, 0.0874567642211914, 0.08722866821289063, 0.08682806396484374, 0.08667692565917968, 0.0865426254272461, 0.08650240325927734, 0.08727616119384765, 0.08760153961181641, 0.08722978973388672, 0.08698537445068359, 0.08703385925292968, 0.0869722900390625, 0.08734233856201172, 0.08657107543945312, 0.08661260986328125, 0.08662179565429687, 0.08658345794677734, 0.0871651840209961, 0.08821046447753907, 0.08692425537109374, 0.08662601470947266, 0.08682870483398437, 0.08682764434814454, 0.08683468627929687, 0.08663235473632812, 0.08817014312744141, 0.08825894165039062, 0.08907411193847656, 0.08875020599365234, 0.08788365173339843, 0.08763919830322266, 0.08735577392578125, 0.08717938995361328, 0.0871674575805664, 0.08709529876708984, 0.08690419006347656, 0.0871041259765625, 0.08692675018310547, 0.08748210906982422, 0.0868597412109375, 0.08690367889404296, 0.08871731567382812, 0.08807628631591796, 0.086614013671875, 0.08671027374267579, 0.08731577301025391, 0.08665158081054687, 0.08685081481933593, 0.08684953308105468, 0.0865697250366211, 0.0868823013305664, 0.08652185821533204, 0.08718646240234375, 0.08717206573486327, 0.08673280334472656, 0.08692940521240235, 0.0867962875366211, 0.08690870666503907, 0.08653437042236328, 0.08735743713378906, 0.08671641540527343, 0.086840576171875, 0.0866065902709961, 0.08673849487304687, 0.08720387268066407, 0.08850883483886719, 0.08725504302978515, 0.08688355255126953, 0.08754019165039062, 0.08687760162353515, 0.0872921600341797, 0.08692189025878906, 0.08678585815429687, 0.08688162994384765, 0.08674518585205078, 0.08670489501953126, 0.08820658874511719, 0.08841407775878907, 0.09028684997558593, 0.0876725082397461, 0.08737427520751953, 0.08727353668212891, 0.08746070098876953, 0.087117919921875, 0.08705225372314453, 0.09019599914550781, 0.08793087768554687, 0.087766845703125, 0.08768121337890625, 0.08747122955322266, 0.08941606140136718, 0.08684198760986328, 0.08686115264892579, 0.08776553344726562, 0.08839081573486328, 0.09075318145751952, 0.08711347198486329, 0.08734815979003906, 0.08743711853027344, 0.08710777282714843, 
0.08753724670410157, 0.08690306854248046, 0.08819622039794922, 0.08696470642089844, 0.08666738891601562, 0.0874617919921875, 0.08939075469970703, 0.08682697296142577, 0.08692582702636718, 0.08740902709960938, 0.08705843353271485, 0.08709939575195312, 0.08725094604492188, 0.08716902160644531, 0.08733692932128906, 0.08837670135498046, 0.08725475311279297, 0.08711023712158203, 0.08715644836425782, 0.08820800018310547, 0.08785874938964844, 0.08803782653808594, 0.08771305847167969, 0.09134950256347656, 0.08737200164794921, 0.08742912292480469, 0.08773129272460937, 0.08745855712890625, 0.08725849914550782, 0.08709916687011719, 0.08708914947509766, 0.08730550384521485, 0.08708067321777344, 0.08664883422851563, 0.08723661041259766, 0.08801036834716797, 0.08801728057861329, 0.08741683197021484, 0.08757209777832031, 0.08816883087158203, 0.08809008026123047, 0.0917938232421875, 0.08775897979736329, 0.08839801788330078, 0.0880271987915039, 0.08835679626464844, 0.09064857482910156, 0.088133056640625, 0.08826659393310547, 0.08826982116699218, 0.08845878601074218, 0.08854653167724609, 0.08778339385986328, 0.08871014404296874, 0.0878787841796875, 0.08785395050048828, 0.08798207855224609, 0.08847564697265625, 0.08819644927978515, 0.08819779205322266, 0.08793670654296876, 0.08798390197753907, 0.0880308837890625, 0.08799321746826172, 0.08785027313232421, 0.08775545501708984, 0.0880008316040039, 0.08848969268798829, 0.08831366729736329, 0.08997264099121094, 0.08938034820556641, 0.0886607666015625, 0.08819712066650391, 0.08777523040771484, 0.08751308441162109, 0.08715398406982422, 0.08730489349365235, 0.08757071685791015, 0.08723017883300781, 0.08744713592529296, 0.08852489471435547, 0.08761494445800781, 0.08760406494140625, 0.08754761505126953, 0.08754108428955078, 0.0873235855102539, 0.08723401641845703, 0.08723043060302735, 0.08763174438476562, 0.08720044708251953, 0.09026521301269531, 0.08821702575683593, 0.08789292907714844, 0.08756633758544922, 0.08746623992919922, 0.08760704040527344, 0.0876968994140625, 0.08744601440429688, 0.08956272125244141, 0.08864195251464843, 0.08773017883300781, 0.08790425872802735, 0.08796160125732422, 0.08958566284179688, 0.08796268463134765, 0.08762054443359375, 0.08736930847167969]",tokens/s,11.434116888477963,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,6527.143936,3722.313728,0.0,3319.791616,3239.455232,s,1,17.214466796875,17.214466796875,0.0,17.214466796875,17.214466796875,17.214466796875,17.214466796875,[17.214466796875],,kWh,0.00029433284332083077,3.246008745183445e-05,9.805396733199301e-05,0.00042484689810465823,,MB,1941.004288,4005.429248,0.0,3581.935616,3476.932096,s,10,0.8319776306152343,0.08319776306152343,0.0008983214685211439,0.08296804809570313,0.0843398208618164,0.08441792678833009,0.08448041152954101,"[0.0820752944946289, 0.08275094604492188, 0.08266831970214844, 
0.08318515014648438, 0.08231539154052735, 0.08209983825683594, 0.08449603271484375, 0.08407123565673828, 0.08399295806884766, 0.08432246398925781]",tokens/s,3077.0058061620243,kWh,2.456403164705989e-06,2.708956621918573e-07,1.555026920958066e-06,4.282325747855913e-06,tokens/kWh,59780599.39232199,MB,1962.135552,4007.5264,0.0,3581.935616,3472.45568,s,10,51.21604150390624,5.121604150390625,0.06878369406727511,5.11907080078125,5.198025390625,5.202694580078125,5.206429931640625,"[5.04357763671875, 5.08715966796875, 5.02495654296875, 5.0642353515625, 5.05906982421875, 5.15098193359375, 5.19212841796875, 5.20736376953125, 5.18958056640625, 5.19698779296875]",tokens/s,12.30083351818492,kWh,0.00015127436079654367,1.668604067375648e-05,6.223763522424035e-05,0.0002301980366945405,tokens/kWh,273677.3992716426,,s,630,51.21349683380125,0.08129126481555758,0.0014128444694732273,0.08121585845947266,0.08266603698730468,0.08311273384094238,0.0857038871765137,"[0.0794748764038086, 0.07982284545898438, 0.0794808349609375, 0.07932915496826172, 0.07939807891845703, 0.08004844665527344, 0.07955017852783203, 0.08049919891357422, 0.08023900604248047, 0.07988374328613282, 0.07973737335205078, 0.07985779571533203, 0.07958252716064453, 0.0795481948852539, 0.07958403015136718, 0.07941446685791016, 0.07977394866943359, 0.07968361663818359, 0.07981484985351563, 0.07997475433349609, 0.08078131103515625, 0.07994982147216798, 0.07992934417724609, 0.07965257263183594, 0.08025238037109375, 0.08040636444091796, 0.0802573471069336, 0.08011843109130859, 0.08015666961669922, 0.0794439697265625, 0.07950883483886718, 0.07942530822753906, 0.07945238494873047, 0.08013807678222656, 0.07943020629882812, 0.07976969909667969, 0.08024899291992188, 0.07988140869140625, 0.07958019256591797, 0.07935772705078124, 0.07973856353759766, 0.07951593780517578, 0.08054991912841797, 0.0795525131225586, 0.07939481353759766, 0.080057373046875, 0.08025596618652343, 0.08023056030273437, 0.08011289978027344, 0.0813135986328125, 0.08059318542480469, 0.08063549041748047, 0.08208988952636719, 0.08093952178955079, 0.08025084686279296, 0.08041321563720703, 0.08021721649169922, 0.08032755279541015, 0.08112127685546874, 0.08085036468505859, 0.08085868835449218, 0.0811673583984375, 0.0807213134765625, 0.08036768341064453, 0.0803465576171875, 0.08042144012451172, 0.08026505279541016, 0.080228515625, 0.08013823699951172, 0.08007884979248046, 0.08094003295898437, 0.08049129486083985, 0.0808911361694336, 0.08081302642822266, 0.08047849273681641, 0.080633056640625, 0.08052992248535157, 0.08062770843505859, 0.08042086029052735, 0.08014777374267579, 0.08068985748291016, 0.08082588958740235, 0.08100857543945313, 0.0802442855834961, 0.08025801849365234, 0.08118271636962891, 0.08209008026123046, 0.08460643005371093, 0.08153241729736328, 0.08316182708740234, 0.08204428863525391, 0.082031005859375, 0.08071826934814454, 0.08064998626708984, 0.08076927947998047, 0.08061440277099609, 0.08085401916503906, 0.08071782684326172, 0.0812125473022461, 0.0806368637084961, 0.08063951873779297, 0.080376220703125, 0.0801075210571289, 0.08013414764404297, 0.08018534088134766, 0.08195104217529296, 0.08070652770996094, 0.08058905792236327, 0.08062950134277344, 0.08190182495117188, 0.080859619140625, 0.08074185943603515, 0.08044393920898438, 0.08004812622070312, 0.08023792266845703, 0.08100624084472656, 0.08029491424560548, 0.08020492553710938, 0.08000918579101562, 0.0801965103149414, 0.08022220611572266, 0.08010546875, 0.08042192077636719, 0.08042323303222657, 0.0806297607421875, 
0.08018627166748046, 0.08071372985839843, 0.08008089447021484, 0.07976140594482421, 0.07956476593017578, 0.07918380737304688, 0.0795505599975586, 0.07948255920410156, 0.07936774444580078, 0.07930547332763672, 0.07915519714355469, 0.07975222778320312, 0.07920249938964843, 0.07919900512695313, 0.079067138671875, 0.07887667083740234, 0.07932249450683594, 0.079538818359375, 0.07961350250244141, 0.07941779327392579, 0.07926274871826172, 0.07956531524658203, 0.07916387176513671, 0.07975116729736328, 0.08035533142089844, 0.07964588928222656, 0.08147026824951172, 0.0796610565185547, 0.07923506927490234, 0.07931289672851563, 0.07918172454833984, 0.07944348907470702, 0.07967555236816407, 0.08102684783935547, 0.08008153533935547, 0.07979631805419922, 0.08004803466796875, 0.0799109115600586, 0.07967871856689453, 0.07980086517333984, 0.08027500915527344, 0.08038646697998048, 0.07977152252197266, 0.08020620727539063, 0.08025619506835938, 0.08009951782226563, 0.08040512084960938, 0.07947673797607421, 0.07970320129394531, 0.08021846771240235, 0.07993395233154296, 0.0796262435913086, 0.08017110443115234, 0.07993692779541016, 0.07972300720214844, 0.07948697662353515, 0.07980032348632812, 0.07969996643066406, 0.07961804962158203, 0.07996006774902344, 0.07962828826904297, 0.0796684799194336, 0.08005862426757812, 0.08037014770507812, 0.08015142059326172, 0.08022022247314453, 0.08013200378417969, 0.08010345458984375, 0.0800260467529297, 0.0800732192993164, 0.08041471862792969, 0.0800227813720703, 0.07969868469238281, 0.08004198455810548, 0.08011942291259766, 0.07998297882080078, 0.08021372985839843, 0.08014466857910156, 0.08003964996337891, 0.07998655700683593, 0.08009932708740235, 0.08049705505371094, 0.08050617980957031, 0.08041065979003906, 0.08025564575195313, 0.08600559997558593, 0.08197881317138672, 0.0805211181640625, 0.08026604461669921, 0.0816185302734375, 0.080197021484375, 0.08011468505859375, 0.08023808288574219, 0.08033126068115234, 0.08099430084228515, 0.0803594207763672, 0.08009891510009766, 0.08029020690917969, 0.08033280181884765, 0.08027043151855469, 0.08037468719482421, 0.08040857696533203, 0.08092588806152344, 0.08330118560791015, 0.0805212173461914, 0.08019500732421875, 0.08038768005371094, 0.08088790130615234, 0.08038185882568359, 0.08010441589355469, 0.08025218963623047, 0.07971094512939453, 0.08035123443603516, 0.08035708618164063, 0.08128336334228516, 0.07953202819824219, 0.07958457946777343, 0.07994847869873047, 0.07964672088623047, 0.07962214660644532, 0.08043119812011719, 0.07973468780517579, 0.08004402923583985, 0.07956626892089844, 0.07983708953857421, 0.07967987060546874, 0.08016294097900391, 0.08009584045410156, 0.08097586822509766, 0.07996230316162109, 0.07994866943359374, 0.08068550109863282, 0.08010134124755859, 0.07997494506835938, 0.08007270050048829, 0.07990886688232422, 0.0798268814086914, 0.08017926025390625, 0.08000685119628906, 0.080067138671875, 0.08025389099121094, 0.08026300811767578, 0.07992784118652344, 0.0804172134399414, 0.08167587280273438, 0.08022262573242188, 0.08050086212158203, 0.08027737426757812, 0.08042803192138671, 0.08016998291015626, 0.08029987335205079, 0.08023670196533203, 0.0801648941040039, 0.08017696380615234, 0.08037187194824219, 0.07996387481689453, 0.08128336334228516, 0.08037149047851562, 0.08010553741455079, 0.08038825225830078, 0.0801009292602539, 0.08012025451660157, 0.08022438049316406, 0.08007667541503906, 0.08042934417724609, 0.08007984161376953, 0.08015283203125, 0.0800932159423828, 0.08043158721923828, 0.08052735900878906, 
0.08019967651367188, 0.0801377944946289, 0.08041295623779297, 0.07991849517822265, 0.07972550201416016, 0.079785888671875, 0.0808603515625, 0.08004988861083985, 0.0799832000732422, 0.07991574096679688, 0.08015990447998046, 0.08121916961669921, 0.0804115219116211, 0.08000723266601563, 0.08019967651367188, 0.07969980621337891, 0.08025513458251952, 0.08522137451171875, 0.07962214660644532, 0.07949027252197266, 0.07962588500976563, 0.07973907470703125, 0.08052681732177734, 0.08009561920166015, 0.08035276794433593, 0.08011756896972656, 0.07986688232421875, 0.0800153579711914, 0.08025497436523438, 0.07976271820068359, 0.0801714859008789, 0.08062083435058594, 0.08011273956298828, 0.08036262512207032, 0.07970630645751953, 0.08090016174316406, 0.07984732818603515, 0.08925001525878906, 0.08403123474121094, 0.08268809509277343, 0.08058729553222656, 0.08085708618164063, 0.08099625396728516, 0.08140605163574219, 0.08341709136962891, 0.08206358337402343, 0.0820531234741211, 0.08247078704833985, 0.08178678131103516, 0.08155545806884766, 0.08189542388916016, 0.08191340637207031, 0.0817034912109375, 0.08209417724609375, 0.08292124938964844, 0.08782335662841798, 0.08225827026367187, 0.08208652496337891, 0.08189724731445312, 0.08265910339355469, 0.0841138916015625, 0.08208739471435547, 0.08181407928466797, 0.08188499450683594, 0.08170307159423829, 0.0816329574584961, 0.08126080322265625, 0.08162902069091797, 0.08146761322021484, 0.08210431671142578, 0.081723388671875, 0.08196300506591797, 0.08266521453857421, 0.08200217437744141, 0.0824238052368164, 0.08205072021484375, 0.08245110321044923, 0.08205075073242188, 0.08206553649902344, 0.08223117065429687, 0.08200601959228515, 0.08268185424804687, 0.08221900939941407, 0.08277606201171875, 0.08257257843017578, 0.08244316864013672, 0.08337798309326172, 0.08305055999755859, 0.08219641876220703, 0.08233984375, 0.08256204986572266, 0.08228352355957032, 0.08218605041503907, 0.08221510314941406, 0.08238489532470702, 0.08241766357421874, 0.0823306884765625, 0.08237583923339843, 0.0821368637084961, 0.08249549102783203, 0.08238880157470703, 0.08214752197265625, 0.08214060974121094, 0.08212742614746094, 0.08238079833984376, 0.08294195556640625, 0.08265523529052735, 0.08270816040039063, 0.08238102722167968, 0.0833845443725586, 0.0827615966796875, 0.08217737579345703, 0.08214527893066406, 0.08216876983642578, 0.08293529510498047, 0.08220317077636718, 0.08248636627197266, 0.08253683471679688, 0.0825792007446289, 0.08312425231933594, 0.08237100982666015, 0.08227635192871094, 0.0825765151977539, 0.08218303680419922, 0.08232681274414062, 0.08222105407714844, 0.08212566375732422, 0.08208985900878907, 0.08234719848632813, 0.08232832336425781, 0.08244140625, 0.08287430572509766, 0.08290582275390625, 0.08238511657714843, 0.08246659088134765, 0.08198681640625, 0.08221775817871094, 0.0821559066772461, 0.08211027526855469, 0.0819093475341797, 0.0828583984375, 0.08193401336669921, 0.08194080352783203, 0.08212384033203125, 0.08218041229248046, 0.08201251220703125, 0.08192912292480468, 0.0821710433959961, 0.08218428802490234, 0.08242777252197266, 0.08231820678710937, 0.08239718627929687, 0.08317040252685547, 0.0824881591796875, 0.08229689788818359, 0.08234803009033204, 0.08296688079833985, 0.08665392303466797, 0.0852261734008789, 0.0826429443359375, 0.0820531234741211, 0.08319795227050782, 0.08226742553710938, 0.0823732452392578, 0.0847790756225586, 0.0835645751953125, 0.0829683837890625, 0.08241340637207031, 0.08233932495117187, 0.08293666839599609, 0.08208403015136718, 
0.08219795227050782, 0.08211235046386718, 0.0821990737915039, 0.08589900970458984, 0.08293606567382812, 0.08262246704101563, 0.08262397003173828, 0.08276432037353515, 0.08283135986328125, 0.08298700714111328, 0.08247296142578125, 0.08249897766113282, 0.08220527648925781, 0.08215049743652343, 0.0821072998046875, 0.08446361541748047, 0.0819252166748047, 0.08225990295410156, 0.08187184143066406, 0.0822408676147461, 0.08219209289550782, 0.08224044799804688, 0.08229853057861328, 0.08233843231201173, 0.08207945251464843, 0.08235008239746094, 0.08221478271484375, 0.08215974426269532, 0.08225587463378906, 0.08255228424072265, 0.08217449951171875, 0.08222045135498048, 0.08246460723876953, 0.08230374145507813, 0.08250777435302735, 0.08252620697021484, 0.0822430419921875, 0.08240592193603516, 0.08299091339111328, 0.0823416976928711, 0.08243846130371094, 0.08301136016845703, 0.08264002990722656, 0.08227315521240235, 0.08230659484863281, 0.082368896484375, 0.08267343902587891, 0.08216127777099609, 0.08218278503417968, 0.08184153747558594, 0.08207020568847656, 0.08189475250244141, 0.08225965118408203, 0.081955810546875, 0.08303001403808594, 0.08266143798828125, 0.08231852722167969, 0.08197376251220703, 0.08248550415039063, 0.08218402862548828, 0.08455593872070312, 0.09028025817871094, 0.08196243286132812, 0.08208322906494141, 0.08224034881591796, 0.08207727813720703, 0.08197695922851563, 0.08241798400878907, 0.08221574401855469, 0.08205276489257812, 0.08227737426757813, 0.08188416290283203, 0.08181868743896484, 0.08197420501708984, 0.08177664184570313, 0.08194249725341797, 0.08443087768554687, 0.08196240234375, 0.08186121368408203, 0.08238694763183593, 0.08208294677734375, 0.08193113708496094, 0.08236637115478515, 0.0820811538696289, 0.08203282928466797, 0.08198607635498047, 0.08186470031738281, 0.08196272277832031, 0.08185884857177735, 0.08270438385009765, 0.08204601287841796, 0.08261660766601563, 0.0819178237915039, 0.08199452972412109, 0.08235008239746094, 0.08203446197509766, 0.08185174560546875, 0.08182048034667969, 0.08176032257080078, 0.08194876861572266, 0.08185667419433594, 0.0822706527709961, 0.08250093078613281, 0.08284233856201172, 0.08273916625976563, 0.0842260513305664, 0.087552001953125, 0.0825692138671875, 0.0821463394165039, 0.08381053161621094, 0.08200883483886719, 0.08173363494873047, 0.08309865570068359, 0.08213398742675782, 0.08333721923828125, 0.08205484771728516, 0.082032958984375, 0.08216480255126953, 0.08276678466796875, 0.08226306915283203, 0.08218211364746093, 0.08211558532714844, 0.08293341064453125, 0.08363657379150391, 0.08233273315429687, 0.08217459106445313, 0.08254492950439453, 0.08361702728271485, 0.08280944061279297, 0.08350064086914062, 0.08242556762695312, 0.082206787109375, 0.08253043365478516, 0.08219308471679687, 0.08214937591552735, 0.08200396728515626, 0.08211891174316406, 0.08194156646728516, 0.08252896118164063, 0.08287830352783203, 0.08212496185302734, 0.08246886444091797, 0.08219852447509765, 0.08196540832519532, 0.08196819305419922, 0.08188976287841797, 0.08198381042480468, 0.081802978515625, 0.08216531372070313, 0.0818570556640625, 0.08188851165771484, 0.08257926177978515, 0.08205558776855469, 0.08176191711425781, 0.08222191619873047, 0.08204032135009766, 0.08235475158691406, 0.08192588806152344, 0.08205542755126953, 0.08224982452392578, 0.08217107391357421, 0.08386032104492187, 0.08216162872314453, 0.08188713836669922]",tokens/s,12.301444715725708,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,15393.005568,9514.647552,0.0,9112.12544,9086.72256,s,1,33.0484375,33.0484375,0.0,33.0484375,33.0484375,33.0484375,33.0484375,[33.0484375],,kWh,0.000757331854037496,8.353208276449442e-05,0.0002461160302260068,0.0010869799670279972,,MB,4051.206144,9690.80832,0.0,9265.217536,9236.043264,s,10,1.2591574554443359,0.1259157455444336,0.0005874979846937586,0.12572275161743163,0.12688801651000975,0.1270107921600342,0.12710901268005373,"[0.12686073303222656, 0.1260794219970703, 0.12538195037841796, 0.1271335678100586, 0.12536153411865233, 0.1256412124633789, 0.12553263854980468, 0.12580429077148436, 0.12544457244873047, 0.1259175338745117]",tokens/s,2033.1055412737226,kWh,3.685687829322906e-06,4.064418089054211e-07,2.4405227857498407e-06,6.532652423978167e-06,tokens/kWh,39187757.64961096,MB,4055.457792,9707.585536,0.0,9281.994752,9177.325568,s,10,79.10445068359375,7.910445068359375,0.013057307954110342,7.914423095703125,7.922350244140625,7.9261565673828125,7.929201625976563,"[7.8845009765625, 7.929962890625, 7.9138154296875, 7.92150439453125, 7.920720703125, 7.89619140625, 7.899861328125, 7.91749267578125, 7.91503076171875, 7.9053701171875]",tokens/s,7.9641536545131695,kWh,0.00022662251716817566,2.4997557406988484e-05,0.00010292314483845051,0.0003545432194136147,tokens/kWh,177693.42791041615,,s,630,79.10193534851074,0.12555862753731864,0.0010657015214558115,0.12532328033447265,0.12663866271972654,0.1275750358581543,0.1288457716369629,"[0.12932188415527343, 0.12679283142089845, 0.12533984375, 0.12610387420654298, 0.1259540786743164, 0.12538668823242188, 0.12592374420166016, 0.1258694076538086, 0.12555318450927735, 0.1250940170288086, 0.12506857299804688, 0.1248038101196289, 0.12486450958251953, 0.12490140533447265, 0.12501631927490234, 0.12481097412109375, 0.12453020477294922, 0.1247168960571289, 0.12658751678466798, 0.1254787826538086, 0.1258249282836914, 0.12515318298339845, 0.12491423797607422, 0.1248592987060547, 0.12484284973144531, 0.12459212493896485, 0.12525885009765625, 0.12475689697265625, 0.1253232650756836, 0.12511357116699218, 0.12494518280029297, 0.12459008026123047, 0.12469574737548828, 0.12448960113525391, 0.12459228515625, 0.12442499542236328, 0.12423782348632813, 0.12434162902832031, 0.1244590072631836, 0.12489766693115234, 0.1249384994506836, 0.12459756469726563, 0.12478534698486328, 0.1252085723876953, 0.12530278778076173, 0.12469971466064453, 0.1251685791015625, 0.12428697967529297, 0.12457743835449218, 0.12446755218505859, 0.12446720123291016, 0.12500390625, 0.12483570861816407, 0.1246777572631836, 0.1249775390625, 0.12445629119873047, 0.12588492584228517, 0.12450422668457031, 0.12427263641357422, 0.12792626953125, 0.12682035064697267, 0.124731201171875, 0.12523065948486328, 0.12519612884521483, 0.1246709442138672, 0.1280696258544922, 0.1252157745361328, 0.1250313949584961, 
0.12798566436767578, 0.12533055877685548, 0.12521875, 0.12490233612060547, 0.12510969543457032, 0.12489990234375, 0.12501197052001953, 0.12583081817626954, 0.1253645782470703, 0.12557462310791015, 0.12516400146484374, 0.12498108673095704, 0.12514530944824218, 0.12502220916748047, 0.12646595001220703, 0.1260401611328125, 0.12521881866455079, 0.1250218276977539, 0.12497344207763672, 0.12498291015625, 0.12588668823242188, 0.12534524536132813, 0.1258722915649414, 0.12629865264892579, 0.12561545562744142, 0.1268180160522461, 0.12595906829833983, 0.12599708557128905, 0.12610355377197266, 0.12850784301757812, 0.1266973419189453, 0.12692704010009764, 0.1259699172973633, 0.12620230102539062, 0.12540326690673828, 0.1255688018798828, 0.1256388168334961, 0.12783001708984376, 0.12600115203857423, 0.12708850860595702, 0.12621427154541015, 0.1281078643798828, 0.12648719787597656, 0.12626432037353516, 0.12563760375976563, 0.1252696304321289, 0.1280188446044922, 0.12555059051513673, 0.12586598205566407, 0.12583526611328125, 0.12537651062011718, 0.12558364868164062, 0.12532911682128905, 0.12636978912353516, 0.12522700500488282, 0.12556873321533202, 0.1252635498046875, 0.12559420776367186, 0.12571727752685546, 0.12550758361816405, 0.12456864166259765, 0.12503679656982422, 0.12461692810058594, 0.12476464080810547, 0.12527616119384766, 0.12465151977539063, 0.12527212524414064, 0.12625270080566406, 0.12493443298339844, 0.12734259033203124, 0.12572876739501954, 0.125085693359375, 0.1260260467529297, 0.12527378845214843, 0.12549462127685546, 0.13673846435546874, 0.12487894439697265, 0.1264178237915039, 0.12615193939208985, 0.12638899230957032, 0.12539494323730468, 0.12479007720947266, 0.12473939514160157, 0.124621826171875, 0.12459814453125, 0.12438313293457032, 0.12491136169433593, 0.12425456237792969, 0.1261069793701172, 0.1251129608154297, 0.12499507141113281, 0.12444265747070313, 0.12502819061279297, 0.1248240966796875, 0.12515750122070313, 0.12440576171875, 0.12500172424316405, 0.12468428802490235, 0.126455810546875, 0.12521881866455079, 0.12456092834472657, 0.12455484771728516, 0.12456845092773437, 0.12455731201171875, 0.1254625244140625, 0.12538880157470703, 0.12610678100585937, 0.12510704040527343, 0.12677222442626954, 0.12592845153808593, 0.12624479675292968, 0.12572882843017577, 0.12567552185058595, 0.12612198638916017, 0.12701491546630858, 0.1269507827758789, 0.12611837005615234, 0.12604985809326172, 0.12667673492431641, 0.12608393859863282, 0.12661126708984374, 0.12714598083496093, 0.12653469085693358, 0.12595008087158202, 0.12605270385742187, 0.12608972930908202, 0.1254645767211914, 0.12599910736083986, 0.12677254486083983, 0.12665731048583984, 0.12624813079833985, 0.1264475555419922, 0.12856585693359374, 0.1270273895263672, 0.1265492477416992, 0.12561074829101562, 0.1257919692993164, 0.12594818878173827, 0.1256036834716797, 0.12567529296875, 0.12577830505371093, 0.12533123016357423, 0.1261305923461914, 0.12543775939941407, 0.12601567840576172, 0.12771308898925782, 0.1255198745727539, 0.12782406616210937, 0.12575955200195313, 0.12524671936035156, 0.12549692535400392, 0.12486569976806641, 0.1250444793701172, 0.1260748825073242, 0.12730777740478516, 0.12538015747070314, 0.12463142395019532, 0.1267733154296875, 0.12463689422607421, 0.12459446716308593, 0.12460873413085938, 0.12542953491210937, 0.1279073944091797, 0.12495916748046874, 0.12518195343017577, 0.12507299041748046, 0.12509868621826173, 0.12452457427978515, 0.12460944366455078, 0.12553689575195312, 0.12539510345458985, 0.1255895004272461, 
0.12514508819580078, 0.12514713287353516, 0.12531302642822265, 0.1249486083984375, 0.12505235290527345, 0.12525599670410156, 0.126115966796875, 0.12517581176757814, 0.1250766067504883, 0.12458223724365235, 0.1250923843383789, 0.1247448959350586, 0.12536160278320313, 0.12803744506835937, 0.1252962875366211, 0.1254461135864258, 0.1256021728515625, 0.12504473876953126, 0.12493344116210937, 0.12552671813964844, 0.12545843505859375, 0.12601744079589844, 0.12537184143066407, 0.12561885070800782, 0.1251939163208008, 0.12537273406982422, 0.12510617828369142, 0.12562022399902345, 0.12601548767089843, 0.126553955078125, 0.12566524505615234, 0.1257023391723633, 0.12520448303222656, 0.1256300811767578, 0.12563446044921875, 0.12611804962158202, 0.12531129455566406, 0.12621004486083984, 0.12593766021728517, 0.12614246368408202, 0.12568576049804686, 0.12594790649414062, 0.12548300933837891, 0.12542566680908204, 0.1252638702392578, 0.12605232238769531, 0.12588854217529297, 0.12673638153076172, 0.1281815948486328, 0.1262475814819336, 0.12553421020507813, 0.12512665557861327, 0.12582707214355468, 0.12539494323730468, 0.12856930541992187, 0.12582233428955078, 0.1248563232421875, 0.12555455780029298, 0.1248509750366211, 0.1246904296875, 0.12619347381591797, 0.12660076904296874, 0.1257492141723633, 0.12521129608154297, 0.12854066467285155, 0.12517302703857422, 0.1248939208984375, 0.12498738861083984, 0.12460816192626953, 0.12519664001464845, 0.12525363159179687, 0.12635340881347656, 0.1250672607421875, 0.12612403106689454, 0.12425395202636719, 0.12424467468261718, 0.12618326568603516, 0.1255302734375, 0.12519574737548828, 0.12507804870605468, 0.12445315551757813, 0.12443004608154297, 0.12477993774414063, 0.12448204803466797, 0.1251229782104492, 0.12521440124511718, 0.12529401397705078, 0.12481388854980469, 0.12484198760986329, 0.1248358383178711, 0.124727294921875, 0.12512393951416015, 0.12714256286621095, 0.12563990020751953, 0.12554934692382813, 0.12523056030273438, 0.12497360229492188, 0.12472115325927734, 0.12492790222167968, 0.12515744018554686, 0.12498947143554688, 0.12489727783203125, 0.12558335876464843, 0.1254228515625, 0.1254544677734375, 0.12477327728271484, 0.1249966049194336, 0.12486918640136718, 0.1308712615966797, 0.12517804718017578, 0.12527206420898437, 0.12528844451904297, 0.12533046722412108, 0.12494649505615234, 0.12475222778320312, 0.12483436584472657, 0.12553529357910156, 0.12517472076416017, 0.12663807678222655, 0.12579430389404297, 0.12565708923339844, 0.12456345367431641, 0.12495257568359375, 0.12485606384277344, 0.125261474609375, 0.124844482421875, 0.12531523132324218, 0.1248563232421875, 0.1251409912109375, 0.12541718292236329, 0.12512284851074218, 0.12640460968017578, 0.1260330276489258, 0.12710128021240236, 0.12594435119628905, 0.12578342437744142, 0.12550342559814454, 0.12487446594238281, 0.12712550354003907, 0.12572259521484375, 0.1259070053100586, 0.12565090942382812, 0.1289586944580078, 0.12649654388427733, 0.12551577758789062, 0.1253397445678711, 0.12493199920654297, 0.1252158432006836, 0.12491254425048828, 0.1255313949584961, 0.12560870361328125, 0.12601344299316405, 0.1250785903930664, 0.12489619445800781, 0.12483897399902344, 0.1262314910888672, 0.1259520034790039, 0.1255198745727539, 0.12535603332519532, 0.12490576171875, 0.12506492614746093, 0.12504064178466798, 0.12489727783203125, 0.12523929595947267, 0.12500991821289062, 0.12593766021728517, 0.1250847396850586, 0.12574610900878908, 0.12529657745361328, 0.1252470703125, 0.12586441802978515, 0.12582630157470703, 
0.1259948501586914, 0.1260195846557617, 0.12553308868408203, 0.1254840316772461, 0.12546559906005858, 0.12555264282226564, 0.125447265625, 0.12474380493164063, 0.12572064208984374, 0.12516566467285156, 0.12507609558105467, 0.124801025390625, 0.12504064178466798, 0.12452851104736327, 0.12474790191650391, 0.1245633316040039, 0.12550678253173828, 0.12491804504394531, 0.12487321472167968, 0.12439769744873047, 0.1249423370361328, 0.12479283142089843, 0.12451631927490234, 0.12443055725097656, 0.12553517150878907, 0.12528729248046874, 0.1266503677368164, 0.12460198211669922, 0.12529702758789063, 0.12460137939453125, 0.12454780578613281, 0.12514659118652344, 0.1253977279663086, 0.12550348663330077, 0.12426403045654297, 0.12482998657226563, 0.12493782043457032, 0.1251164779663086, 0.12774566650390626, 0.1265407028198242, 0.1268507843017578, 0.12626553344726563, 0.1255810546875, 0.1254768981933594, 0.12608748626708985, 0.13063523864746093, 0.12526032257080078, 0.12594771575927735, 0.12565542602539062, 0.12725433349609375, 0.12484812927246093, 0.12513219451904298, 0.12487232208251953, 0.1271162567138672, 0.12532736206054687, 0.12558534240722657, 0.1253458557128906, 0.12490547180175782, 0.12473260498046874, 0.12840428161621092, 0.12523693084716797, 0.1257004165649414, 0.12597657775878907, 0.12610128021240236, 0.12536217498779298, 0.12534806060791015, 0.12486243438720702, 0.1249233627319336, 0.12468624114990234, 0.12502902221679688, 0.12473270416259766, 0.12562505340576172, 0.12514002990722656, 0.1253651809692383, 0.12507955169677734, 0.12519792175292968, 0.12490486145019532, 0.12787983703613282, 0.12543830108642579, 0.1265459213256836, 0.12502243041992187, 0.12559292602539063, 0.12466220855712891, 0.12465561676025391, 0.12502783966064454, 0.12529503631591796, 0.12766828918457032, 0.12723200225830078, 0.12635340881347656, 0.12692205047607422, 0.12485088348388672, 0.12491958618164062, 0.12526815795898438, 0.12500563049316407, 0.12482355499267578, 0.1250365447998047, 0.12568370819091798, 0.1250672607421875, 0.12611500549316407, 0.12460224151611328, 0.12506553649902344, 0.12532329559326172, 0.1252317123413086, 0.1251184616088867, 0.12532310485839843, 0.12476636505126953, 0.12465353393554687, 0.12456134033203126, 0.1246693115234375, 0.12486115264892578, 0.12608512115478515, 0.12579634857177735, 0.12526592254638672, 0.124583740234375, 0.12470496368408203, 0.12603785705566406, 0.12466191864013672, 0.13195263671875, 0.1261968307495117, 0.12658985900878905, 0.12666060638427734, 0.1254686737060547, 0.12510002899169922, 0.1265971221923828, 0.1261854705810547, 0.12599501037597657, 0.1260083236694336, 0.12567977905273436, 0.12824252319335938, 0.12503858947753907, 0.12528230285644532, 0.12484198760986329, 0.12463420867919922, 0.12493920135498048, 0.12659709167480468, 0.1255789794921875, 0.1259247360229492, 0.12468316650390625, 0.12512620544433595, 0.12446896362304688, 0.125251708984375, 0.12563241577148437, 0.12573519897460939, 0.1259708480834961, 0.12588236999511718, 0.12544204711914062, 0.12580249786376954, 0.12538265228271483, 0.12575539398193358, 0.12540332794189454, 0.12642211151123048, 0.12790652465820312, 0.12633293151855468, 0.1259315185546875, 0.1258082275390625, 0.12841574096679687, 0.12609718322753907, 0.12643862152099608, 0.12619468688964844, 0.12578729248046874, 0.12552496337890626, 0.12568358612060546, 0.12583062744140625, 0.1274610595703125, 0.12524015808105468, 0.12669664001464845, 0.12832237243652345, 0.12643475341796875, 0.1255591049194336, 0.12490573120117188, 0.12473747253417969, 
0.12468582153320312, 0.12456556701660157, 0.12450457763671875, 0.13041571044921876, 0.12627152252197266, 0.12494898986816406, 0.12454131317138672, 0.12416822052001954, 0.1248460464477539, 0.12517581176757814, 0.12587007904052736, 0.12563251495361327, 0.12546630096435546, 0.12478495788574219, 0.12475801849365234, 0.12445078277587891, 0.12440988922119141, 0.12463465881347656, 0.12462332916259766, 0.1259521942138672, 0.12524729919433594, 0.12480716705322266, 0.12446514892578125, 0.12544774627685548, 0.12639891052246094, 0.12664393615722655, 0.12612841796875, 0.12608278656005859, 0.12550972747802736, 0.12449593353271485, 0.12433830261230469, 0.1278935012817383, 0.12501510620117187, 0.1254757766723633, 0.1250888977050781, 0.12516172790527344, 0.12846144104003906, 0.12511641693115233, 0.12470713806152343, 0.12416169738769531, 0.12430748748779297, 0.12412457275390625, 0.12480572509765625, 0.12413897705078125, 0.12462655639648437, 0.12425507354736329, 0.12419078063964843]",tokens/s,7.964406903880652,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in 
from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 99004 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,1099.497472,634.322944,0.0,239.075328,216.531968,s,1,8.0439541015625,8.0439541015625,0.0,8.0439541015625,8.0439541015625,8.0439541015625,8.0439541015625,[8.0439541015625],,kWh,3.223055435832786e-05,3.5478752692028186e-06,1.1524175886002547e-05,4.730260551353322e-05,,MB,1245.77792,720.306176,0.0,304.08704,261.878272,s,10,0.39191001510620116,0.03919100151062012,0.0002534118295146534,0.03913985633850098,0.03950299415588379,0.039584153938293454,0.03964908176422119,"[0.03966531372070312, 0.038930496215820315, 0.03888412857055664, 0.0389372787475586, 0.039473918914794924, 0.039484958648681644, 0.03917206573486328, 0.03906051254272461, 0.03910764694213867, 0.039193695068359374]",tokens/s,6532.111712700891,kWh,1.1507683333825479e-06,1.2690596006270026e-07,5.114396042519743e-07,1.7891138976972225e-06,tokens/kWh,143087592.31567028,MB,1279.717376,732.889088,0.0,316.669952,261.880832,s,10,23.84888134765625,2.3848881347656254,0.007355478053398507,2.3853553466796873,2.393446435546875,2.39452265625,2.3953836328125,"[2.389367919921875, 2.38419921875, 2.382704833984375, 2.38542041015625, 2.385290283203125, 2.379917724609375, 2.3673994140625, 2.395598876953125, 2.385775390625, 2.393207275390625]",tokens/s,26.416333362399538,kWh,7.005794998745129e-05,7.726870229722669e-06,2.666772102794837e-05,0.00010445254124512234,tokens/kWh,603144.7320382159,,s,630,23.843350658416735,0.03784658834669325,0.0006985298675227379,0.03780875205993652,0.03830564613342285,0.03857856121063232,0.03927424137115479,"[0.037569313049316405, 0.03796480178833008, 0.03774262237548828, 0.03781760025024414, 0.03812931060791016, 0.03788832092285156, 0.03765542221069336, 0.0375711669921875, 0.03786576080322265, 0.03745792007446289, 0.037572608947753904, 0.03774185562133789, 0.037722976684570315, 0.037728126525878904, 0.038363391876220704, 0.03798400115966797, 0.037617664337158206, 0.03752329635620117, 0.03761174392700195, 0.03756025695800781, 0.03746358489990234, 0.03739644622802735, 0.037491199493408206, 0.037369857788085936, 0.03793920135498047, 0.038158336639404294, 0.03844710540771484, 0.03828668975830078, 0.03822572708129883, 0.03808095932006836, 0.0392380485534668, 0.0381110725402832, 0.0380272331237793, 0.03818751907348633, 0.03843859100341797, 0.03819420623779297, 0.03848291015625, 0.03910166549682617, 0.038433567047119144, 0.038141952514648435, 0.03815254211425781, 0.03822956848144531, 0.03816236877441406, 0.03813308715820313, 0.03801520156860352, 0.03789065551757813, 0.03767257690429687, 0.03780441665649414, 0.03817398452758789, 0.03784473419189453, 0.03773126220703125, 0.03780160140991211, 0.037666336059570316, 0.03755097579956055, 0.03774054336547852, 0.03773440170288086, 0.03784473419189453, 0.03803366470336914, 0.037586399078369144, 0.03740339279174805, 0.03767075347900391, 0.03791251373291016, 0.03776716613769531, 0.03773440170288086, 0.03766070556640625, 0.038439041137695314, 0.0378901138305664, 0.03770710372924805, 0.03740428924560547, 0.037458751678466795, 0.03743334579467773, 0.03773440170288086, 0.037709217071533206, 0.037524063110351565, 0.03748233413696289, 0.03847516632080078, 0.03820006561279297, 0.037887966156005856, 0.037807487487792966, 0.037999263763427736, 0.03771619033813477, 0.03761446380615235, 0.03801590347290039, 0.037733505249023434, 0.037781822204589845, 0.037902782440185544, 0.03807654571533203, 0.0373043212890625, 0.03712393569946289, 0.03720518493652344, 0.037784191131591795, 0.03777881622314453, 0.03852105712890625, 0.03868659210205078, 
0.03793395233154297, 0.03801497650146484, 0.03797129440307617, 0.03810969543457031, 0.037955680847167966, 0.03812768173217773, 0.037953792572021486, 0.03797139358520508, 0.03792070388793945, 0.037911102294921874, 0.038074176788330076, 0.03799244689941406, 0.038080352783203125, 0.037797279357910156, 0.038263553619384764, 0.03808201599121094, 0.038023712158203125, 0.03794755172729492, 0.03798204803466797, 0.03788800048828125, 0.0378996810913086, 0.03783676910400391, 0.03883440017700195, 0.03760780715942383, 0.0375880012512207, 0.03780710220336914, 0.03738623809814453, 0.03712944030761719, 0.03721894454956055, 0.03754512023925781, 0.03741593551635742, 0.03760537719726562, 0.03666873550415039, 0.0368798713684082, 0.03693049621582031, 0.03717695999145508, 0.03726492691040039, 0.03762377548217773, 0.0377393913269043, 0.037824001312255856, 0.039024673461914065, 0.03786127853393555, 0.03766944122314453, 0.038196704864501954, 0.03799692916870117, 0.03770998382568359, 0.03744099044799805, 0.03759337615966797, 0.03759110260009765, 0.037568702697753906, 0.03748044967651367, 0.03750713729858399, 0.03713836669921875, 0.03720601654052735, 0.03735334396362305, 0.03720118331909179, 0.037235553741455076, 0.03715033721923828, 0.03706508636474609, 0.03724492645263672, 0.03727990341186523, 0.037169055938720705, 0.03754905700683594, 0.03779654312133789, 0.03790873718261719, 0.03777526473999023, 0.037857376098632815, 0.0384266242980957, 0.038088638305664065, 0.037779102325439455, 0.03783030319213867, 0.03771292877197266, 0.03766278457641602, 0.037878623962402345, 0.0381038703918457, 0.0379576301574707, 0.038084606170654296, 0.0384266242980957, 0.03799836730957031, 0.03821590423583984, 0.03842838287353516, 0.038287487030029294, 0.038303905487060544, 0.03815836715698242, 0.038111198425292966, 0.0380682258605957, 0.03786576080322265, 0.038096126556396485, 0.03822844696044922, 0.03911913681030273, 0.039479007720947264, 0.03835007858276367, 0.03845391845703125, 0.03843695831298828, 0.03791030502319336, 0.037262016296386716, 0.03729571151733398, 0.03725990295410156, 0.03706243133544922, 0.03702524948120117, 0.03759183883666992, 0.03735340881347656, 0.03772601699829101, 0.038194911956787106, 0.03763792037963867, 0.0375376968383789, 0.037508033752441404, 0.037187232971191406, 0.03723468780517578, 0.03720102310180664, 0.03746432113647461, 0.03755072021484375, 0.037157920837402346, 0.037450721740722656, 0.03794473648071289, 0.03802140808105469, 0.038013023376464845, 0.03741465759277344, 0.03785929489135742, 0.037399166107177736, 0.03735334396362305, 0.03729747009277344, 0.037165889739990236, 0.03762694549560547, 0.03710047912597656, 0.037199550628662106, 0.03785542297363281, 0.04685027313232422, 0.03798115158081055, 0.03782844924926758, 0.03733135986328125, 0.0370972785949707, 0.03719443130493164, 0.037074142456054685, 0.03731727981567383, 0.037776607513427735, 0.03760838317871094, 0.037491039276123045, 0.037404510498046876, 0.0374554557800293, 0.037658592224121094, 0.04119136047363281, 0.03814348983764648, 0.03836604690551758, 0.038209312438964846, 0.037967777252197264, 0.038332672119140626, 0.03796192169189453, 0.038168449401855466, 0.03794527816772461, 0.03789823913574219, 0.03808467102050781, 0.038016094207763675, 0.03839478302001953, 0.03860604858398437, 0.03817478561401367, 0.03794150543212891, 0.03841686248779297, 0.03785318374633789, 0.038102081298828125, 0.03818796920776367, 0.03860262298583984, 0.03804921722412109, 0.03784569549560547, 0.03781631851196289, 0.037738494873046875, 0.038219615936279296, 
0.03800915145874023, 0.038147937774658205, 0.03802521514892578, 0.038226272583007814, 0.03756403350830078, 0.03759942245483398, 0.037453662872314455, 0.037371967315673826, 0.03737510299682617, 0.03728047943115234, 0.03708844757080078, 0.037917312622070314, 0.03788623809814453, 0.03806617736816406, 0.03798780822753906, 0.037656318664550784, 0.037445823669433595, 0.03747011184692383, 0.03747910308837891, 0.03743532943725586, 0.03756579208374024, 0.03737238311767578, 0.037423358917236325, 0.03760771179199219, 0.03751500701904297, 0.03797155380249023, 0.038395454406738284, 0.038125953674316405, 0.037970367431640624, 0.03787776184082031, 0.03775513458251953, 0.03771804809570312, 0.03756412887573242, 0.03757567977905273, 0.03748147201538086, 0.03741900634765625, 0.03773545455932617, 0.03912803268432617, 0.037781600952148435, 0.03786108779907227, 0.03928902435302734, 0.038199295043945314, 0.037674625396728514, 0.0378392333984375, 0.037914592742919924, 0.03766483306884766, 0.037713886260986325, 0.03771596908569336, 0.03791791915893555, 0.03811811065673828, 0.03813702392578125, 0.03813999938964844, 0.037997344970703124, 0.038594558715820314, 0.03785558319091797, 0.03832796859741211, 0.037904735565185546, 0.03781001663208008, 0.037859745025634765, 0.03780278396606445, 0.03774303817749024, 0.03796355056762695, 0.0379598388671875, 0.03823593521118164, 0.037964481353759766, 0.0379024658203125, 0.038045665740966794, 0.03779126358032227, 0.03766697692871094, 0.037474655151367185, 0.03755353546142578, 0.03742319869995117, 0.037288734436035156, 0.03711155319213867, 0.0385986557006836, 0.03859225463867187, 0.03777500915527344, 0.03756092834472656, 0.03791571044921875, 0.03792575836181641, 0.03791468811035156, 0.037865150451660154, 0.0376036491394043, 0.03742710494995117, 0.03725260925292969, 0.03748518371582031, 0.03725104141235352, 0.03719987106323242, 0.037253120422363284, 0.03773820877075195, 0.037775646209716796, 0.03896470260620117, 0.038806049346923825, 0.03835027313232422, 0.0378842544555664, 0.037740768432617186, 0.03775088119506836, 0.03734444808959961, 0.03744611358642578, 0.03728355026245117, 0.0373458251953125, 0.03757670211791992, 0.0375109748840332, 0.03766828918457031, 0.03766502380371094, 0.03818307113647461, 0.03798255920410156, 0.038152286529541016, 0.03797318267822265, 0.03770032119750977, 0.037576801300048826, 0.03755596923828125, 0.03754819107055664, 0.03754393768310547, 0.037410560607910155, 0.03760380935668945, 0.0379815673828125, 0.03749529647827148, 0.03801702499389648, 0.038027263641357424, 0.037959232330322265, 0.03801747131347656, 0.03801520156860352, 0.037907806396484375, 0.038012863159179684, 0.037911041259765625, 0.03785334396362305, 0.037875553131103516, 0.03798015975952149, 0.037822463989257815, 0.03808287811279297, 0.038561824798583985, 0.03828211212158203, 0.038361503601074216, 0.037969566345214846, 0.037800926208496094, 0.03796758270263672, 0.03784236907958984, 0.03731747055053711, 0.03730377578735351, 0.03746384048461914, 0.0379683837890625, 0.03772934341430664, 0.03775379180908203, 0.03770070266723633, 0.03728067016601563, 0.036956512451171875, 0.03694966506958008, 0.036859264373779295, 0.036921630859375, 0.037042720794677735, 0.037136478424072264, 0.03709497451782227, 0.03703231811523437, 0.03722608184814453, 0.037826625823974606, 0.03761939239501953, 0.037745185852050785, 0.03816233444213867, 0.037806079864501956, 0.03786137771606445, 0.037117279052734375, 0.037276256561279295, 0.03736070251464844, 0.03723929595947266, 0.037227008819580076, 0.037203582763671875, 
0.03715110397338867, 0.03744128036499023, 0.03769772720336914, 0.03774675369262695, 0.03759545516967774, 0.03745561599731445, 0.03732851028442383, 0.03727596664428711, 0.0372305908203125, 0.036939777374267575, 0.03694182586669922, 0.037072158813476565, 0.03706752014160156, 0.03768998336791992, 0.03760652923583984, 0.040088161468505856, 0.03803801727294922, 0.03824614334106445, 0.03824367904663086, 0.03812745666503906, 0.03787295913696289, 0.03784259033203125, 0.03786547088623047, 0.03807436752319336, 0.037943294525146484, 0.03804300689697265, 0.03816435241699219, 0.038392833709716793, 0.03832131195068359, 0.03842108917236328, 0.03806982421875, 0.03790892791748047, 0.0380211181640625, 0.03801804733276367, 0.03795251083374023, 0.03800064086914062, 0.03828940963745117, 0.037971935272216796, 0.03796556854248047, 0.038107425689697265, 0.038100990295410156, 0.03781232070922851, 0.03761769485473633, 0.037906017303466794, 0.038271488189697264, 0.03791439819335937, 0.03732880020141602, 0.03745801544189453, 0.03744908905029297, 0.037744319915771485, 0.037725120544433596, 0.03769772720336914, 0.03790150451660156, 0.03808489608764649, 0.03818547058105469, 0.03773596954345703, 0.03786892700195312, 0.03775174331665039, 0.037739681243896483, 0.037472190856933596, 0.03745868682861328, 0.03731705474853515, 0.0371360969543457, 0.03725289535522461, 0.037402847290039065, 0.037506145477294923, 0.03758278274536133, 0.03745481491088867, 0.04710403060913086, 0.03776480102539063, 0.03859632110595703, 0.03775116729736328, 0.03743670272827149, 0.03751731109619141, 0.037352031707763675, 0.03736608123779297, 0.03722963333129883, 0.03777030563354492, 0.03796652984619141, 0.037515262603759765, 0.03754537582397461, 0.03775296020507812, 0.03806256103515625, 0.03768550491333008, 0.03761331176757812, 0.03766387176513672, 0.03748953628540039, 0.037689342498779296, 0.03752364730834961, 0.037934913635253906, 0.03793913650512695, 0.03810924911499024, 0.038125568389892575, 0.03806208038330078, 0.038071327209472657, 0.03786646270751953, 0.03882393646240234, 0.03818905639648437, 0.03798425674438476, 0.038201343536376955, 0.037950817108154296, 0.037934814453125, 0.03793638229370117, 0.038700736999511716, 0.03810300827026367, 0.038133792877197266, 0.03790444946289063, 0.038653377532958985, 0.038025726318359376, 0.038098270416259766, 0.0387242546081543, 0.03779545593261719, 0.038082847595214846, 0.03745536041259766, 0.03734527969360352, 0.03746892929077148, 0.037189472198486326, 0.0371992301940918, 0.037351646423339845, 0.03745238494873047, 0.0374736328125, 0.03773855972290039, 0.03803734588623047, 0.03803376007080078, 0.037539039611816406, 0.03754703903198242, 0.037684799194335934, 0.03767705535888672, 0.03759775924682617, 0.0375334701538086, 0.03920832061767578, 0.03832617568969727, 0.037727294921875, 0.03796752166748047, 0.03777347183227539, 0.03781391906738281, 0.03772848129272461, 0.03745775985717773, 0.038059425354003903, 0.03687628936767578, 0.037185535430908204, 0.03719168090820312, 0.03734732818603516, 0.03781801605224609, 0.03781391906738281, 0.038795936584472654, 0.03787104034423828, 0.037851905822753905, 0.038694465637207034, 0.03839353561401367, 0.03769334411621094, 0.03774233627319336, 0.037704414367675784, 0.0374233283996582, 0.037392383575439454, 0.037697662353515626, 0.03780412673950195, 0.03792668914794922, 0.03819724655151367, 0.03834463882446289, 0.037829822540283206, 0.037862239837646486, 0.03807353591918945, 0.0379422721862793, 0.03787964630126953, 0.03794694519042969, 0.03791686248779297, 0.03846169662475586, 
0.039175167083740234, 0.038214942932128904, 0.038163551330566405, 0.038158592224121095, 0.03801331329345703, 0.0379832649230957, 0.03840534210205078, 0.03819734573364258, 0.0381416015625, 0.038010879516601564, 0.03801833724975586, 0.03803414535522461, 0.0379576301574707, 0.038133758544921875, 0.04145971298217774, 0.038113121032714845, 0.039220703125, 0.039076030731201174, 0.03800848007202148, 0.03755510330200195, 0.03751283264160156, 0.03772208023071289, 0.03759369659423828, 0.037502815246582034, 0.037287841796875, 0.03733411026000977, 0.037392734527587894, 0.037485118865966796, 0.037864479064941406, 0.03779888153076172, 0.037908191680908206, 0.03762204742431641, 0.03773440170288086, 0.0381684799194336]",tokens/s,26.422460879155373,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,2011.9552,1264.451584,0.0,861.929472,840.411136,s,1,9.3494306640625,9.3494306640625,0.0,9.3494306640625,9.3494306640625,9.3494306640625,9.3494306640625,[9.3494306640625],,kWh,6.170553121249515e-05,6.79923829137657e-06,1.96644601759971e-05,8.816922967986882e-05,,MB,2074.693632,1449.00096,0.0,1025.507328,1002.229248,s,10,0.7805734939575195,0.07805734939575196,0.0010008850600849797,0.07776943969726563,0.07965878143310547,0.07998767776489259,0.08025079483032227,"[0.07798127746582031, 0.07784796905517578, 0.07788953399658204, 0.079585693359375, 0.07763011169433594, 0.0773153305053711, 0.0803165740966797, 0.07724492645263673, 0.0770711669921875, 0.07769091033935546]",tokens/s,3279.639931175168,kWh,2.2888282714517296e-06,2.5241812031742606e-07,9.707342835310996e-07,3.5119806753002553e-06,tokens/kWh,72893339.5905185,MB,2083.90144,1501.42976,0.0,1077.936128,1002.231808,s,10,47.06882666015625,4.706882666015625,0.02760380380984115,4.710118896484374,4.743515576171875,4.746589331054687,4.749048334960937,"[4.74283251953125, 4.7496630859375, 4.72247412109375, 4.71050927734375, 4.68658447265625, 4.709728515625, 4.66871728515625, 4.6735283203125, 4.67778369140625, 4.72700537109375]",tokens/s,13.384654870381437,kWh,0.00013807231269813175,1.5229289378252043e-05,4.8925750078069e-05,0.0002022273521544528,tokens/kWh,311530.55869456887,,s,630,47.06561630249025,0.0747073274642702,0.0010312168847922596,0.07456313705444337,0.075347891998291,0.07632484321594238,0.07970539199829102,"[0.07480384063720703, 0.075659423828125, 0.07625055694580078, 0.07526412963867188, 0.07467072296142578, 0.07466358184814453, 0.0753372802734375, 0.07467810821533204, 0.0747213134765625, 0.07513760375976562, 0.07482505798339843, 0.08001721954345703, 0.07489826965332032, 0.07509798431396485, 0.0747416000366211, 0.07463760375976562, 0.0750755844116211, 0.07505919647216797, 0.07467212677001953, 0.07523737335205079, 0.0750877456665039, 0.07510355377197266, 0.07625811004638672, 0.07487385559082031, 0.07499382019042969, 0.07993968200683593, 0.07460646057128906, 0.07465254211425781, 0.07478662109375, 0.07476377868652344, 0.0746584930419922, 
0.07462671661376953, 0.07621871948242187, 0.07481139373779297, 0.07460620880126953, 0.07495088195800781, 0.07457807922363281, 0.07456326293945313, 0.07735718536376954, 0.07702751922607422, 0.07485968017578125, 0.07487693023681641, 0.07488540649414062, 0.0757458267211914, 0.07481542205810547, 0.07456777954101562, 0.07479219055175781, 0.07446189117431641, 0.07469785308837891, 0.07447216033935547, 0.07441222381591797, 0.07410867309570313, 0.0794401626586914, 0.07470432281494141, 0.07478534698486328, 0.0747540512084961, 0.07493373107910156, 0.0754582748413086, 0.07837983703613281, 0.0755484161376953, 0.07448953247070313, 0.07488774108886719, 0.07452848052978515, 0.07442265319824219, 0.0745533447265625, 0.0795077133178711, 0.07472921752929687, 0.07494009399414063, 0.0782333755493164, 0.07453135681152344, 0.07510377502441407, 0.07459625244140625, 0.0746215057373047, 0.07476019287109376, 0.07514857482910156, 0.0774755859375, 0.07507247924804687, 0.07518822479248047, 0.07498137664794922, 0.07994300842285157, 0.0746432342529297, 0.07515670776367188, 0.07510790252685547, 0.07472547149658203, 0.07545974731445312, 0.07483888244628906, 0.07513884735107422, 0.07461500549316406, 0.07474086761474609, 0.07460950469970704, 0.07506947326660156, 0.07449171447753906, 0.07968787384033203, 0.07498287963867188, 0.07507766723632812, 0.07498393249511719, 0.0748075180053711, 0.07498470306396485, 0.07493276977539062, 0.075136962890625, 0.0747376937866211, 0.07482479858398437, 0.07489017486572265, 0.07450959777832031, 0.07484284973144531, 0.07468208312988281, 0.0797125473022461, 0.07473961639404297, 0.07494239807128907, 0.07474601745605469, 0.0744816665649414, 0.07532733154296875, 0.07480665588378907, 0.07459305572509765, 0.07692848205566406, 0.07560038757324218, 0.07460594940185547, 0.07512064361572265, 0.07471171569824218, 0.07478681945800782, 0.07985971069335937, 0.07588614654541016, 0.07496873474121094, 0.07477283477783203, 0.0745230712890625, 0.07475794982910157, 0.07419449615478516, 0.07528902435302734, 0.0744361572265625, 0.07446572875976562, 0.07453695678710938, 0.07476223754882813, 0.07606476593017578, 0.07560415649414062, 0.07886009979248047, 0.07498374176025391, 0.07543571472167969, 0.07534304046630859, 0.07489164733886719, 0.07459645080566406, 0.07468048095703125, 0.07465593719482422, 0.07460457611083984, 0.0751390380859375, 0.07435059356689454, 0.07453414154052734, 0.07469888305664063, 0.07485874938964844, 0.07489715576171875, 0.07457965087890625, 0.07502483367919922, 0.07444735717773437, 0.07478681945800782, 0.07584928131103516, 0.07467565155029297, 0.07460684967041016, 0.07456159973144531, 0.0746236801147461, 0.07434793853759766, 0.07455804443359375, 0.07484435272216797, 0.0749422378540039, 0.07511759948730469, 0.07611670684814453, 0.07482144165039062, 0.07448828887939453, 0.07460819244384766, 0.07468409729003907, 0.07458688354492188, 0.07471222686767579, 0.0744840316772461, 0.07450406646728516, 0.07868482971191407, 0.07512226867675781, 0.07543583679199219, 0.07444963073730469, 0.07444601440429688, 0.07520559692382812, 0.07452604675292969, 0.07463565063476563, 0.07459430694580078, 0.07449171447753906, 0.07442787170410156, 0.07449254608154297, 0.07444198608398438, 0.07533859252929688, 0.07473561859130859, 0.07494246673583985, 0.0763180160522461, 0.074949951171875, 0.07450444793701172, 0.07447993469238282, 0.0746824951171875, 0.07447478485107421, 0.07524950408935546, 0.07464435577392578, 0.07465164947509766, 0.07512191772460937, 0.07523788452148437, 0.0767224349975586, 0.07507762908935547, 
0.07477565002441407, 0.0746628189086914, 0.07450342559814453, 0.07454796600341797, 0.07458329772949218, 0.07478963470458984, 0.07506739044189453, 0.07459385681152343, 0.07456102752685546, 0.0751502685546875, 0.0749928970336914, 0.07459241485595704, 0.07496054077148437, 0.07512924957275391, 0.07459075164794922, 0.07524111938476563, 0.07454959869384765, 0.07449772644042969, 0.07471536254882813, 0.07468418884277343, 0.07577426910400391, 0.07469779205322266, 0.07469971466064453, 0.07448576354980468, 0.07479193878173829, 0.07456374359130859, 0.07457622528076172, 0.07517644500732422, 0.07476374053955079, 0.07475007629394531, 0.07445085144042969, 0.07415039825439453, 0.07490080261230468, 0.07445999908447265, 0.07432585906982422, 0.07436825561523437, 0.07448982238769532, 0.07448451232910157, 0.07447142028808594, 0.07479484558105469, 0.07533366394042969, 0.07639360046386719, 0.07477865600585938, 0.07441506958007812, 0.07440774536132813, 0.07680223846435547, 0.07403858947753907, 0.07379542541503906, 0.07400534057617188, 0.07373123168945313, 0.07433296203613281, 0.07408844757080078, 0.07454025268554687, 0.07436367797851562, 0.07728105926513672, 0.07416649627685547, 0.07395516967773437, 0.0740906524658203, 0.07407174682617187, 0.07405804443359375, 0.07393894195556641, 0.07393427276611328, 0.07405625915527343, 0.07595212554931641, 0.07447142028808594, 0.07456768035888672, 0.07410073852539062, 0.07468646240234375, 0.0745697250366211, 0.07444889831542968, 0.08208589172363281, 0.07384678649902343, 0.07397945404052735, 0.07393075561523438, 0.07417900848388671, 0.0740126724243164, 0.07421337890625, 0.07374368286132812, 0.07441248321533203, 0.07426278686523438, 0.07419904327392578, 0.07463321685791016, 0.07415142059326171, 0.07524403381347657, 0.073838623046875, 0.074082275390625, 0.07375849914550782, 0.07400879669189453, 0.07410912322998046, 0.07403823852539063, 0.07418303680419921, 0.07395120239257813, 0.07413811492919922, 0.07402896118164062, 0.0744442596435547, 0.07457590484619141, 0.07444131469726563, 0.07452671813964844, 0.07412870025634766, 0.07407891082763672, 0.07375667572021484, 0.07406182098388672, 0.07394003295898438, 0.07431993865966798, 0.07431871795654296, 0.07435263824462891, 0.07382585906982422, 0.07467667388916016, 0.07399833679199219, 0.07400041961669922, 0.07452054595947266, 0.07387955474853515, 0.07391436767578125, 0.07412531280517579, 0.07475628662109375, 0.07489510345458984, 0.07468297576904297, 0.07483596801757812, 0.07675289916992188, 0.07719222259521484, 0.07611420440673829, 0.07500460815429688, 0.07507718658447265, 0.07460294342041016, 0.07487283325195312, 0.07479910278320312, 0.07485794830322266, 0.07469136047363281, 0.07465049743652344, 0.07458255767822265, 0.07469206237792969, 0.07699136352539063, 0.07502236938476563, 0.07476841735839844, 0.07475523376464843, 0.0746659164428711, 0.07491180419921875, 0.07501026916503906, 0.07539155578613281, 0.07468374633789063, 0.07487907409667968, 0.07471366119384766, 0.07499366760253906, 0.07465916442871094, 0.07486940765380859, 0.07456301116943359, 0.0752663345336914, 0.074961181640625, 0.07454621124267578, 0.07447420501708985, 0.07513104248046874, 0.07444054412841797, 0.07486239624023437, 0.07452281951904297, 0.07435494232177735, 0.07436287689208984, 0.07434588623046876, 0.07447801971435547, 0.07438540649414062, 0.07442208099365234, 0.07428950500488281, 0.07428096008300782, 0.07404051208496094, 0.0741240997314453, 0.07540716552734375, 0.07393472290039063, 0.07389421081542968, 0.07451014709472656, 0.07478473663330078, 0.07394121551513672, 
0.07384419250488282, 0.07467996978759765, 0.07371250915527344, 0.0753070068359375, 0.07391027069091796, 0.07385088348388671, 0.07439564514160156, 0.07402816009521485, 0.07402540588378906, 0.07378141021728515, 0.07373033905029297, 0.07393283081054687, 0.07390972900390624, 0.07396812438964843, 0.07375580596923828, 0.07426486206054687, 0.07412793731689453, 0.07397171020507813, 0.07366655731201172, 0.07388569641113281, 0.07476172637939453, 0.0744487075805664, 0.07415618896484374, 0.07385142517089843, 0.07384473419189454, 0.07587430572509765, 0.07428656005859376, 0.0741536636352539, 0.07354659271240234, 0.07472969818115234, 0.07388531494140625, 0.073916259765625, 0.07370310211181641, 0.0739621124267578, 0.07392460632324219, 0.074461181640625, 0.0741416015625, 0.07404918670654297, 0.074219970703125, 0.07404544067382812, 0.07422771453857421, 0.07410435485839843, 0.07399881744384766, 0.07569203186035156, 0.07401254272460937, 0.07371186828613281, 0.07506633758544921, 0.07407504272460938, 0.07383197021484375, 0.0740902099609375, 0.07379535675048828, 0.07428195190429687, 0.07380992126464844, 0.07361331176757813, 0.07419267272949219, 0.07403558349609375, 0.07517167663574219, 0.07380899047851562, 0.07385330963134766, 0.07394921875, 0.074050048828125, 0.07346988677978515, 0.07383395385742188, 0.07411270141601563, 0.07452703857421875, 0.07413545227050782, 0.07419779205322266, 0.07408767700195312, 0.07375059509277344, 0.07390169525146484, 0.0739532470703125, 0.07386608123779297, 0.07363391876220703, 0.07366223907470704, 0.0737335662841797, 0.07380384063720703, 0.07398678588867187, 0.07376627349853515, 0.0737470703125, 0.07380738830566407, 0.07392918395996094, 0.07749836730957031, 0.07434444427490235, 0.074102783203125, 0.07378943634033203, 0.07366854095458984, 0.07374562835693359, 0.07357270050048828, 0.07360972595214844, 0.07367052459716797, 0.07361901092529297, 0.07387789154052735, 0.07357644653320312, 0.07629209899902344, 0.08128121948242187, 0.07390003204345703, 0.07398767852783203, 0.07378985595703125, 0.07594393920898437, 0.07387545776367188, 0.07385651397705079, 0.07391897583007813, 0.07372509002685547, 0.07379996490478516, 0.07404144287109375, 0.07365411376953125, 0.07393859100341797, 0.07440688323974609, 0.07372083282470702, 0.07515238189697265, 0.0742007064819336, 0.07416255950927735, 0.07381401824951171, 0.07385529327392579, 0.07401849365234375, 0.07394652557373046, 0.07413951873779297, 0.07406870269775391, 0.07408844757080078, 0.07396147155761719, 0.0741396484375, 0.07414073944091797, 0.07417536163330078, 0.07400371551513672, 0.07428179168701173, 0.07377481842041016, 0.07394143676757813, 0.0737155532836914, 0.07402265930175782, 0.07396089935302734, 0.07411772918701172, 0.0743938217163086, 0.07402496337890625, 0.07382099151611328, 0.07399014282226563, 0.07528857421875, 0.07453443145751953, 0.07422796630859375, 0.07503024291992187, 0.07501465606689453, 0.07405094146728515, 0.07389373016357421, 0.0738414077758789, 0.07383776092529297, 0.07419785308837891, 0.0739205093383789, 0.0745902099609375, 0.07382457733154296, 0.07422943878173828, 0.07374163055419922, 0.07410121917724609, 0.07395760345458985, 0.07382588958740234, 0.07417488098144531, 0.07368252563476563, 0.07400080108642579, 0.07353654479980469, 0.07510115051269531, 0.07781785583496094, 0.07424614715576172, 0.07391426849365235, 0.07433634948730469, 0.07432803344726563, 0.07456924438476563, 0.0741091537475586, 0.07402703857421875, 0.07385318756103515, 0.07417382049560547, 0.07393548583984375, 0.07388774108886718, 0.07378739166259765, 
0.0743050537109375, 0.0738636474609375, 0.07445913696289062, 0.07396351623535156, 0.07434844970703125, 0.07453091430664062, 0.07439385223388671, 0.07633042907714843, 0.07431609344482422, 0.073744384765625, 0.07389718627929688, 0.07360591888427734, 0.07379488372802734, 0.07436768341064454, 0.07430963134765625, 0.0742476806640625, 0.0740869140625, 0.07381763458251953, 0.07454768371582031, 0.07399235534667968, 0.07427875518798828, 0.0740598373413086, 0.07421331024169922, 0.07401881408691406, 0.07455744171142578, 0.07446627044677734, 0.0750405731201172, 0.0742973403930664, 0.07548518371582032, 0.07456358337402344, 0.07441817474365234, 0.07478272247314453, 0.07471440124511719, 0.07508656311035156, 0.07492403411865234, 0.07495430755615234, 0.07445958709716796, 0.0746185302734375, 0.07490185546875, 0.07490764617919922, 0.07460639953613281, 0.07500630187988282, 0.07623049926757812, 0.07460454559326171, 0.07502851104736329, 0.07456329345703125, 0.07455696105957031, 0.07451312255859376, 0.07481084442138672, 0.07451497650146484, 0.07457759857177734, 0.07489568328857422, 0.074497314453125, 0.07439842987060546, 0.0747315216064453, 0.07716390228271484, 0.07695219421386719, 0.07463279724121094, 0.07468688201904297, 0.07492537689208985, 0.07474361419677734, 0.0746864013671875, 0.0747754898071289, 0.07446063995361328, 0.07471571350097657, 0.074627197265625, 0.07458755493164063, 0.07446681976318359, 0.0758773422241211, 0.07479055786132813, 0.07547331237792969, 0.07463724517822265, 0.07477772521972656, 0.07486752319335938, 0.07482937622070313, 0.07493398284912109, 0.07467257690429688, 0.07968595123291015, 0.07522303771972656, 0.07523069000244141, 0.07527302551269531, 0.07533948516845704, 0.07624089813232422, 0.07642934417724609, 0.07496620941162109, 0.07495484924316406, 0.07487097930908203, 0.0750268783569336]",tokens/s,13.385567841096494,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,13838.872576,7509.77024,0.0,7107.248128,7106.945536,s,1,32.07680078125,32.07680078125,0.0,32.07680078125,32.07680078125,32.07680078125,32.07680078125,[32.07680078125],,kWh,0.0007154708155291663,7.891462448283676e-05,0.00023269435282199935,0.0010270797928340024,,MB,1330.241536,7786.594304,0.0,7363.100672,7335.826944,s,10,1.212924072265625,0.1212924072265625,0.0013188175320680451,0.1207828483581543,0.12244108581542969,0.12354857635498047,0.1244345687866211,"[0.12157955169677734, 0.12465606689453125, 0.12056559753417968, 0.1207103042602539, 0.12185433959960937, 0.12219497680664063, 0.12085539245605469, 0.12023808288574218, 0.12004454040527343, 0.1202252197265625]",tokens/s,2110.6020224482536,kWh,3.5274412060240855e-06,3.8878459107020796e-07,2.3277695998313293e-06,6.243995396925624e-06,tokens/kWh,40999389.609743714,MB,1367.359488,7788.691456,0.0,7363.100672,7289.69216,s,10,76.04806494140625,7.604806494140625,0.028152564197289585,7.598107177734375,7.6443898925781255,7.645182006835937,7.645815698242187,"[7.6442138671875, 7.6386416015625, 7.59535791015625, 7.64597412109375, 7.58462939453125, 7.6116826171875, 7.56838330078125, 7.6008564453125, 7.5662783203125, 
7.59204736328125]",tokens/s,8.284234457318597,kWh,0.00022513234183564078,2.483340494619208e-05,9.775194935276835e-05,0.00034771769613460124,tokens/kWh,181181.460421309,,s,630,76.04523002624512,0.1207067143273732,0.0012658323084796075,0.12042904281616211,0.12197489624023437,0.12336477127075195,0.12522071617126465,"[0.12117526245117187, 0.12376303863525391, 0.12193599700927735, 0.1219052505493164, 0.12207513427734375, 0.12063334655761719, 0.12052889251708984, 0.12080928039550781, 0.1234167709350586, 0.12126191711425781, 0.12117826843261718, 0.12126207733154297, 0.12148121643066406, 0.12126761627197266, 0.12047216033935547, 0.1206393585205078, 0.12011942291259765, 0.11994911956787109, 0.12000412750244141, 0.12079888153076172, 0.12022476959228516, 0.12074518585205078, 0.12005251312255859, 0.12023193359375, 0.11993702697753907, 0.1202339859008789, 0.1203916778564453, 0.12026470184326171, 0.12056166076660156, 0.120659423828125, 0.1210660171508789, 0.1205186538696289, 0.12020326232910156, 0.12037481689453125, 0.12440828704833984, 0.12060671997070313, 0.12030976104736328, 0.12024626922607422, 0.12106752014160156, 0.12019414520263672, 0.12062716674804687, 0.12026694488525391, 0.12233805084228516, 0.1207193603515625, 0.12009677124023438, 0.12053218841552735, 0.12392527770996094, 0.1225707550048828, 0.12393244934082032, 0.1228637466430664, 0.12495471954345704, 0.124640380859375, 0.12395609283447266, 0.122238525390625, 0.12173971557617187, 0.12225721740722656, 0.12112710571289062, 0.12060380554199218, 0.1209229736328125, 0.12105522918701171, 0.12095487976074219, 0.12230860900878907, 0.12234114837646484, 0.12424806213378906, 0.12330121612548828, 0.12194812774658204, 0.12187318420410156, 0.12282822418212891, 0.12137110137939452, 0.12160409545898437, 0.12176383972167969, 0.12161804962158203, 0.12111055755615234, 0.1212702407836914, 0.12073612976074219, 0.12212739562988281, 0.12139209747314453, 0.12062105560302734, 0.12157542419433594, 0.12142591857910157, 0.12086067199707032, 0.12056486511230469, 0.12063977813720703, 0.12250943756103516, 0.12456598663330078, 0.12315795135498046, 0.12162310028076172, 0.12033776092529297, 0.12069267272949219, 0.12287238311767579, 0.12095094299316406, 0.12163590240478515, 0.12088416290283203, 0.12027903747558594, 0.12059648132324219, 0.12064323425292969, 0.12072943878173828, 0.1203114242553711, 0.12086332702636719, 0.12044627380371094, 0.12468502044677734, 0.12063257598876953, 0.12078591918945313, 0.12131702423095703, 0.12110063934326172, 0.12050841522216797, 0.12020121765136718, 0.1205002212524414, 0.12043209838867187, 0.12003382110595703, 0.12070829010009766, 0.12014435577392578, 0.12068227386474609, 0.12050284576416016, 0.12003327941894532, 0.12003513336181641, 0.1246005096435547, 0.12036486053466797, 0.11999456024169922, 0.12135363006591797, 0.11988140869140625, 0.12037417602539062, 0.12007014465332032, 0.12237824249267579, 0.1206702117919922, 0.12040335845947266, 0.12065837097167968, 0.12050959777832031, 0.12125475311279296, 0.12141567993164062, 0.1212252197265625, 0.12185740661621093, 0.12129491424560547, 0.12081005096435547, 0.12134102630615234, 0.12089641571044922, 0.12055510711669921, 0.12128707122802734, 0.12033948516845704, 0.120427490234375, 0.12026985931396485, 0.1199933090209961, 0.1203527069091797, 0.12030777740478515, 0.12038349151611329, 0.12160409545898437, 0.12036505889892578, 0.12062924957275391, 0.11997798156738282, 0.12053298950195312, 0.11985212707519531, 0.12024105834960938, 0.12077875518798828, 0.12058646392822266, 0.11983206176757813, 
0.12012480163574218, 0.120017822265625, 0.12086265563964843, 0.11998783874511719, 0.12055731201171875, 0.12015481567382813, 0.12044290924072265, 0.12026582336425781, 0.12045811462402344, 0.12299420928955078, 0.12070345306396485, 0.1201295394897461, 0.12023788452148437, 0.12066409301757812, 0.12028739166259765, 0.12022774505615234, 0.1219748764038086, 0.12033229064941406, 0.11991219329833984, 0.11978163146972656, 0.1200926742553711, 0.12048121643066406, 0.1197409896850586, 0.12084636688232422, 0.11991855621337891, 0.11966019439697266, 0.11993682861328125, 0.12062937927246094, 0.11988214111328124, 0.1214361572265625, 0.12054528045654297, 0.12090102386474609, 0.12095862579345704, 0.12036601257324218, 0.1205164794921875, 0.1201439971923828, 0.12017254638671875, 0.12001894378662109, 0.12045881652832031, 0.12110819244384766, 0.12087779235839843, 0.12047513580322265, 0.12623104095458984, 0.12178412628173828, 0.12068803405761719, 0.1210765151977539, 0.12068431854248046, 0.12107606506347657, 0.12068851470947266, 0.12392400360107422, 0.12173088073730469, 0.1225648956298828, 0.12158505249023438, 0.12125692749023438, 0.12105078125, 0.12144265747070312, 0.12137267303466796, 0.12215074920654297, 0.12110044860839844, 0.12244918060302734, 0.12207997131347656, 0.12153036499023437, 0.1204487075805664, 0.12250758361816406, 0.12528230285644532, 0.1216569595336914, 0.1250403518676758, 0.12116445159912109, 0.12146015930175781, 0.12109062194824219, 0.12100198364257812, 0.12111257934570313, 0.12184928131103516, 0.12051900482177734, 0.12106159973144531, 0.12044083404541016, 0.1208463363647461, 0.1204035873413086, 0.12048153686523437, 0.12067241668701172, 0.1219158706665039, 0.12171878051757813, 0.12044697570800782, 0.1204142074584961, 0.12162662506103515, 0.12085862731933594, 0.1207820816040039, 0.12086093139648438, 0.12097586822509766, 0.12073369598388672, 0.12051971435546875, 0.12217443084716798, 0.12062073516845703, 0.12111698913574219, 0.1211534423828125, 0.12125596618652344, 0.12122732543945312, 0.12445696258544922, 0.12163686370849609, 0.1211123809814453, 0.12088361358642578, 0.12184758758544922, 0.12078476715087891, 0.11990847778320313, 0.119947265625, 0.11968102264404297, 0.11979718780517579, 0.11962944030761719, 0.1205462417602539, 0.11944512176513672, 0.12043500518798828, 0.11951708984375, 0.11991651153564453, 0.11986492919921875, 0.12042233276367187, 0.12037987518310547, 0.12063705444335937, 0.12018045043945312, 0.1202529296875, 0.11960099029541016, 0.12193228912353515, 0.1202708511352539, 0.11957247924804687, 0.11924070739746094, 0.12433817291259766, 0.11958662414550782, 0.11946822357177735, 0.11958067321777344, 0.11975679779052735, 0.11989961242675781, 0.12109468841552734, 0.12064358520507812, 0.12064556884765625, 0.12119859313964844, 0.12021151733398437, 0.12051660919189452, 0.1203609619140625, 0.1206204833984375, 0.12070893096923828, 0.12003609466552734, 0.12063948822021485, 0.1207801284790039, 0.12089411163330079, 0.12083814239501953, 0.11984639739990234, 0.11928390502929688, 0.1203653793334961, 0.1198053741455078, 0.1201885757446289, 0.11981484985351562, 0.11962979125976562, 0.11980569458007813, 0.12043724822998046, 0.1205063705444336, 0.12012940979003907, 0.12007030487060547, 0.1196502685546875, 0.12038349151611329, 0.12049612426757812, 0.12020531463623046, 0.12124976348876954, 0.12147100830078125, 0.12070902252197266, 0.11994441223144531, 0.1210008316040039, 0.12259542083740234, 0.12078492736816407, 0.12101209259033203, 0.12053913879394532, 0.12082380676269532, 0.12039139556884766, 
0.12024765014648438, 0.12228498840332032, 0.12000870513916016, 0.11981209564208985, 0.12007628631591796, 0.11984076690673828, 0.12053084564208984, 0.12214688110351563, 0.12292713928222657, 0.1210245132446289, 0.12655615997314454, 0.12305190277099609, 0.12099136352539062, 0.12038719940185547, 0.12041516876220704, 0.12106073760986329, 0.12430188751220703, 0.11970079803466797, 0.11946463775634765, 0.12025241851806641, 0.11993920135498047, 0.12025567626953125, 0.119548095703125, 0.12348889923095703, 0.11971366119384766, 0.119910400390625, 0.11932057952880859, 0.11962166595458984, 0.11997523498535156, 0.11990828704833985, 0.11952153778076172, 0.1199939193725586, 0.12078688049316406, 0.12013053131103515, 0.11953334045410156, 0.12621641540527342, 0.11967810821533204, 0.12136547088623047, 0.12080934143066406, 0.12007628631591796, 0.12026188659667969, 0.12515408325195312, 0.12062716674804687, 0.1202135009765625, 0.11965235137939453, 0.11966873931884765, 0.11977430725097657, 0.11988265228271484, 0.11995340728759765, 0.12016835021972656, 0.11986521911621094, 0.12076441955566407, 0.12049820709228516, 0.12086067199707032, 0.12010291290283204, 0.12000192260742187, 0.11950316619873047, 0.11989842987060546, 0.11972946929931641, 0.12019987487792969, 0.11994220733642579, 0.12003997039794922, 0.12009264373779296, 0.12524793243408203, 0.12121087646484376, 0.11989606475830078, 0.12045718383789063, 0.1203194580078125, 0.11968141174316406, 0.11970374298095703, 0.12043059539794922, 0.11975199890136719, 0.11991929626464844, 0.11962726593017578, 0.12024422454833984, 0.12051712036132813, 0.12085008239746094, 0.11956979370117188, 0.1193440933227539, 0.11921612548828125, 0.11939020538330078, 0.11959705352783204, 0.11974861145019532, 0.12298239898681641, 0.11984076690673828, 0.11908096313476563, 0.11985919952392578, 0.11934515380859376, 0.11956838226318359, 0.12029952239990234, 0.12345958709716796, 0.12033023834228515, 0.12002877044677734, 0.11922211456298829, 0.11935177612304687, 0.11969248199462891, 0.11993901062011719, 0.11993392181396484, 0.11952252960205079, 0.11997376251220702, 0.12047638702392578, 0.12016044616699219, 0.11946934509277343, 0.12020390319824219, 0.1200006103515625, 0.1195287322998047, 0.11956444549560546, 0.12016902160644531, 0.11950863647460938, 0.11986774444580078, 0.11946393585205078, 0.11965030670166016, 0.11999231719970703, 0.12098076629638672, 0.12105769348144531, 0.12086617279052735, 0.1317418212890625, 0.12037923431396484, 0.12058678436279296, 0.12068819427490235, 0.12056992340087891, 0.12025497436523437, 0.12499558258056641, 0.1230192642211914, 0.12148726654052734, 0.11962786865234375, 0.1192591323852539, 0.11913568115234376, 0.11940451049804687, 0.11945830535888671, 0.1198183364868164, 0.1195428466796875, 0.1198458251953125, 0.12553215789794922, 0.1213685760498047, 0.11973836517333984, 0.1193881607055664, 0.11980595397949219, 0.12354934692382813, 0.1199373779296875, 0.12082198333740235, 0.1205163803100586, 0.12017612457275391, 0.12039628601074219, 0.12021046447753907, 0.12099068450927734, 0.12065586853027344, 0.12015155029296876, 0.12048639678955078, 0.12077785491943359, 0.12030655670166016, 0.12069026947021484, 0.12032246398925782, 0.11982003021240234, 0.121106689453125, 0.11949472045898438, 0.11956832122802734, 0.11996886444091796, 0.12086768341064454, 0.12009827423095704, 0.11957308959960937, 0.11942044830322265, 0.11916336059570312, 0.11944866943359375, 0.11950787353515625, 0.12226764678955078, 0.11941651153564453, 0.11974073791503906, 0.12122726440429688, 0.11975475311279297, 
0.11961449432373047, 0.12040672302246094, 0.12162076568603515, 0.1201941146850586, 0.1206561279296875, 0.1201957778930664, 0.12093440246582031, 0.12001900482177734, 0.12051683044433593, 0.12008342742919922, 0.11970438385009766, 0.12045219421386719, 0.12160073852539062, 0.11982867431640624, 0.12051376342773437, 0.11937359619140625, 0.11944857788085937, 0.11936959838867188, 0.11974861145019532, 0.1196965789794922, 0.11964921569824219, 0.11962572479248047, 0.11955712127685547, 0.11937657928466797, 0.1194089584350586, 0.11963597106933593, 0.11939430236816406, 0.11954176330566406, 0.12024947357177734, 0.12510409545898438, 0.1211072006225586, 0.12145884704589843, 0.12046745300292969, 0.12049203491210937, 0.12138880157470704, 0.1212376937866211, 0.12107756805419922, 0.11972402954101563, 0.12009088134765625, 0.12006566619873046, 0.12031014251708984, 0.12010086059570313, 0.11979679870605468, 0.11974947357177734, 0.12020861053466797, 0.12010585784912109, 0.11954956817626954, 0.12005619049072265, 0.11994700622558593, 0.11953177642822266, 0.11968307495117188, 0.11928550720214844, 0.11969081878662109, 0.11985350036621094, 0.11952973175048828, 0.11951103973388671, 0.11982438659667968, 0.12005487823486329, 0.11965718078613281, 0.11943059539794922, 0.11948467254638671, 0.1196794204711914, 0.12014093017578124, 0.11945619201660156, 0.12023766326904296, 0.12002323150634765, 0.11956092834472656, 0.12107679748535156, 0.11955091094970703, 0.11995331573486329, 0.12193926239013672, 0.11940739440917969, 0.11953171539306641, 0.12016118621826172, 0.12046281433105469, 0.11960768127441407, 0.12154249572753906, 0.12115372467041016, 0.11987353515625, 0.11983615875244141, 0.11949919891357422, 0.11968841552734374, 0.12062294769287109, 0.1211786880493164, 0.11985919952392578, 0.11950943756103516, 0.12021097564697265, 0.12021193695068359, 0.12000665283203125, 0.11966214752197266, 0.1203218536376953, 0.12022847747802734, 0.12385279846191406, 0.12020732879638672, 0.12058383941650391, 0.11935577392578126, 0.11991798400878906, 0.11986521911621094, 0.1195692138671875, 0.1207656021118164, 0.12037606048583985, 0.11999565124511719, 0.12035750579833984, 0.12050633239746093, 0.12024848175048829, 0.11992870330810547, 0.12024966430664062, 0.12064780426025391, 0.1201568603515625, 0.12014169311523437, 0.11977471923828124, 0.1241053466796875, 0.11994092559814454, 0.12008367919921875, 0.1197100830078125, 0.1198372802734375, 0.12048560333251954, 0.1219750747680664, 0.12110438537597656, 0.1210343017578125, 0.12029337310791016, 0.1203238754272461, 0.12030429077148437, 0.12075827026367188, 0.1218878402709961, 0.12129373168945312, 0.12197020721435547, 0.12126866912841797, 0.12183494567871093, 0.1214900131225586, 0.12080499267578125, 0.1207069091796875, 0.12051010894775391, 0.12093119812011718]",tokens/s,8.284543288021762,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,4147.871744,1936.654336,0.0,1541.40672,1525.63712,s,1,12.555927734375,12.555927734375,0.0,12.555927734375,12.555927734375,12.555927734375,12.555927734375,[12.555927734375],,kWh,0.0001595009761916723,1.7586997426018007e-05,5.85617135159984e-05,0.0002356496871336887,,MB,2569.121792,2083.454976,0.0,1667.23584,1626.061824,s,10,0.7675096588134767,0.07675096588134765,0.0005286314990181236,0.07675532531738281,0.0775672966003418,0.07761641578674316,0.07765571113586425,"[0.07755638122558593, 0.07619564819335937, 0.07629379272460937, 0.07766553497314453, 0.07667078399658203, 0.07685852813720703, 0.07681414031982423, 0.07687443542480468, 0.07588390350341796, 0.0766965103149414]",tokens/s,3335.462909949049,kWh,2.2884811028970994e-06,2.523801323756678e-07,1.3445127943593495e-06,3.885374029632117e-06,tokens/kWh,65888122.494152546,MB,2573.447168,2167.341056,0.0,1751.12192,1664.864256,s,10,46.35454833984375,4.635454833984375,0.021237572042927894,4.638671142578126,4.65755693359375,4.6598412109375,4.6616686328125,"[4.66212548828125, 4.57885400390625, 4.64107568359375, 4.6329560546875, 4.63386181640625, 4.63990234375, 4.6299462890625, 4.63743994140625, 4.64133740234375, 4.65704931640625]",tokens/s,13.590899330551508,kWh,0.00013567638563377008,1.4965091889495944e-05,5.7426534222441e-05,0.00020806801174570706,tokens/kWh,302785.6106828965,,s,630,46.3505684890747,0.07357233093503922,0.0009531439625196623,0.07348726272583009,0.07419739151000977,0.0747845573425293,0.07685058700561524,"[0.07363286590576172, 0.07409756469726562, 0.0744362564086914, 0.07433209228515625, 0.07538934326171876, 0.07499919891357422, 0.0745289306640625, 0.07482441711425782, 0.07418032073974609, 0.07418870544433594, 0.07420249938964844, 0.0743571548461914, 0.07416783905029296, 0.07396227264404297, 0.07418470764160157, 0.07486463928222656, 0.07404326629638672, 0.07400905609130859, 0.07421078491210938, 0.0736135025024414, 0.07364335632324219, 0.0731798095703125, 0.07325251007080077, 0.07327574157714843, 0.07417974090576172, 0.07601004791259766, 0.07395152282714844, 0.07336495971679688, 0.07331046295166016, 0.07320604705810548, 0.07272022247314452, 0.07378345489501953, 0.07450624084472657, 0.07554975891113282, 0.07499257659912109, 0.07408579254150391, 
0.07351561737060547, 0.07359270477294921, 0.07390425872802735, 0.0740126724243164, 0.07403724670410156, 0.0737647705078125, 0.0741968994140625, 0.0739186553955078, 0.07388719940185547, 0.07434041595458984, 0.07392675018310547, 0.07404544067382812, 0.07374227142333985, 0.07426227569580078, 0.07396380615234376, 0.07452304077148438, 0.07418418884277343, 0.07326966094970704, 0.07416841888427735, 0.07426048278808593, 0.0731852798461914, 0.07272857666015625, 0.07323820495605468, 0.07423622131347657, 0.07368294525146485, 0.0729393310546875, 0.07306655883789062, 0.07271746826171875, 0.07335830688476562, 0.07344537353515625, 0.07321804809570312, 0.0731852798461914, 0.0731852798461914, 0.07241506958007812, 0.07221673583984375, 0.07236608123779296, 0.07281394958496094, 0.07236006164550782, 0.07230419158935547, 0.07224211120605468, 0.07166761779785157, 0.0713927001953125, 0.07264473724365235, 0.07255072021484375, 0.07473583984375, 0.07268160247802734, 0.07275651550292969, 0.07278435516357422, 0.07289868927001954, 0.07305625915527343, 0.07281868743896484, 0.07282073974609375, 0.07301529693603516, 0.0726566390991211, 0.0725506591796875, 0.0727040023803711, 0.07286579132080079, 0.07556050872802735, 0.07286211395263673, 0.07214316558837891, 0.07196601867675781, 0.07180271911621093, 0.07176371002197265, 0.07261676788330078, 0.07259053039550781, 0.07229727935791015, 0.07170867156982422, 0.07189654541015625, 0.07184028625488281, 0.07237222290039062, 0.07224114990234375, 0.072443359375, 0.07210787200927735, 0.07278224182128906, 0.07250972747802735, 0.07258927917480469, 0.07238220977783204, 0.07290230560302734, 0.07249369812011719, 0.07252374267578125, 0.07265647888183593, 0.07304409790039063, 0.07272684478759765, 0.0730091552734375, 0.07304310607910157, 0.07286415863037109, 0.07288467407226562, 0.07320371246337891, 0.07302758026123046, 0.07314742279052734, 0.07272857666015625, 0.07278931427001953, 0.07241506958007812, 0.07219081878662109, 0.07196466827392578, 0.07222866821289063, 0.07342851257324219, 0.07308354949951172, 0.07262617492675781, 0.07269376373291016, 0.07255449676513671, 0.07333814239501953, 0.07296688079833985, 0.07559101104736328, 0.07369999694824218, 0.07328873443603516, 0.07339107513427734, 0.07309053039550781, 0.07300559997558594, 0.07305625915527343, 0.07314809417724609, 0.07325878143310546, 0.07324931335449218, 0.07400393676757812, 0.07327388763427735, 0.07337983703613281, 0.0735763168334961, 0.07405494689941407, 0.07403401947021485, 0.07396870422363282, 0.07357103729248046, 0.07399183654785156, 0.07347666931152344, 0.07604576110839843, 0.07377958679199219, 0.07350905609130859, 0.07421542358398438, 0.07330201721191407, 0.07312329864501953, 0.07303017425537109, 0.07366655731201172, 0.07864115142822266, 0.08415436553955079, 0.07364742279052734, 0.07329020690917969, 0.07335948944091797, 0.07350486755371094, 0.07385292816162109, 0.07397711944580078, 0.0733716812133789, 0.07359286499023437, 0.07325762939453125, 0.07280748748779296, 0.07296617889404297, 0.07351798248291015, 0.07389759826660157, 0.07337750244140626, 0.07322895812988281, 0.07452009582519531, 0.07337955474853515, 0.07356902313232422, 0.07339008331298828, 0.07353343963623046, 0.07342082977294923, 0.07342489624023438, 0.07695155334472656, 0.07379273223876953, 0.07364892578125, 0.07393689727783204, 0.07352729797363282, 0.07379558563232422, 0.0732938232421875, 0.07336870574951172, 0.07370022583007813, 0.07356928253173828, 0.07346892547607421, 0.073670654296875, 0.07297577667236328, 0.07353376007080079, 0.07315885162353515, 
0.07308054351806641, 0.07339628601074219, 0.07283039855957031, 0.07298957061767578, 0.0731025619506836, 0.07237506866455078, 0.0727441635131836, 0.07294236755371093, 0.07471513366699219, 0.07314841461181641, 0.0736048355102539, 0.07299282836914063, 0.07292540740966796, 0.07327523040771484, 0.07334300994873047, 0.0734434585571289, 0.07329154968261718, 0.07288614654541016, 0.07367910766601563, 0.07335740661621094, 0.0735561294555664, 0.07380156707763671, 0.07639987182617188, 0.0742770233154297, 0.0734806365966797, 0.07368924713134765, 0.07410892486572265, 0.07372140502929687, 0.07384722900390625, 0.07395712280273438, 0.07317324829101562, 0.073385986328125, 0.07311974334716796, 0.07318032073974609, 0.07339644622802734, 0.07318592071533203, 0.07310336303710938, 0.07313203430175781, 0.07336131286621093, 0.07365987396240234, 0.07404771423339844, 0.07361558532714843, 0.07413330841064453, 0.07330595397949219, 0.0732267837524414, 0.07335731506347656, 0.07353392028808593, 0.07341056060791015, 0.07363174438476562, 0.07343718719482421, 0.07351423645019531, 0.07327410888671874, 0.07401612854003906, 0.07365036773681641, 0.07361376190185547, 0.07465369415283203, 0.07373619079589844, 0.07375852966308594, 0.07400876617431641, 0.07398194885253906, 0.07402700805664063, 0.07387071990966797, 0.07386518096923828, 0.07487693023681641, 0.07371958160400391, 0.0737608642578125, 0.07510018920898437, 0.07329811096191406, 0.07327391815185547, 0.07270604705810547, 0.07257087707519531, 0.07240022277832031, 0.07253059387207031, 0.07329177856445312, 0.07334912109375, 0.07274444580078125, 0.0728616943359375, 0.0729912338256836, 0.07232307434082032, 0.07251948547363281, 0.07337388610839844, 0.0736153564453125, 0.07432125091552734, 0.07364790344238281, 0.07312236785888672, 0.07343289947509765, 0.07292569732666015, 0.07576985931396485, 0.07389929962158204, 0.0736541748046875, 0.07360185241699219, 0.07345561981201172, 0.07408844757080078, 0.07387340545654297, 0.07383859252929688, 0.07398162841796875, 0.07588006591796875, 0.0740882568359375, 0.07352614593505859, 0.07344278717041015, 0.07388214111328124, 0.07403929901123046, 0.07345938873291015, 0.07262239837646485, 0.07291494750976563, 0.0723719711303711, 0.07230451202392578, 0.07275558471679687, 0.07326515197753906, 0.07347232055664063, 0.07317052459716797, 0.07319344329833985, 0.07348899078369141, 0.07371161651611328, 0.07345331573486329, 0.07324002838134766, 0.07342540740966796, 0.07329596710205079, 0.07360736083984375, 0.07365328216552734, 0.07356893157958984, 0.07330847930908203, 0.07306034851074218, 0.07335116577148437, 0.07307263946533203, 0.07320121765136718, 0.07326969909667969, 0.07365427398681641, 0.07371981048583984, 0.07352114868164063, 0.07386930847167969, 0.07362470245361329, 0.07353024291992187, 0.07378514862060546, 0.07361759948730469, 0.07400857543945312, 0.07386930847167969, 0.07413731384277343, 0.07371910095214844, 0.07327638244628906, 0.07323033905029297, 0.07337471771240234, 0.07535308837890625, 0.07372185516357421, 0.07299276733398438, 0.07303158569335938, 0.07302358245849609, 0.07372799682617187, 0.07341670227050781, 0.07338317108154296, 0.07334783935546875, 0.07330544281005859, 0.07347676849365234, 0.07311542510986328, 0.0735293426513672, 0.07365984344482422, 0.07591385650634766, 0.07370563507080079, 0.07315625762939452, 0.07384496307373047, 0.07345891571044921, 0.07628511810302735, 0.07391808319091797, 0.07362105560302734, 0.07378998565673828, 0.07396937561035156, 0.07345926666259765, 0.07433235168457031, 0.07423011016845703, 0.07394675445556641, 
0.07360569763183594, 0.0736911392211914, 0.07377362823486328, 0.07383618927001953, 0.07391295623779297, 0.0738971176147461, 0.07325923156738282, 0.07295244598388671, 0.07318246459960938, 0.0730467529296875, 0.07293341064453125, 0.07340982055664062, 0.07308313751220703, 0.07349247741699219, 0.07309523010253906, 0.07319110107421875, 0.07315325164794922, 0.07337369537353515, 0.07328313446044922, 0.07356460571289063, 0.07350819396972656, 0.07349906921386719, 0.07314771270751953, 0.07299919891357422, 0.07346854400634766, 0.0730439682006836, 0.07285555267333985, 0.07292230224609375, 0.07290480041503906, 0.07315309143066406, 0.07519657897949218, 0.07401881408691406, 0.07404105377197266, 0.07354601287841797, 0.07383245086669922, 0.07370956420898438, 0.07348751831054688, 0.07385929870605469, 0.07365023803710938, 0.07372857666015625, 0.07359487915039062, 0.07417641448974609, 0.07311753845214844, 0.07279641723632813, 0.07354573059082031, 0.07366451263427734, 0.07345542144775391, 0.07298377227783204, 0.07326515197753906, 0.07323699188232421, 0.07360355377197265, 0.0772833251953125, 0.073148193359375, 0.0726876449584961, 0.07279430389404297, 0.07270601654052734, 0.07275933074951171, 0.0733406753540039, 0.07349068450927734, 0.07510015869140625, 0.07467622375488281, 0.07333846282958985, 0.07329526519775391, 0.0729706268310547, 0.07345177459716797, 0.07344287872314453, 0.0732783966064453, 0.07405084991455078, 0.07362582397460937, 0.07370502471923829, 0.07397606658935547, 0.07406668853759765, 0.07388159942626953, 0.0736193618774414, 0.07399203491210937, 0.07390847778320313, 0.07416012573242188, 0.07395308685302734, 0.07350905609130859, 0.07260105895996094, 0.07335507202148438, 0.07308771514892579, 0.07347200012207031, 0.07310131072998047, 0.07352320098876954, 0.07299481964111328, 0.07306649780273437, 0.07273395538330078, 0.07285836791992187, 0.07314828491210937, 0.07322022247314453, 0.07353679656982422, 0.07287471771240234, 0.07285478210449219, 0.07389900970458985, 0.07332838439941407, 0.07323824310302735, 0.07327568054199218, 0.07398512268066407, 0.0735282211303711, 0.07319245147705078, 0.07326822662353516, 0.07330611419677735, 0.07298252868652344, 0.07339622497558594, 0.0739405746459961, 0.07770297241210937, 0.0738289566040039, 0.07369091033935547, 0.0736032943725586, 0.07374348449707031, 0.07405455780029296, 0.0736455078125, 0.07373868560791015, 0.07373625946044922, 0.07392054748535157, 0.07358054351806641, 0.07315660858154296, 0.07368879699707032, 0.07327772521972656, 0.07551385498046875, 0.075683837890625, 0.07353548431396484, 0.07320780944824219, 0.07313203430175781, 0.07348223876953125, 0.0733655014038086, 0.07290265655517578, 0.07260569763183594, 0.07320086669921876, 0.07315916442871094, 0.07353330993652343, 0.07333491516113282, 0.07342832183837891, 0.07371052551269532, 0.07314227294921875, 0.07302143859863282, 0.07370751953125, 0.07368089294433594, 0.07367459106445312, 0.07377935791015625, 0.07438540649414062, 0.07428006744384766, 0.07400537872314453, 0.07431782531738282, 0.07392227172851562, 0.07390995025634765, 0.07448226928710938, 0.07391177368164062, 0.07409062194824219, 0.07399257659912109, 0.07378880310058594, 0.07344400024414062, 0.07318505859375, 0.07306182098388672, 0.07336630249023438, 0.07377715301513672, 0.07372793579101562, 0.07376697540283203, 0.07316070556640625, 0.07347718048095703, 0.07340697479248047, 0.0732656021118164, 0.07319529724121093, 0.07469481658935546, 0.07360723114013672, 0.07332659149169922, 0.07289830780029297, 0.07314969635009766, 0.07314125061035157, 
0.07374140930175781, 0.07375555419921875, 0.07340032196044922, 0.0732221450805664, 0.0737628173828125, 0.07332157135009766, 0.07327632141113281, 0.07357014465332032, 0.07454105377197266, 0.07370563507080079, 0.07386930847167969, 0.07355526733398438, 0.0738331527709961, 0.07350873565673828, 0.07565052795410156, 0.07514988708496094, 0.07384893035888672, 0.07375798034667969, 0.07381830596923829, 0.07352374267578125, 0.07337574768066406, 0.08290713500976563, 0.07831756591796875, 0.07396351623535156, 0.07338768005371094, 0.07334127807617187, 0.07366246032714843, 0.07352835083007812, 0.07360157012939453, 0.07343148803710937, 0.0741396484375, 0.07349584197998046, 0.07312662506103515, 0.07303971099853515, 0.07331430053710937, 0.0732733154296875, 0.07362783813476563, 0.07322557067871094, 0.0735013427734375, 0.07346320343017577, 0.07385346984863281, 0.07385298919677734, 0.07446870422363282, 0.07348700714111328, 0.07393484497070313, 0.07396966552734376, 0.074061279296875, 0.07423814392089843, 0.07397411346435546, 0.07403724670410156, 0.07388931274414062, 0.07389231872558594, 0.07379055786132813, 0.07406412506103516, 0.0735666275024414, 0.07350297546386719, 0.0735600357055664, 0.07388368225097657, 0.07351254272460937, 0.07359689331054688, 0.07320006561279296, 0.073635009765625, 0.07366313934326171, 0.07350188446044922, 0.07329027557373047, 0.07324217224121093, 0.07284825897216797, 0.07331948852539062, 0.07365113830566407, 0.0766033935546875, 0.07364749145507812, 0.07342144012451172, 0.07307689666748046, 0.07324604797363281, 0.07338547515869141, 0.07388604736328125, 0.07380035400390625, 0.07372185516357421, 0.0738506851196289, 0.0740040283203125, 0.07414979553222656, 0.07420182037353516, 0.07400857543945312, 0.07376076507568359]",tokens/s,13.592066301160843,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,15358.009344,7937.589248,0.0,7535.067136,7509.663744,s,1,32.64576953125,32.64576953125,0.0,32.64576953125,32.64576953125,32.64576953125,32.64576953125,[32.64576953125],,kWh,0.0007442335754875027,8.208729817367831e-05,0.00024150713765000442,0.0010678280113111853,,MB,1919.721472,8111.652864,0.0,7688.159232,7658.851328,s,10,1.257265350341797,0.1257265350341797,0.0016323760755084422,0.1255154571533203,0.12720218048095702,0.12842002182006834,0.12939429489135743,"[0.12693154907226561, 0.12547350311279296, 0.12534835052490234, 0.12414822387695312, 0.12651465606689452, 0.1296378631591797, 0.12555741119384767, 0.12564643096923828, 0.12455379486083984, 0.12345356750488282]",tokens/s,2036.1652369597596,kWh,3.6324254768003905e-06,4.0059384396007863e-07,2.410821544567644e-06,6.443840865328114e-06,tokens/kWh,39727858.79574398,MB,1936.314368,8126.332928,0.0,7700.742144,7600.133632,s,10,78.62527490234375,7.862527490234375,0.03938629217596123,7.857503173828125,7.906354833984375,7.919159838867188,7.9294038427734375,"[7.93196484375, 7.85331005859375, 7.85054638671875, 
7.8418095703125, 7.90350927734375, 7.8935791015625, 7.8616962890625, 7.86826171875, 7.84112548828125, 7.77947216796875]",tokens/s,8.01269058559718,kWh,0.0002312102870623667,2.5503622150087345e-05,0.00010191881747463125,0.0003586327266870853,tokens/kWh,175667.18068919808,,s,630,78.62301280975343,0.12479843303135463,0.0012697736911788265,0.12459076690673829,0.12616445541381838,0.12698219680786133,0.1287993815612793,"[0.12439462280273438, 0.1252258529663086, 0.126293212890625, 0.12498339080810547, 0.12431763458251953, 0.12447846221923828, 0.12415360260009765, 0.12468150329589844, 0.12438601684570312, 0.12453660583496094, 0.12490774536132812, 0.12474918365478516, 0.12412992095947266, 0.1247457275390625, 0.1241374740600586, 0.1266616973876953, 0.12455417633056641, 0.12447743988037109, 0.12485836791992187, 0.12617724609375, 0.12513292694091796, 0.1257473907470703, 0.12554108428955077, 0.12555929565429688, 0.12617164611816406, 0.12683468627929687, 0.1255014419555664, 0.12701491546630858, 0.1251256332397461, 0.12516425323486327, 0.12518019104003905, 0.125085693359375, 0.12879898071289062, 0.12502400207519532, 0.12548300933837891, 0.12597404479980467, 0.1255305938720703, 0.1257943344116211, 0.12515939331054687, 0.12566732788085938, 0.12950527954101562, 0.1258200988769531, 0.1258765411376953, 0.12835475158691406, 0.12744448089599608, 0.12877017211914063, 0.13348294067382813, 0.1261440658569336, 0.12689657592773437, 0.12542361450195313, 0.1258436508178711, 0.12651052856445313, 0.12603187561035156, 0.1256083221435547, 0.12595346832275392, 0.12583379364013672, 0.1265786895751953, 0.12720742034912108, 0.1254092788696289, 0.12756764984130858, 0.12649420928955077, 0.12690505981445313, 0.12571810913085937, 0.12499683380126952, 0.12476700592041015, 0.12428492736816406, 0.12453244781494141, 0.1249940185546875, 0.12431951904296876, 0.12396537780761718, 0.1241703338623047, 0.12444041442871094, 0.12444687652587891, 0.12535603332519532, 0.12516480255126952, 0.12444338989257812, 0.12420323181152344, 0.12453842926025391, 0.1258436813354492, 0.12502019500732423, 0.12512249755859375, 0.12492803192138671, 0.12548233795166017, 0.12462147521972657, 0.12575334167480468, 0.12431183624267578, 0.12398563385009766, 0.12488428497314454, 0.1253272933959961, 0.12454374694824219, 0.12451558685302734, 0.12380572509765625, 0.12410060882568359, 0.12387401580810548, 0.12353330993652344, 0.12412710571289062, 0.12405709075927734, 0.1237448959350586, 0.12350902557373047, 0.12351888275146485, 0.12370297241210937, 0.12354962921142579, 0.12355375671386719, 0.12345161437988281, 0.12370960235595703, 0.12357926177978516, 0.12436723327636719, 0.123808349609375, 0.12420310211181641, 0.1241568603515625, 0.1239644775390625, 0.12749823760986329, 0.12451216125488282, 0.12408329772949218, 0.1258853454589844, 0.12497100830078126, 0.12484607696533204, 0.12464284515380859, 0.1251025619506836, 0.12949609375, 0.1260014419555664, 0.12456620788574219, 0.12527340698242187, 0.12486466979980469, 0.12548473358154297, 0.12656317138671874, 0.12529049682617188, 0.12441168212890626, 0.1242391357421875, 0.12528739166259767, 0.12409228515625, 0.1240679702758789, 0.12440502166748046, 0.12693369293212892, 0.12413475036621094, 0.12554214477539063, 0.12414604949951172, 0.12459062194824219, 0.12407510375976563, 0.12691145324707032, 0.12404962921142579, 0.12469132995605468, 0.12485513305664063, 0.12483379364013672, 0.1245306854248047, 0.12473753356933594, 0.12439740753173828, 0.12430352020263671, 0.1239747543334961, 0.12444332885742188, 0.1250326690673828, 
0.12452249908447266, 0.12471523284912109, 0.12481308746337891, 0.1241374740600586, 0.12395724487304688, 0.12361737823486328, 0.12878428649902343, 0.1252084503173828, 0.1252332763671875, 0.12400956726074219, 0.1245348129272461, 0.12426255798339844, 0.12374089813232422, 0.12354150390625, 0.12382585906982421, 0.124255615234375, 0.12451110076904297, 0.125112060546875, 0.12502457427978517, 0.12407603454589844, 0.12478668975830078, 0.12369510650634766, 0.12374137878417969, 0.12422637176513672, 0.1240343017578125, 0.12383414459228516, 0.12429206085205079, 0.12396502685546874, 0.1289129638671875, 0.1238239974975586, 0.12354819488525391, 0.1241747817993164, 0.12499267578125, 0.12439155578613281, 0.12547113800048829, 0.12427295684814453, 0.12428902435302734, 0.12401862335205079, 0.12764019012451172, 0.12525504302978516, 0.1242131805419922, 0.12474412536621093, 0.1244199981689453, 0.1241539535522461, 0.12381388854980468, 0.12364185333251954, 0.12723404693603516, 0.12492361450195312, 0.12438339233398438, 0.12420953369140625, 0.12428851318359375, 0.12385033416748047, 0.12471068572998047, 0.1247466278076172, 0.12389395141601563, 0.12432774353027344, 0.1239502410888672, 0.12410543823242187, 0.12382835388183594, 0.12383174133300781, 0.12344172668457032, 0.12350863647460937, 0.12364604949951172, 0.12503858947753907, 0.12428435516357422, 0.12484217834472656, 0.1241496353149414, 0.12419747161865234, 0.1235598373413086, 0.12400025939941406, 0.1246003189086914, 0.12421529388427735, 0.12448563385009766, 0.12438323211669922, 0.12401999664306641, 0.12357705688476563, 0.12339552307128906, 0.12340643310546875, 0.12339177703857422, 0.12436688232421875, 0.12496665954589843, 0.1256041564941406, 0.12436540985107422, 0.12397571563720704, 0.12351439666748047, 0.12768505859375, 0.1241677474975586, 0.12497350311279297, 0.12497305297851563, 0.12461996459960938, 0.1251398696899414, 0.12434422302246094, 0.12450611114501953, 0.12416204833984375, 0.12486985778808593, 0.12450678253173827, 0.12465561676025391, 0.1258571548461914, 0.1254039077758789, 0.12441942596435547, 0.12421186828613281, 0.12415590667724609, 0.12408201599121094, 0.12431132507324219, 0.12490175628662109, 0.12497280120849609, 0.12472566223144531, 0.12424752044677734, 0.1239658203125, 0.12397904205322266, 0.12423651123046875, 0.12506050872802735, 0.12562038421630858, 0.12486495971679687, 0.12485759735107421, 0.1245869140625, 0.12488025665283203, 0.12479126739501953, 0.12869427490234375, 0.12512252807617188, 0.12597046661376954, 0.12600665283203125, 0.12541970825195312, 0.12520825958251952, 0.1249717788696289, 0.1264680938720703, 0.1268490219116211, 0.1253416976928711, 0.12588646697998046, 0.12548300933837891, 0.1252838363647461, 0.12602371215820313, 0.12656483459472656, 0.1248358383178711, 0.12511436462402345, 0.12557107543945312, 0.12541248321533202, 0.12624082946777343, 0.12558009338378906, 0.12565074920654296, 0.12518828582763672, 0.12505731201171874, 0.12521769714355468, 0.12526265716552734, 0.12577792358398437, 0.1274931182861328, 0.1255700454711914, 0.12610364532470703, 0.12514031982421875, 0.12517433929443358, 0.12530073547363282, 0.12572262573242188, 0.12624671936035156, 0.12559910583496095, 0.12879954528808593, 0.1252737274169922, 0.12492633819580078, 0.12486450958251953, 0.12547277069091797, 0.12568313598632813, 0.1268716812133789, 0.12543869018554688, 0.1258656997680664, 0.12528435516357422, 0.127283203125, 0.12602214050292967, 0.12575481414794923, 0.12606326293945314, 0.12512796783447266, 0.12570047760009764, 0.12549126434326172, 0.1246735382080078, 
0.12679859161376952, 0.12705152130126954, 0.12687769317626954, 0.12677228546142577, 0.1257493133544922, 0.12577677154541014, 0.12474575805664062, 0.12496482849121093, 0.12494438171386718, 0.12463091278076172, 0.12478607940673828, 0.1260181121826172, 0.12482189178466797, 0.12503632354736327, 0.12459420776367187, 0.1244180145263672, 0.12433612823486329, 0.12546047973632812, 0.12413046264648438, 0.12547071838378906, 0.12475218963623047, 0.12459091186523437, 0.12539260864257812, 0.12395471954345703, 0.12417689514160156, 0.12647248077392578, 0.12427616119384766, 0.12553612518310547, 0.12425459289550782, 0.12404441833496094, 0.12429811096191407, 0.1240453109741211, 0.12791161346435548, 0.1244224624633789, 0.12426854705810547, 0.12555225372314452, 0.1247031021118164, 0.12443238067626954, 0.13464335632324217, 0.12487510681152343, 0.12425750732421875, 0.12393551635742188, 0.12480464172363281, 0.12534127807617187, 0.12406896209716797, 0.12393186950683593, 0.1284019775390625, 0.12464537811279297, 0.12436998748779297, 0.12503711700439454, 0.12415805053710938, 0.12618268585205078, 0.12472831726074218, 0.1247088623046875, 0.12468943786621094, 0.12498534393310547, 0.12492390441894531, 0.12486041259765625, 0.12509798431396485, 0.12491980743408203, 0.12494166564941406, 0.12435932922363281, 0.12440576171875, 0.12476166534423828, 0.12483545684814454, 0.12420178985595703, 0.12558678436279297, 0.12454374694824219, 0.12395935821533204, 0.12395417785644532, 0.12788015747070314, 0.1250486068725586, 0.12514745330810548, 0.12448336029052734, 0.1250121307373047, 0.12530467224121095, 0.12417772674560547, 0.12428768157958985, 0.12375065612792968, 0.12433382415771485, 0.124295166015625, 0.12407017517089844, 0.12509564971923828, 0.12459161376953125, 0.12425609588623047, 0.124193603515625, 0.12411420440673829, 0.12402851104736329, 0.12380464172363281, 0.1240453109741211, 0.12458294677734374, 0.1250641632080078, 0.1245692138671875, 0.12389206695556641, 0.1244651870727539, 0.12457091522216797, 0.12471695709228516, 0.12429747009277343, 0.12569862365722656, 0.12562226867675783, 0.12449791717529297, 0.12391629028320313, 0.12398796844482422, 0.12398371124267578, 0.12903216552734376, 0.12397551727294921, 0.12532566070556642, 0.12478047943115235, 0.12416620635986328, 0.12346380615234374, 0.12356800079345703, 0.12829653930664062, 0.12466217803955078, 0.12475933074951172, 0.12616368103027345, 0.12563814544677734, 0.1249612808227539, 0.12653116607666015, 0.12503858947753907, 0.12493414306640625, 0.1247922592163086, 0.12614291381835938, 0.12634329223632812, 0.1250160675048828, 0.12474777221679688, 0.12459827423095703, 0.12487065887451172, 0.12620365142822265, 0.12528227233886718, 0.12617142486572266, 0.1253376007080078, 0.1255255661010742, 0.12516384124755858, 0.12540735626220703, 0.12508972930908202, 0.12516345977783203, 0.12803712463378905, 0.12579212951660157, 0.12566118621826172, 0.12480067443847656, 0.12463692474365234, 0.12400662231445313, 0.12416242980957032, 0.1241864013671875, 0.1245199966430664, 0.12517417907714845, 0.12460419464111328, 0.12439193725585937, 0.1240739517211914, 0.1257798080444336, 0.12467542266845703, 0.12452127838134766, 0.1245143051147461, 0.12474982452392579, 0.1239920654296875, 0.12423782348632813, 0.12394290924072265, 0.1239544677734375, 0.12458419036865234, 0.12606511688232422, 0.12478387451171875, 0.1246111068725586, 0.12379484558105469, 0.12414649963378906, 0.12358656311035156, 0.12673843383789063, 0.12396339416503906, 0.1237871322631836, 0.12467622375488281, 0.1254850540161133, 0.12590914916992188, 
0.12513014221191407, 0.12464371490478515, 0.12401609802246094, 0.12438588714599609, 0.12504678344726564, 0.1246553955078125, 0.12470909118652344, 0.12427887725830078, 0.12471900939941406, 0.12407596588134766, 0.12372515106201172, 0.12672476959228515, 0.124261474609375, 0.12484931182861328, 0.12512025451660155, 0.12447948455810547, 0.12410047912597656, 0.12388159942626953, 0.12838313293457032, 0.12490854644775391, 0.12447830200195313, 0.12421126556396485, 0.12493817901611329, 0.12561603546142577, 0.12753724670410158, 0.12523897552490235, 0.12500764465332032, 0.1242888641357422, 0.12460102081298828, 0.12467609405517578, 0.12485564422607422, 0.12516121673583985, 0.12412793731689453, 0.124087646484375, 0.12412403106689453, 0.12444806671142578, 0.1240091552734375, 0.12450777435302735, 0.12452082824707031, 0.12471616363525391, 0.12418342590332031, 0.12427468872070313, 0.1242603530883789, 0.12437094116210938, 0.12398595428466797, 0.12411888122558594, 0.12397151947021484, 0.12476640319824218, 0.12376051330566407, 0.12384268951416015, 0.12368281555175781, 0.12379312133789062, 0.12387356567382812, 0.1246648941040039, 0.12442105865478516, 0.12479430389404297, 0.12364585876464844, 0.1287090606689453, 0.12434620666503907, 0.12411433410644532, 0.12438400268554688, 0.12360944366455077, 0.12381759643554688, 0.12386124420166016, 0.1236800308227539, 0.12345622253417969, 0.12352470397949218, 0.12345350646972657, 0.12380355072021484, 0.1235437774658203, 0.12363184356689454, 0.12357177734375, 0.12339775848388672, 0.1226828155517578, 0.12259795379638672, 0.12263423919677735, 0.12305958557128906, 0.12247923278808594, 0.12295372772216796, 0.12349235534667968, 0.12274483489990234, 0.12275507354736329, 0.12285721588134765, 0.12329910278320312, 0.12333744049072265, 0.12280857849121093, 0.12362934112548828, 0.12363388824462891, 0.12342899322509765, 0.12237532806396484, 0.12318998718261719, 0.12694220733642578, 0.12373881530761718, 0.12330441284179687, 0.12338480377197265, 0.12377587127685546, 0.12324166107177735, 0.12574777221679687, 0.1258519058227539, 0.1230470428466797, 0.12406259155273437, 0.12301494598388672, 0.12266518402099609, 0.1241940460205078, 0.12868888854980468, 0.12319334411621094, 0.12307017517089844, 0.12236214447021485, 0.12342704010009765, 0.1227600326538086, 0.122631103515625, 0.1230763168334961, 0.12614275360107421, 0.12334867095947266, 0.12259974670410156, 0.12333261108398437, 0.12344512176513672, 0.12360102081298828, 0.122453857421875, 0.12435472106933594, 0.12334073638916015, 0.12353132629394531, 0.12327731323242187, 0.1229332504272461, 0.12389785766601563, 0.12335427093505859, 0.12351475524902343, 0.1230182113647461, 0.1233612823486328, 0.12310118103027344, 0.12326496124267577, 0.12370336151123047, 0.12355788421630859, 0.12385414123535156, 0.12341932678222656, 0.12271001434326172]",tokens/s,8.012921121764068,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,3864.723456,2152.660992,0.0,1757.413376,1736.37632,s,1,12.5585712890625,12.5585712890625,0.0,12.5585712890625,12.5585712890625,12.5585712890625,12.5585712890625,[12.5585712890625],,kWh,0.000157039415020832,1.731543629477273e-05,5.904226945598057e-05,0.0002333971207715853,,MB,3873.869824,2389.639168,0.0,1973.420032,1922.784256,s,10,0.635660041809082,0.06356600418090821,0.0005778853990760676,0.06343187141418458,0.06426354904174805,0.06442192039489746,0.06454861747741698,"[0.062454689025878904, 0.06324585723876953, 0.06330374526977539, 0.0635445442199707, 0.06458029174804687, 0.06390063858032227, 0.06422835540771485, 0.06392291259765626, 0.06315980911254883, 0.06331919860839844]",tokens/s,4027.3099323881765,kWh,1.8526656114979043e-06,2.0427130066203017e-07,1.1502136487214724e-06,3.2071505608814066e-06,tokens/kWh,79821634.54454246,MB,3876.315136,2410.610688,0.0,1994.391552,1971.314176,s,10,38.39297583007813,3.839297583007813,0.018174484599691544,3.844256469726562,3.8540014892578123,3.861956799316406,3.868321047363281,"[3.80193359375, 3.84812841796875, 3.84927783203125, 3.852233642578125, 3.869912109375, 3.850801513671875, 3.823289794921875, 3.840384521484375, 3.829564453125, 3.827449951171875]",tokens/s,16.409251598216578,kWh,0.00011158585627516668,1.2308142151414446e-05,4.911332656207862e-05,0.00017300732498865974,tokens/kWh,364146.4313960667,,s,630,38.38869422912598,0.060934435284326946,0.0007122517880582249,0.06083739280700684,0.061808459854125974,0.062165083312988284,0.06350617919921876,"[0.059291648864746097, 0.05961318588256836, 0.059856895446777345, 0.06021920013427735, 0.06000044631958008, 0.060630462646484376, 0.05995782470703125, 0.05987737655639649, 0.05959881591796875, 0.05912934494018555, 0.059335201263427735, 0.05983625411987305, 0.060623008728027346, 0.06044672012329102, 0.06023948669433594, 0.060146240234375, 0.06016390228271484, 0.0603504638671875, 0.06021843338012695, 0.060310462951660156, 0.06053273773193359, 0.06044876861572265, 0.061153247833251954, 0.06085635375976563, 0.060499969482421874, 0.06114060974121094, 0.06075635147094727, 0.0605777587890625, 0.060631072998046875, 0.06044876861572265, 0.06047948837280273, 0.060485633850097656, 0.06055116653442383, 0.06176515197753906, 0.06024035263061524, 0.06013951873779297, 0.060466625213623046, 0.06007606506347656, 0.06049846267700195, 0.06016169738769531, 0.06026176071166992, 0.060042495727539065, 0.05960265731811523, 0.06001375961303711, 0.06102508926391602, 0.06027468872070312, 0.05995075225830078, 0.060231201171875, 0.05997647857666016, 0.060308704376220705, 0.06084403228759765, 0.06220064163208008, 0.060393470764160156, 0.06033817672729492, 0.06003302383422852, 0.06009439849853516, 0.060110305786132814, 0.060014686584472655, 0.06055987167358398, 0.059641857147216794, 0.06018832015991211, 0.06171068954467773, 0.061955646514892576, 
0.06022143936157227, 0.06018003082275391, 0.05993638229370117, 0.06020899200439453, 0.06060335922241211, 0.06074934387207031, 0.060477920532226566, 0.06094233703613281, 0.061241310119628904, 0.0617042236328125, 0.06184054565429688, 0.061911712646484374, 0.06164633560180664, 0.06165555191040039, 0.061228862762451174, 0.06194419097900391, 0.061341697692871094, 0.06092390441894531, 0.06188236618041992, 0.06077849578857422, 0.06077376174926758, 0.06100559997558594, 0.060656352996826174, 0.060919937133789064, 0.060872703552246096, 0.06098854446411133, 0.06094527816772461, 0.06083776092529297, 0.060729598999023436, 0.060566497802734376, 0.06158428955078125, 0.06048972702026367, 0.060278785705566405, 0.060409854888916016, 0.06029107284545898, 0.061212352752685543, 0.06143212890625, 0.06094438552856445, 0.060361984252929685, 0.06061686325073242, 0.061177600860595704, 0.06185865783691406, 0.06116966247558594, 0.06079283142089844, 0.06041177749633789, 0.06078620910644531, 0.060789440155029295, 0.06215024185180664, 0.06159772872924805, 0.06064976119995117, 0.060622238159179685, 0.06095523071289063, 0.062312446594238284, 0.06128620910644531, 0.06211155319213867, 0.06183155059814453, 0.06164275360107422, 0.061843456268310545, 0.06095872116088867, 0.06184960174560547, 0.06080454254150391, 0.06080527877807617, 0.06193791961669922, 0.060219390869140625, 0.06033283233642578, 0.0604334716796875, 0.060400161743164066, 0.06222275161743164, 0.0607825927734375, 0.060469249725341796, 0.06051225662231445, 0.06033718490600586, 0.061356639862060545, 0.06084236907958984, 0.06070272064208984, 0.06107136154174805, 0.06373580932617187, 0.06077203369140625, 0.062404926300048826, 0.060663009643554686, 0.06050896072387695, 0.060184574127197264, 0.060501632690429685, 0.061247871398925784, 0.06100377655029297, 0.060733409881591795, 0.06091068649291992, 0.06081631851196289, 0.06260921478271485, 0.06404249572753906, 0.0608422737121582, 0.061094112396240234, 0.060866752624511716, 0.06110531234741211, 0.06308278274536133, 0.061722335815429685, 0.0611945915222168, 0.06098160171508789, 0.06103670501708985, 0.060843582153320315, 0.06179065704345703, 0.06097071838378906, 0.060712257385253904, 0.06083273696899414, 0.06112384033203125, 0.061284927368164065, 0.06254995346069336, 0.06101222229003906, 0.060704193115234374, 0.06095110321044922, 0.060681983947753905, 0.061233409881591795, 0.06118550491333008, 0.06071459197998047, 0.060427200317382815, 0.060657054901123046, 0.06067161560058594, 0.06161481475830078, 0.0611514892578125, 0.060674049377441405, 0.06046310424804688, 0.060620704650878904, 0.06087484741210938, 0.061274112701416014, 0.06101113510131836, 0.06105702209472656, 0.06061414337158203, 0.06222467041015625, 0.06091795349121094, 0.060606494903564456, 0.06066998291015625, 0.06259503936767578, 0.061312255859375, 0.06105574417114258, 0.0607006721496582, 0.060825408935546874, 0.06153030395507812, 0.0607907829284668, 0.060954143524169925, 0.06184592056274414, 0.06147078323364258, 0.061183998107910156, 0.06133555221557617, 0.061134849548339844, 0.06238003158569336, 0.061181598663330075, 0.06079904174804687, 0.06090576171875, 0.06411264038085937, 0.061233150482177735, 0.061042686462402344, 0.061851646423339846, 0.061159423828125, 0.06030131149291992, 0.06029625701904297, 0.06022377777099609, 0.06073807907104492, 0.06060995101928711, 0.06082633590698242, 0.06278332901000977, 0.06092563247680664, 0.06021289443969727, 0.06024652862548828, 0.06014803314208984, 0.06029926300048828, 0.06052249526977539, 0.06029107284545898, 
0.06029919815063477, 0.060176448822021486, 0.06072524642944336, 0.0603256950378418, 0.06038320159912109, 0.06107139205932617, 0.061929183959960936, 0.061085472106933596, 0.06175187301635742, 0.06152540969848633, 0.060889537811279294, 0.061774112701416015, 0.06127983856201172, 0.06193398284912109, 0.06119424057006836, 0.06132940673828125, 0.061317119598388675, 0.06158335876464844, 0.062042110443115236, 0.0612498550415039, 0.06142329788208008, 0.06162633514404297, 0.06127443313598633, 0.06182672119140625, 0.06168611145019531, 0.06073062515258789, 0.061407806396484375, 0.06187027359008789, 0.061054241180419924, 0.06108233642578125, 0.06128380966186524, 0.060985984802246096, 0.0619703369140625, 0.061658687591552734, 0.06128041458129883, 0.06117814254760742, 0.062134273529052736, 0.06137651062011719, 0.06217932891845703, 0.061884414672851565, 0.06112575912475586, 0.06117670440673828, 0.06154444885253906, 0.060870655059814455, 0.061684959411621096, 0.06138515090942383, 0.060967262268066404, 0.06236179351806641, 0.06318182373046875, 0.06099625778198242, 0.0619769287109375, 0.060835647583007815, 0.060889087677001956, 0.061141342163085935, 0.06092915344238281, 0.06107360076904297, 0.06135638427734375, 0.061017822265625, 0.06120476913452148, 0.060989280700683594, 0.06078806304931641, 0.061309761047363284, 0.06158335876464844, 0.061015296936035156, 0.06161484909057617, 0.06128054428100586, 0.061705951690673826, 0.06231353759765625, 0.06107231903076172, 0.06102582550048828, 0.06149168014526367, 0.0613039665222168, 0.06202249526977539, 0.06195788955688476, 0.061067073822021485, 0.061352127075195315, 0.06095008087158203, 0.06115158462524414, 0.061622623443603516, 0.06120175933837891, 0.060957344055175784, 0.06109593582153321, 0.06127001571655273, 0.062494720458984375, 0.06216019058227539, 0.06112870407104492, 0.06100377655029297, 0.06177740859985351, 0.06090393447875977, 0.06084198379516602, 0.06238617706298828, 0.061118465423583984, 0.061034496307373044, 0.06116556930541992, 0.06076416015625, 0.06084640121459961, 0.0617053108215332, 0.06085283279418945, 0.061504959106445316, 0.06065235137939453, 0.06063801574707031, 0.06159203338623047, 0.06148758316040039, 0.0610750732421875, 0.06127449417114258, 0.06061827087402344, 0.061585121154785157, 0.06146329498291016, 0.061329216003417966, 0.06094457626342773, 0.06081299209594727, 0.06093827056884766, 0.0628267822265625, 0.06098515319824219, 0.06240995025634766, 0.060751903533935545, 0.060910526275634765, 0.061755199432373044, 0.06150960159301758, 0.061595870971679685, 0.06143590545654297, 0.06065948867797852, 0.060766433715820314, 0.06191513442993164, 0.06104822540283203, 0.06216908645629883, 0.06061936187744141, 0.0607636489868164, 0.06158796691894531, 0.06127001571655273, 0.06130435180664062, 0.06041212844848633, 0.06049577713012695, 0.060628448486328125, 0.06087481689453125, 0.060711166381835935, 0.06078521728515625, 0.060835166931152346, 0.06061865615844726, 0.060648193359375, 0.060854270935058595, 0.06029260635375976, 0.06136678314208984, 0.06255327987670899, 0.06072150421142578, 0.06045334243774414, 0.06024009704589844, 0.0602531852722168, 0.059959358215332034, 0.06119164657592773, 0.06322367858886718, 0.06057609558105469, 0.06256412887573243, 0.060857120513916015, 0.060636417388916015, 0.060701438903808594, 0.060641281127929686, 0.0606416015625, 0.06060800170898437, 0.06067577743530273, 0.06052710342407226, 0.060739585876464844, 0.060905216217041015, 0.060690689086914065, 0.06058582305908203, 0.06069878387451172, 0.06108160018920898, 
0.060602302551269534, 0.06078265762329101, 0.060365825653076174, 0.06000128173828125, 0.0599705924987793, 0.05969968032836914, 0.05977958297729492, 0.05950019073486328, 0.0599060173034668, 0.06058399963378906, 0.06036227035522461, 0.06051715087890625, 0.060510208129882816, 0.06026393508911133, 0.06018304061889648, 0.05987667083740234, 0.059677375793457034, 0.0604304313659668, 0.06071196746826172, 0.06066265487670899, 0.06016819381713867, 0.06013324737548828, 0.06022751998901367, 0.06013868713378906, 0.059956161499023435, 0.06037510299682617, 0.060539905548095706, 0.06056857681274414, 0.06068633651733398, 0.06054297637939453, 0.06062934494018555, 0.060642208099365234, 0.06118886566162109, 0.06098739242553711, 0.06125363159179688, 0.061530113220214844, 0.061468257904052734, 0.06093648147583008, 0.060945728302001956, 0.060664638519287106, 0.060763198852539065, 0.060942848205566405, 0.06362156677246093, 0.06204006576538086, 0.06055763244628906, 0.060493824005126956, 0.06109743881225586, 0.060708927154541015, 0.06037465667724609, 0.060791454315185546, 0.060955902099609376, 0.06053766250610351, 0.06020256042480469, 0.06040019226074219, 0.06042572784423828, 0.06035696029663086, 0.060403072357177734, 0.060592830657958986, 0.06082316970825195, 0.06056924819946289, 0.06075008010864258, 0.06035670471191406, 0.060461536407470706, 0.06019414520263672, 0.06065350341796875, 0.06090147018432617, 0.061016704559326174, 0.06107340621948242, 0.06080716705322266, 0.06088044738769531, 0.0606223030090332, 0.06086860656738281, 0.060805824279785155, 0.06092828750610352, 0.0608370246887207, 0.06090985488891602, 0.06103062438964844, 0.06124095916748047, 0.06513734436035157, 0.06132457733154297, 0.06180643081665039, 0.06090639877319336, 0.061144927978515624, 0.06142377471923828, 0.061060863494873045, 0.06114310455322266, 0.06096607971191406, 0.06099456024169922, 0.061726497650146483, 0.06107926559448242, 0.061302879333496096, 0.061024673461914064, 0.060851551055908205, 0.06114985656738281, 0.060903423309326174, 0.06077030563354492, 0.060903423309326174, 0.06085599899291992, 0.06075628662109375, 0.06142771148681641, 0.060657470703125, 0.060620223999023434, 0.060521343231201175, 0.060508033752441404, 0.06467788696289062, 0.060360225677490234, 0.06033660888671875, 0.0604925422668457, 0.061249759674072264, 0.06042816162109375, 0.06043414306640625, 0.06098540878295899, 0.06067193603515625, 0.06060483169555664, 0.060545310974121094, 0.06054198455810547, 0.06066451263427734, 0.060738975524902344, 0.06085228729248047, 0.061437950134277344, 0.06044316864013672, 0.06037267303466797, 0.06086073684692383, 0.060821502685546876, 0.06119424057006836, 0.060999454498291014, 0.06082787322998047, 0.060851871490478514, 0.060948543548583985, 0.06079107284545898, 0.061663230895996096, 0.06095439910888672, 0.060997600555419924, 0.06069200134277344, 0.06103897476196289, 0.060617057800292966, 0.06108979034423828, 0.06290415954589844, 0.061139137268066406, 0.060533790588378905, 0.06045695877075195, 0.06145264053344727, 0.06046777725219726, 0.06107305526733398, 0.06077824020385742, 0.060893825531005856, 0.060313438415527346, 0.06201971054077148, 0.06192950439453125, 0.06065129470825195, 0.060502239227294925, 0.06026444625854492, 0.06012054443359375, 0.059820575714111326, 0.060217342376708984, 0.06055260848999024, 0.06035721588134765, 0.06051424026489258, 0.06033414459228516, 0.06015974426269531, 0.05999155044555664, 0.05987609481811523, 0.06269776153564453, 0.060933952331542966, 0.060544513702392576, 0.06042051315307617, 0.06036172866821289, 
0.06049280166625977, 0.06031340789794922, 0.06118214416503906, 0.060633087158203126, 0.06081846237182617, 0.06088803100585938, 0.060923358917236325, 0.0607158088684082, 0.06088473510742187, 0.06084377670288086, 0.06144137573242187, 0.06082857513427734, 0.060870433807373045, 0.061042240142822266, 0.06086313629150391, 0.06010262298583984, 0.05970127868652344, 0.05975606536865234, 0.0601297607421875, 0.060361888885498045, 0.060645313262939454, 0.060537761688232425, 0.060849632263183594, 0.060215839385986326, 0.060120094299316404, 0.06030640029907226, 0.06021529769897461, 0.06040480041503906, 0.06052755355834961, 0.06075392150878906, 0.06071062469482422, 0.06095286560058594, 0.06065151977539063, 0.06039955139160156, 0.0601416015625, 0.06009040069580078, 0.06098886489868164, 0.06199148941040039, 0.060663806915283204, 0.06071078491210938, 0.060612510681152344, 0.06073980712890625, 0.06057196807861328, 0.06076147079467773, 0.060647071838378905, 0.060635807037353516, 0.06066524887084961, 0.06094704055786133, 0.060794303894042966, 0.06123107147216797, 0.060687232971191406, 0.06086569595336914, 0.060862110137939456, 0.060939167022705076, 0.060680191040039064, 0.06112006378173828, 0.06082809448242187, 0.06080470275878906, 0.0612149429321289, 0.06126335906982422, 0.06506156921386719, 0.06097318267822266, 0.06041775894165039, 0.06052259063720703, 0.06027679824829101, 0.06031299209594727]",tokens/s,16.41108176901759,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,3940.745216,2152.660992,0.0,1757.413376,1736.37632,s,1,12.384103515625,12.384103515625,0.0,12.384103515625,12.384103515625,12.384103515625,12.384103515625,[12.384103515625],,kWh,0.0001575624012083419,1.7373295562253167e-05,5.8802824820000654e-05,0.0002337385215905957,,MB,3949.490176,2389.639168,0.0,1973.420032,1922.784256,s,10,0.6248357162475585,0.06248357162475586,0.00040845917177390273,0.062462688446044926,0.06309321594238282,0.06314102363586425,0.06317926979064942,"[0.06231094360351563, 0.06234572982788086, 0.06176588821411133, 0.0626399040222168, 0.06252425765991211, 0.06308259201049805, 0.06240111923217773, 0.06255193710327149, 0.0631888313293457, 0.062024513244628904]",tokens/s,4097.076933076171,kWh,1.8305450974999835e-06,2.0187770505538705e-07,1.1433533452375953e-06,3.1757761477929657e-06,tokens/kWh,80610215.60915416,MB,3953.778688,2410.610688,0.0,1994.391552,1971.314176,s,10,37.614040283203124,3.7614040283203125,0.009069761434271535,3.7606953125,3.7708549560546873,3.7750571166992186,3.7784188452148437,"[3.7592001953125, 3.769921142578125, 3.752826171875, 3.759096435546875, 3.759722412109375, 3.762710693359375, 3.761668212890625, 3.77925927734375, 3.743501220703125, 
3.766134521484375]",tokens/s,16.74906485069438,kWh,0.00010931741821874955,1.2057846609998803e-05,4.829829731916261e-05,0.00016967356214791094,tokens/kWh,371301.2163031061,,s,630,37.60988599777223,0.05969823174249558,0.0006495408563423068,0.05958225631713867,0.060316112518310545,0.06066802577972412,0.06273069122314454,"[0.059224063873291016, 0.0593284797668457, 0.05942825698852539, 0.05926502227783203, 0.05924431991577148, 0.06028684616088867, 0.05945372772216797, 0.059944896697998046, 0.05954841613769531, 0.05943619155883789, 0.05951926422119141, 0.059681663513183596, 0.05923193740844727, 0.05932790374755859, 0.0591099853515625, 0.05921331024169922, 0.059760353088378904, 0.05974095916748047, 0.05978112030029297, 0.059580257415771484, 0.05966864013671875, 0.05972172927856445, 0.060841217041015624, 0.05987744140625, 0.05982454299926758, 0.059902271270751956, 0.05982729721069336, 0.05948915100097656, 0.059799552917480465, 0.05936332702636719, 0.05973606491088867, 0.05953116989135742, 0.05963980865478516, 0.059676769256591794, 0.05936537551879883, 0.05915750503540039, 0.059259647369384764, 0.05924259185791016, 0.05942867279052735, 0.060131072998046875, 0.05946428680419922, 0.05997158432006836, 0.05946774291992187, 0.05980339050292969, 0.059815711975097656, 0.05965465545654297, 0.05939523315429687, 0.05944134521484375, 0.05966505432128906, 0.0596841926574707, 0.059566497802734375, 0.059709121704101566, 0.060635326385498046, 0.061163902282714844, 0.059313823699951175, 0.05962118530273437, 0.0611190071105957, 0.059734016418457034, 0.059719295501708985, 0.0597237434387207, 0.05934246444702149, 0.05961808013916016, 0.05956198501586914, 0.05938412857055664, 0.0601640625, 0.05978521728515625, 0.05979536056518555, 0.0597237434387207, 0.05978291320800781, 0.05971305465698242, 0.06002355194091797, 0.06085027313232422, 0.059870849609375, 0.0609222412109375, 0.05995315170288086, 0.06254572677612305, 0.05985228729248047, 0.05980435180664063, 0.05960489654541016, 0.05949241638183594, 0.059420703887939456, 0.05967052841186524, 0.05947187042236328, 0.05942272186279297, 0.0594411506652832, 0.05948124694824219, 0.059468639373779296, 0.059641857147216794, 0.06030448150634766, 0.060642208099365234, 0.0598337287902832, 0.05967257690429688, 0.059512928009033204, 0.06035411071777344, 0.05919583892822266, 0.05936515045166016, 0.05951504135131836, 0.059543422698974606, 0.059396320343017575, 0.059474334716796876, 0.059340160369873045, 0.05985103988647461, 0.05910940933227539, 0.059148193359375, 0.05972768020629883, 0.06048223876953125, 0.060104705810546874, 0.06002483367919922, 0.05955897521972656, 0.05964896011352539, 0.060294654846191405, 0.05961983871459961, 0.0595865592956543, 0.05946723175048828, 0.059428897857666016, 0.05954406356811524, 0.06356134414672851, 0.05958649444580078, 0.05944364929199219, 0.0598466567993164, 0.059925983428955075, 0.059664928436279296, 0.0605568962097168, 0.05959702301025391, 0.05932566452026367, 0.059079647064208984, 0.05984403228759766, 0.059516864776611327, 0.059152481079101565, 0.05883280181884765, 0.059048320770263674, 0.05913375854492187, 0.06066435241699219, 0.058929153442382816, 0.0589854736328125, 0.059036415100097654, 0.05937587356567383, 0.05997884750366211, 0.05934076690673828, 0.059689632415771486, 0.059275550842285155, 0.059490623474121096, 0.05934867095947265, 0.05923132705688477, 0.059163169860839845, 0.05917030334472656, 0.05938675308227539, 0.05942438507080078, 0.05962380981445312, 0.061257728576660155, 0.05957222366333008, 0.05927056121826172, 0.05924515151977539, 
0.059104385375976565, 0.06059001541137695, 0.059611358642578126, 0.05982076644897461, 0.05976473617553711, 0.05970249557495117, 0.060463905334472656, 0.061488800048828125, 0.05970473480224609, 0.059611358642578126, 0.05959958267211914, 0.05952022552490235, 0.05955855941772461, 0.05991027069091797, 0.0596085433959961, 0.05941712188720703, 0.05911958312988281, 0.05949203109741211, 0.06020694351196289, 0.059789825439453125, 0.05973974227905274, 0.059845024108886716, 0.05938585662841797, 0.0590561294555664, 0.05925888061523438, 0.05911142349243164, 0.059268352508544925, 0.05916748809814453, 0.060010784149169924, 0.059803359985351565, 0.05928316879272461, 0.059291488647460935, 0.05935945510864258, 0.05965798568725586, 0.059256542205810545, 0.06000307083129883, 0.05970057678222656, 0.05962179183959961, 0.059058433532714845, 0.05907379150390625, 0.059034175872802734, 0.05908908843994141, 0.05935308837890625, 0.059688064575195314, 0.059442047119140626, 0.059701248168945314, 0.05980979156494141, 0.0597498893737793, 0.059582977294921874, 0.05973984146118164, 0.0598691520690918, 0.060563041687011716, 0.060136192321777346, 0.05998387145996094, 0.06058134460449219, 0.060504608154296875, 0.05959785461425781, 0.05982012939453125, 0.05982633590698242, 0.0596998405456543, 0.05972742462158203, 0.060226081848144535, 0.05978112030029297, 0.0597872314453125, 0.0628062400817871, 0.05953011322021484, 0.05963382339477539, 0.060315456390380856, 0.0594741439819336, 0.060068607330322266, 0.06003302383422852, 0.05956979370117187, 0.05940851211547851, 0.05923455810546875, 0.05910284805297852, 0.05913183975219727, 0.05897679901123047, 0.05919232177734375, 0.05942726516723633, 0.05938224029541016, 0.058943489074707034, 0.0590579833984375, 0.05899190521240234, 0.05923932647705078, 0.05989990234375, 0.058982398986816405, 0.05968076705932617, 0.05968896102905273, 0.05949039840698242, 0.05950217437744141, 0.05960326385498047, 0.05971747207641601, 0.05957436752319336, 0.05955385589599609, 0.05959065628051758, 0.05969891357421875, 0.060322017669677735, 0.05950265502929687, 0.05957017517089844, 0.05965679931640625, 0.060605888366699216, 0.05997830581665039, 0.05963123321533203, 0.059913665771484374, 0.05963811111450195, 0.05938412857055664, 0.05913209533691406, 0.05935932922363281, 0.05922412872314453, 0.05924448013305664, 0.059342655181884765, 0.05956403350830078, 0.05945564651489258, 0.05932444763183594, 0.05914214324951172, 0.059153759002685546, 0.059143871307373044, 0.06000534439086914, 0.0594463996887207, 0.05954006576538086, 0.05958860778808594, 0.0592360954284668, 0.05938844680786133, 0.05925020980834961, 0.05930624008178711, 0.05904611206054688, 0.05905817413330078, 0.0593554573059082, 0.06386761474609375, 0.059300254821777344, 0.0594043197631836, 0.06028342437744141, 0.05969715118408203, 0.05907660675048828, 0.05999411010742187, 0.05969100952148437, 0.05980160140991211, 0.05993616104125977, 0.059381729125976564, 0.059400222778320313, 0.05951139068603516, 0.05954051208496094, 0.05983536148071289, 0.05972371292114258, 0.059564094543457034, 0.061009918212890625, 0.05955379104614258, 0.0595148811340332, 0.05965558242797851, 0.060009025573730466, 0.0597072639465332, 0.060372318267822266, 0.05929171371459961, 0.05949721527099609, 0.05944425582885742, 0.05928854370117188, 0.05982992172241211, 0.059682590484619144, 0.059406913757324216, 0.05935647964477539, 0.05969580841064453, 0.06168985748291016, 0.059284351348876954, 0.05928550338745117, 0.05950054550170898, 0.05925878524780273, 0.06027801513671875, 0.0593455696105957, 
0.05968841552734375, 0.06140966415405273, 0.06012118530273437, 0.06172697448730469, 0.059688766479492186, 0.05922124862670899, 0.05911167907714844, 0.0591080322265625, 0.05941452789306641, 0.059379711151123046, 0.059232257843017576, 0.059150337219238285, 0.05919539260864258, 0.05971353530883789, 0.059602783203125, 0.06063276672363281, 0.06013932800292969, 0.05983504104614258, 0.06021324920654297, 0.06008211135864258, 0.05970950317382812, 0.059701248168945314, 0.05956595230102539, 0.059977855682373044, 0.05963145446777344, 0.06029651260375977, 0.059738975524902344, 0.059668479919433595, 0.05975827026367188, 0.059109695434570314, 0.05924780654907227, 0.05934163284301758, 0.06042591857910156, 0.059953441619873045, 0.05934902572631836, 0.05957632064819336, 0.05928550338745117, 0.05950431823730469, 0.05925305557250977, 0.05981980895996094, 0.059470046997070314, 0.059213214874267575, 0.06060502243041992, 0.05936742401123047, 0.05945897674560547, 0.05932463836669922, 0.05967910385131836, 0.05980160140991211, 0.05969676971435547, 0.06091609573364258, 0.06035456085205078, 0.05989555358886719, 0.060293376922607424, 0.059724864959716795, 0.05919145584106445, 0.05918777465820312, 0.05954572677612305, 0.05929033660888672, 0.05935718536376953, 0.059404289245605466, 0.06221414566040039, 0.06008768081665039, 0.060010784149169924, 0.05965606307983398, 0.060772830963134766, 0.05980160140991211, 0.06015795135498047, 0.05977468872070312, 0.05957251358032226, 0.059491424560546874, 0.05963459014892578, 0.059553665161132814, 0.05962105560302734, 0.059450847625732425, 0.05966719818115234, 0.059807968139648435, 0.05909004974365235, 0.059368320465087894, 0.05941862487792969, 0.05972092819213867, 0.059327232360839845, 0.060530719757080076, 0.05972310256958008, 0.05951145553588867, 0.05978054428100586, 0.059464256286621095, 0.059351070404052735, 0.059041759490966794, 0.05954764938354492, 0.05917020797729492, 0.05925129699707031, 0.0593583984375, 0.059318206787109376, 0.05931097412109375, 0.05923001480102539, 0.05954579162597656, 0.05947596740722656, 0.059295169830322264, 0.059875904083251955, 0.05989376068115235, 0.0594288330078125, 0.059731998443603516, 0.05977907180786133, 0.05952716827392578, 0.06014355087280274, 0.05951894378662109, 0.05956208038330078, 0.05959270477294922, 0.05936742401123047, 0.05973811340332031, 0.06004908752441406, 0.05989807891845703, 0.059703231811523434, 0.059905216217041014, 0.05989270401000977, 0.06064096069335938, 0.06003744125366211, 0.05997772979736328, 0.05985263824462891, 0.05990620803833008, 0.05955955123901367, 0.0596627197265625, 0.05947596740722656, 0.06109299087524414, 0.06283308792114257, 0.06341404724121094, 0.05963017654418945, 0.05946988677978516, 0.06367846298217773, 0.05933059310913086, 0.06079894256591797, 0.060063743591308595, 0.059643905639648435, 0.06013071823120117, 0.06000831985473633, 0.05988016128540039, 0.05937337493896484, 0.05938735961914063, 0.059175617218017576, 0.059637153625488284, 0.0593230094909668, 0.060020126342773435, 0.059345504760742185, 0.059514816284179685, 0.05937753677368164, 0.059611328125, 0.05939199829101562, 0.060375038146972655, 0.05974835205078125, 0.05980556869506836, 0.05988355255126953, 0.060477535247802736, 0.059469825744628904, 0.059490238189697266, 0.059364959716796874, 0.0597154541015625, 0.05964656066894531, 0.06093414306640625, 0.06232473754882813, 0.06012492752075195, 0.059899391174316405, 0.060771072387695316, 0.05992652893066406, 0.05950431823730469, 0.06067027282714844, 0.05958969497680664, 0.05970758438110352, 
0.059796222686767576, 0.06192876815795898, 0.059849407196044924, 0.059238399505615234, 0.060063743591308595, 0.05920153427124023, 0.05944313430786133, 0.05954291152954101, 0.059222015380859375, 0.059249343872070315, 0.05934080123901367, 0.05949235153198242, 0.05922316741943359, 0.05926591873168945, 0.0601190071105957, 0.05965212631225586, 0.05925724792480469, 0.059821632385253905, 0.0593922233581543, 0.05898672103881836, 0.059233535766601564, 0.058729057312011716, 0.05907062530517578, 0.058971935272216794, 0.05903900909423828, 0.059306209564208984, 0.0593023681640625, 0.05951103973388672, 0.059322368621826174, 0.05943404769897461, 0.060171199798583985, 0.05949030303955078, 0.05929574584960937, 0.05923385620117187, 0.05959654235839844, 0.05977104187011719, 0.05941302490234375, 0.059581535339355465, 0.05954019165039062, 0.05974035263061524, 0.059633663177490234, 0.05954355239868164, 0.05946777725219726, 0.05947769546508789, 0.05983996963500977, 0.059634529113769534, 0.06034211349487305, 0.059801216125488284, 0.05905868911743164, 0.05961014556884765, 0.05971046447753906, 0.05944905471801758, 0.059035934448242185, 0.05922444915771485, 0.0590885124206543, 0.058905601501464844, 0.058935966491699215, 0.058883872985839844, 0.060015167236328125, 0.06111231994628906, 0.059701248168945314, 0.0595599365234375, 0.05981798553466797, 0.06066527938842774, 0.05939225769042969, 0.05897203063964844, 0.05904172897338867, 0.059359745025634764, 0.059264671325683596, 0.05897846221923828, 0.05904812622070312, 0.05905817413330078, 0.058788864135742185, 0.05876630401611328, 0.05926457595825195, 0.05942035293579102, 0.05929859161376953, 0.05919744110107422, 0.05952511978149414, 0.06043033599853516, 0.05974630355834961, 0.06386687850952148, 0.060061473846435544, 0.06046537780761719, 0.060014591217041016, 0.0597147216796875, 0.059884033203125, 0.05981423950195312, 0.059652095794677736, 0.059813697814941405, 0.05988729476928711, 0.06047001647949219, 0.059579391479492184, 0.0604505615234375, 0.05977916717529297, 0.059240543365478515, 0.06007686233520508, 0.05943910217285156, 0.05992144012451172, 0.060143871307373045, 0.05943164825439453, 0.05943280029296875, 0.059238174438476565, 0.05943270492553711, 0.059089534759521486, 0.05868544006347656, 0.05862124633789063, 0.059052513122558596, 0.05927753448486328, 0.059181087493896486, 0.05919126510620117, 0.05929068756103516, 0.05918406295776367, 0.05982617568969727, 0.05911324691772461, 0.05931827163696289, 0.05980972671508789, 0.059932159423828124, 0.05904054260253906, 0.05866393661499023, 0.059038719177246096, 0.05884214401245117, 0.059092960357666015, 0.059417598724365236, 0.0625376319885254, 0.060276607513427734, 0.058894561767578124, 0.06049792098999023, 0.059641761779785155, 0.06040537643432617, 0.06008019256591797, 0.0598306884765625, 0.059781024932861325, 0.059956382751464844, 0.06049795150756836, 0.05976160049438477, 0.05966432189941406, 0.059779102325439454, 0.05950857543945313, 0.06119644927978515, 0.0600266227722168, 0.059700897216796875]",tokens/s,16.750914906716744,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,23902.429184,13034.78272,0.0,12639.535104,12621.66016,s,1,48.46375,48.46375,0.0,48.46375,48.46375,48.46375,48.46375,[48.46375],,kWh,0.0012073979881250087,0.0001331777867873099,0.00045702203228401017,0.0017975978071963288,,MB,1234.599936,13909.295104,0.0,13493.075968,13249.793024,s,10,1.6881716613769533,0.16881716613769532,0.001683804255809587,0.16813304138183593,0.1713816635131836,0.17156614456176758,0.17171372940063476,"[0.16711264038085938, 0.16768089294433594, 0.1665416259765625, 0.16802146911621094, 0.17134066772460937, 0.16955641174316408, 0.17175062561035156, 0.16785340881347657, 0.16824461364746093, 0.17006930541992188]",tokens/s,1516.433463829112,kWh,4.969859327189498e-06,5.480902051208892e-07,3.285638221728996e-06,8.803587754039385e-06,tokens/kWh,29079053.58046082,MB,1256.77568,13909.295104,0.0,13493.075968,13389.080064,s,10,57.10703173828125,5.710703173828125,0.010946179836741406,5.710876708984375,5.724618115234375,5.725002416992187,5.725309858398438,"[5.71461181640625, 5.71090478515625, 5.7108486328125, 5.70460205078125, 5.72538671875, 5.70095751953125, 5.72364453125, 5.72453271484375, 5.691873046875, 5.699669921875]",tokens/s,11.031916400194978,kWh,0.00016651749440864557,1.8367533693865484e-05,0.00010979925780166894,0.00029468428590417997,tokens/kWh,213788.1217747905,,s,630,57.10336171722414,0.09064025669400655,0.0008973252111778205,0.09052087783813476,0.0912185333251953,0.09173341217041016,0.09427259468078614,"[0.08919391632080079, 0.08895750427246094, 0.09219071960449218, 0.0894422378540039, 0.08997484588623048, 0.09072640228271485, 0.09069974517822266, 0.09024515533447265, 0.09008537292480469, 0.09020211029052734, 0.09005411529541016, 0.0903419189453125, 0.0906806411743164, 0.09092166137695312, 0.09107862091064453, 0.09096931457519532, 0.09114911651611328, 0.09083289337158203, 0.09694931030273438, 0.09088710021972657, 0.09127295684814453, 0.09111577606201172, 0.0909659194946289, 0.09091081237792968, 0.09064374542236328, 0.0906247329711914, 0.09070591735839843, 
0.09052774047851563, 0.09090866851806641, 0.09069158172607422, 0.09061302185058594, 0.09065507507324219, 0.09063257598876953, 0.0908431396484375, 0.09075673675537109, 0.0908878402709961, 0.09017545318603516, 0.09012095642089844, 0.09048883056640625, 0.09071001434326172, 0.09078374481201172, 0.09020198059082031, 0.09018732452392578, 0.08996543884277344, 0.09024646759033203, 0.09037651062011719, 0.09034758758544922, 0.09082675170898437, 0.09029427337646484, 0.09067724609375, 0.09117619323730469, 0.09048719787597656, 0.09127152252197265, 0.09069683074951172, 0.09068224334716797, 0.09097007751464843, 0.09071004486083985, 0.09181593322753906, 0.09058303833007812, 0.09043539428710938, 0.09072393798828125, 0.090648193359375, 0.09032323455810547, 0.09000784301757812, 0.09026057434082031, 0.09181043243408203, 0.08976121520996094, 0.09040573120117187, 0.09022844696044922, 0.09349353790283203, 0.09018163299560547, 0.09031785583496094, 0.09568764495849609, 0.09043145751953124, 0.09003759765625, 0.09010230255126953, 0.09034153747558593, 0.08994361877441406, 0.09069538879394531, 0.09085382080078125, 0.09004182434082031, 0.0907023696899414, 0.09069990539550782, 0.09033513641357421, 0.09064883422851562, 0.09060352325439452, 0.09122406768798828, 0.09040605163574218, 0.089916259765625, 0.09026150512695312, 0.09023283386230468, 0.09025126647949219, 0.09032249450683594, 0.08996927642822265, 0.0898353271484375, 0.09014272308349609, 0.08967577362060547, 0.09054988861083985, 0.09102579498291016, 0.091109375, 0.09075711822509766, 0.0905173110961914, 0.09057865905761718, 0.09003392028808593, 0.09014959716796875, 0.08996044921875, 0.09032268524169922, 0.09063849639892578, 0.09057849884033203, 0.09084162902832031, 0.09246924591064454, 0.09113529968261719, 0.09073939514160156, 0.09075414276123046, 0.09090700531005859, 0.09137820434570312, 0.09087379455566406, 0.0907979507446289, 0.09052098846435547, 0.0905123519897461, 0.09056031799316407, 0.0906137924194336, 0.09066822052001954, 0.09059203338623047, 0.09043353271484375, 0.0906954574584961, 0.08974153900146484, 0.08958541107177734, 0.09001932525634766, 0.09005542755126954, 0.09030553436279297, 0.0896890869140625, 0.092295166015625, 0.09002550506591797, 0.09048912048339844, 0.09038662719726563, 0.09009942626953125, 0.09044918060302734, 0.09039360046386719, 0.09050701141357421, 0.0904747543334961, 0.09118093109130859, 0.09085491180419922, 0.09106867218017578, 0.09059334564208985, 0.09064998626708984, 0.09066761779785157, 0.0905464324951172, 0.08998847961425781, 0.08987955474853515, 0.09010761260986327, 0.09019779205322266, 0.08981321716308593, 0.0898941421508789, 0.09013529968261719, 0.08953446197509765, 0.08997046661376953, 0.09029449462890625, 0.09019705963134765, 0.0902010269165039, 0.09019913482666016, 0.09018243408203125, 0.09036198425292968, 0.09107046508789063, 0.09024281311035157, 0.09050137329101562, 0.09012364959716797, 0.09028435516357422, 0.09055661010742187, 0.09052806091308593, 0.09083270263671875, 0.09068726348876953, 0.0907696304321289, 0.0947589111328125, 0.09082061004638672, 0.09065267181396484, 0.09105817413330078, 0.09042739105224609, 0.09375129699707031, 0.09383126068115234, 0.0917236785888672, 0.09206310272216797, 0.09053968048095704, 0.09044700622558594, 0.09066886138916015, 0.09093917083740234, 0.09084336090087891, 0.09095782470703125, 0.09037824249267579, 0.09058223724365234, 0.09105059051513673, 0.09005689239501953, 0.0898682861328125, 0.09071820831298828, 0.09040691375732422, 0.09005465698242188, 0.09075011444091798, 0.0905240936279297, 
0.09096438598632813, 0.09096559906005859, 0.09092546844482421, 0.09076326751708984, 0.09064995574951172, 0.09086822509765625, 0.09091670227050781, 0.09087318420410156, 0.09105868530273438, 0.09060777282714844, 0.09018946838378906, 0.09108956909179687, 0.09061526489257812, 0.09092559814453124, 0.09036799621582031, 0.09027891540527344, 0.0901740493774414, 0.09046262359619141, 0.09064447784423828, 0.09171558380126953, 0.09077862548828125, 0.09009168243408203, 0.09029821014404296, 0.09038508605957031, 0.09054854583740235, 0.09047449493408204, 0.09069261169433594, 0.09060454559326171, 0.09049702453613281, 0.09027894592285156, 0.09044182586669922, 0.09128819274902343, 0.09062630462646484, 0.09088409423828125, 0.09106022644042969, 0.0907814712524414, 0.09072662353515625, 0.0906219482421875, 0.09066019439697266, 0.09090115356445312, 0.09053321838378907, 0.09056947326660156, 0.09054195404052734, 0.08976182556152344, 0.0901295394897461, 0.09072115325927735, 0.09016067504882813, 0.08977983856201172, 0.08987120056152344, 0.0899420166015625, 0.09046812438964844, 0.0899106216430664, 0.089936767578125, 0.09021151733398437, 0.0904365463256836, 0.09059468841552734, 0.09099533081054688, 0.09009081268310547, 0.09000611114501954, 0.09025545501708984, 0.09022259521484376, 0.09111558532714843, 0.09148818969726563, 0.09161337280273438, 0.090814208984375, 0.09099590301513671, 0.09087680053710938, 0.09058723449707032, 0.09053993225097656, 0.09001376342773437, 0.08997062683105468, 0.09059123229980469, 0.09148992156982422, 0.09018748474121094, 0.0909748764038086, 0.09063129425048828, 0.09038841247558593, 0.09013139343261718, 0.09033318328857422, 0.09016851043701171, 0.09057071685791016, 0.09074585723876953, 0.09298518371582032, 0.0903864288330078, 0.09116671752929688, 0.09035775756835937, 0.09075260925292969, 0.09435167694091796, 0.09042873382568359, 0.09998761749267578, 0.09073458862304687, 0.09083766174316406, 0.09112777709960937, 0.09103263854980469, 0.09088233947753906, 0.09115510559082031, 0.0905871353149414, 0.09085747528076171, 0.09084070587158204, 0.09137110137939453, 0.09100777435302734, 0.09105203247070312, 0.09092505645751953, 0.09055846405029297, 0.08996633911132812, 0.09026790618896484, 0.09085337829589844, 0.09017747497558594, 0.09033106994628906, 0.09045388793945312, 0.09057305908203125, 0.09007209777832031, 0.09010684967041016, 0.09021798706054687, 0.09096806335449219, 0.0904975357055664, 0.09032704162597656, 0.0909738540649414, 0.09058348846435547, 0.09069158172607422, 0.09070182037353515, 0.09134489440917969, 0.09091046142578126, 0.0904268798828125, 0.09084909057617188, 0.09490863800048828, 0.09218531036376953, 0.09024050903320313, 0.09080473327636719, 0.0901794204711914, 0.08999052429199218, 0.08992054748535157, 0.09033865356445313, 0.09033334350585938, 0.09010816192626953, 0.09039871978759766, 0.08971673583984376, 0.09021234893798828, 0.09039842987060547, 0.09021820831298828, 0.0908519058227539, 0.09321849822998046, 0.09011961364746093, 0.09025154876708984, 0.09034368133544922, 0.09014649963378907, 0.09005942535400391, 0.08976895904541016, 0.09028825378417969, 0.09019686126708984, 0.08997682952880859, 0.09163795471191406, 0.09076624298095703, 0.09086892700195312, 0.090646240234375, 0.09060352325439452, 0.09029837036132812, 0.0902652816772461, 0.09075305938720703, 0.09105846405029297, 0.0904532470703125, 0.08956594848632812, 0.09019753265380859, 0.08973155212402344, 0.08993743896484375, 0.08950806427001953, 0.08979634857177735, 0.09254723358154297, 0.09004450988769531, 0.08999993896484375, 
0.09087558746337891, 0.09015090942382813, 0.0897228775024414, 0.089997314453125, 0.08971635437011719, 0.08972723388671874, 0.09023296356201171, 0.0897000961303711, 0.08990914916992188, 0.09020451354980469, 0.0915538558959961, 0.09107273864746093, 0.09046771240234375, 0.09090908813476563, 0.09078688049316407, 0.09063520050048827, 0.09101229095458985, 0.09069356536865235, 0.09222029113769531, 0.09059859466552735, 0.0903422088623047, 0.09042534637451172, 0.09063219451904297, 0.09042329406738281, 0.09384754943847656, 0.09197158050537109, 0.09185689544677735, 0.09065577697753906, 0.09056079864501954, 0.09031571197509766, 0.09051519775390625, 0.09012633514404297, 0.09065062713623047, 0.09043762969970703, 0.09041305541992188, 0.09047654724121093, 0.0901426239013672, 0.09023513793945312, 0.09052963256835937, 0.09058678436279297, 0.09143331146240234, 0.09099673461914062, 0.09073766326904296, 0.09118208312988281, 0.0914554901123047, 0.09089215850830078, 0.09105216217041015, 0.09111891174316407, 0.090657470703125, 0.09011933135986328, 0.09029254150390625, 0.09030095672607422, 0.0904781723022461, 0.09061158752441406, 0.09039107513427734, 0.09082991790771484, 0.09061878204345702, 0.09039462280273437, 0.09061170959472656, 0.09031423950195312, 0.09016973114013672, 0.09017561340332031, 0.09050521850585938, 0.09035980987548828, 0.0940789794921875, 0.0915598373413086, 0.09090057373046875, 0.09089218902587891, 0.09095145416259766, 0.0905403823852539, 0.09109478759765625, 0.09083106994628906, 0.09159593963623047, 0.09068540954589843, 0.09126502227783204, 0.09084722900390625, 0.09147174072265625, 0.0916911392211914, 0.09173401641845703, 0.09038848114013671, 0.0908779525756836, 0.09052108764648438, 0.09035990142822266, 0.09053635406494141, 0.09127117156982421, 0.09049638366699218, 0.09058163452148438, 0.09043558502197266, 0.09082195281982422, 0.09035027313232422, 0.09042649841308593, 0.09008777618408204, 0.08998307037353516, 0.09036227416992187, 0.09048681640625, 0.09004236602783203, 0.09086566162109375, 0.09085542297363282, 0.09126265716552734, 0.09088966369628906, 0.09085836791992187, 0.09075507354736329, 0.09113391876220703, 0.09063744354248048, 0.09076624298095703, 0.09030156707763672, 0.09059744262695313, 0.09054496002197265, 0.09121791839599609, 0.09037619018554688, 0.09034342193603516, 0.09026870727539063, 0.09019696044921875, 0.08981708526611328, 0.09027337646484375, 0.09014313507080078, 0.0902323226928711, 0.09050070190429688, 0.09058134460449219, 0.09044873809814453, 0.0905192642211914, 0.09073161315917969, 0.09044656372070313, 0.0902392349243164, 0.08968121337890625, 0.08995494079589844, 0.09089766693115234, 0.09399088287353516, 0.09071263885498047, 0.09143856048583984, 0.0905770263671875, 0.09113868713378906, 0.09859059143066407, 0.0914968032836914, 0.09285404968261719, 0.09127117156982421, 0.09032102203369141, 0.08976620483398437, 0.08988652801513672, 0.08986592102050782, 0.08987052917480469, 0.09013203430175781, 0.08969468688964843, 0.08973321533203125, 0.0899681625366211, 0.09061033630371093, 0.09073458862304687, 0.09024214172363282, 0.08991222381591797, 0.09167209625244141, 0.0901165771484375, 0.09041510772705078, 0.09042460632324219, 0.0903564453125, 0.0903719711303711, 0.09049919891357422, 0.09042739105224609, 0.09047449493408204, 0.09052159881591797, 0.09032294464111328, 0.09069964599609374, 0.09044595336914063, 0.09093488311767578, 0.09104630279541015, 0.09046966552734376, 0.09072099304199219, 0.09038787078857421, 0.08988323211669921, 0.09003008270263672, 0.0901053466796875, 
0.09191577911376952, 0.09056768035888672, 0.09037824249267579, 0.09020620727539062, 0.0907973403930664, 0.0901618881225586, 0.09004000091552734, 0.09019149017333984, 0.08963549041748047, 0.09004838562011719, 0.0900814437866211, 0.0902446060180664, 0.09033779144287109, 0.09018982696533204, 0.09004441833496094, 0.09084928131103516, 0.09031168365478516, 0.09045449829101562, 0.09037264251708985, 0.09025740814208984, 0.09011151885986328, 0.09056944274902344, 0.09036160278320313, 0.09052076721191406, 0.0906289291381836, 0.09052489471435547, 0.09079058837890625, 0.09003542327880859, 0.0898485107421875, 0.09085081481933593, 0.0900186538696289, 0.09037964630126953, 0.08995231628417968, 0.0902252197265625, 0.08942108917236329, 0.08993046569824219, 0.09027378845214844, 0.09033436584472657, 0.089695068359375, 0.08985708618164062, 0.09123661041259766, 0.09116761779785157, 0.09004831695556641, 0.0903024673461914, 0.09023693084716797, 0.08961344146728516, 0.09017024230957031, 0.09074073791503906, 0.09107408142089844, 0.09056285095214844, 0.09056070709228516, 0.09086771392822265, 0.09050726318359376, 0.09108595275878906, 0.09106931304931641, 0.0907323226928711, 0.0895162582397461, 0.08999523162841797, 0.0904151382446289, 0.08995635223388672, 0.08919859313964844, 0.08955709075927734, 0.0899419174194336, 0.09187875366210937, 0.09256816101074218, 0.08996160125732422, 0.09040377807617188, 0.09014886474609375, 0.09075862121582032, 0.09173267364501952, 0.09214345550537109, 0.09133465576171874, 0.0900321273803711, 0.09042124938964843, 0.09030854034423828, 0.09043778991699218, 0.09023648071289063, 0.09061615753173828, 0.09084441375732422, 0.09070022583007813, 0.09056598663330079, 0.09061692810058594, 0.09066073608398438, 0.09076767730712891, 0.0909288330078125, 0.09132236480712891, 0.0903004150390625, 0.08980502319335938, 0.08989263916015625, 0.08983261108398438, 0.09004937744140624, 0.090523681640625]",tokens/s,11.032625419143628,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,10678.616064,5536.350208,0.0,5200.93696,5184.311296,s,1,23.298111328125,23.298111328125,0.0,23.298111328125,23.298111328125,23.298111328125,23.298111328125,[23.298111328125],,kWh,0.0004689173453708084,5.1717742519565364e-05,0.00015671734759599432,0.0006773524354863681,,MB,4247.494656,5857.214464,0.0,5433.720832,5337.6896,s,10,1.5288677368164063,0.15288677368164064,0.0010946389572496526,0.15280164337158203,0.15443841705322267,0.15470793228149415,0.15492354446411133,"[0.1533728332519531, 0.15310617065429688, 0.15261541748046875, 0.15192617797851563, 0.15437852478027345, 0.1529878692626953, 0.1521849670410156, 0.15497744750976564, 0.15220162963867187, 
0.15111669921875]",tokens/s,1674.4417704377372,kWh,4.359670135820907e-06,4.804961754983431e-07,2.5844796795223903e-06,7.424645990841642e-06,tokens/kWh,34479758.40407448,MB,4251.623424,5920.129024,0.0,5496.635392,5361.562112,s,10,93.13316015625,9.313316015625,0.03919256994179987,9.33402880859375,9.345691210937499,9.3491400390625,9.3518991015625,"[9.3337509765625, 9.341330078125, 9.3449248046875, 9.334306640625, 9.3525888671875, 9.3349677734375, 9.3110185546875, 9.2777275390625, 9.280740234375, 9.2218046875]",tokens/s,6.764507925459048,kWh,0.00026225230137001183,2.8928129442241083e-05,0.00010843633135927679,0.0003996167621715297,tokens/kWh,157651.044609981,,s,630,93.12899783325199,0.14782380608452692,0.0012180916522557064,0.14774659729003908,0.14903353576660155,0.15016160736083983,0.152548097076416,"[0.14684783935546875, 0.1478982696533203, 0.14833868408203124, 0.1483345947265625, 0.1525506591796875, 0.14822988891601563, 0.147557373046875, 0.14781564331054686, 0.14754281616210937, 0.14783488464355468, 0.14805401611328126, 0.14827696228027343, 0.15126325988769532, 0.14794776916503907, 0.14763740539550782, 0.14761456298828124, 0.147355224609375, 0.14808720397949218, 0.1477980194091797, 0.15132057189941406, 0.14805372619628906, 0.1473661804199219, 0.14919065856933594, 0.14835302734375, 0.14812570190429689, 0.14736997985839845, 0.14849591064453124, 0.14850738525390625, 0.14765548706054688, 0.14747946166992187, 0.14798233032226563, 0.14828134155273437, 0.1473597412109375, 0.15010552978515626, 0.14832902526855468, 0.14760755920410157, 0.147662841796875, 0.14761164855957032, 0.14739605712890624, 0.14822044372558593, 0.14851251220703124, 0.14714906311035156, 0.14771331787109376, 0.14916224670410155, 0.1474093780517578, 0.14834597778320313, 0.1482124481201172, 0.14806646728515624, 0.1472716827392578, 0.14767526245117188, 0.14737564086914062, 0.14794528198242188, 0.148789794921875, 0.14865190124511718, 0.1477739562988281, 0.14748223876953126, 0.1477642822265625, 0.1477736053466797, 0.14840911865234374, 0.14900837707519532, 0.14813743591308592, 0.1478701171875, 0.14737831115722655, 0.14695936584472657, 0.14815628051757812, 0.1484207305908203, 0.14786355590820313, 0.14748777770996094, 0.14832505798339843, 0.14718185424804686, 0.1496813507080078, 0.14885597229003905, 0.147942138671875, 0.1474745635986328, 0.14764111328125, 0.14910624694824218, 0.1484394836425781, 0.1479080047607422, 0.14730300903320312, 0.14761961364746093, 0.14820783996582032, 0.1471979522705078, 0.14813717651367186, 0.14753257751464843, 0.1484472351074219, 0.1500055389404297, 0.1504967041015625, 0.14800770568847657, 0.14770585632324218, 0.14799667358398438, 0.1475747833251953, 0.14795347595214844, 0.14942431640625, 0.14771200561523437, 0.1471009216308594, 0.14736051940917969, 0.1476381072998047, 0.14768348693847655, 0.1485803527832031, 0.14751458740234374, 0.15031776428222657, 0.14791484069824218, 0.14761807250976564, 0.14791241455078125, 0.1485207061767578, 0.14793478393554688, 0.148654052734375, 0.14827127075195312, 0.14813650512695312, 0.15036416625976562, 0.14816569519042969, 0.15025564575195313, 0.1513762512207031, 0.1480361328125, 0.14844502258300782, 0.14823423767089844, 0.14843919372558595, 0.1495203857421875, 0.14804713439941405, 0.14758595275878905, 0.14910035705566407, 0.14845951843261718, 0.14796560668945313, 0.14745152282714843, 0.14784176635742188, 0.14768899536132812, 0.14654521179199217, 0.1486868133544922, 0.14798422241210937, 0.14812498474121094, 0.14807948303222657, 0.14822515869140626, 0.14758592224121095, 
0.14825471496582032, 0.14843283081054687, 0.14879341125488282, 0.14914579772949219, 0.14980514526367186, 0.14707682800292968, 0.14783488464355468, 0.1483468780517578, 0.15077375793457032, 0.1486862335205078, 0.14862806701660156, 0.14886502075195313, 0.14864588928222655, 0.14782669067382812, 0.14812570190429689, 0.14959922790527344, 0.14798486328125, 0.14851536560058592, 0.14872575378417968, 0.153638916015625, 0.14845274353027343, 0.1480853729248047, 0.14869241333007813, 0.14749754333496093, 0.14818415832519533, 0.14775149536132812, 0.14821026611328125, 0.14759500122070313, 0.14872604370117187, 0.14764413452148437, 0.14761164855957032, 0.14749081420898438, 0.14705043029785156, 0.15081887817382814, 0.1475782470703125, 0.14788838195800783, 0.14774925231933594, 0.1481648712158203, 0.14819273376464845, 0.1480440673828125, 0.14791177368164063, 0.14784144592285156, 0.15039663696289063, 0.1490173797607422, 0.14759117126464844, 0.14883865356445314, 0.1473697204589844, 0.14793624877929687, 0.1474999694824219, 0.14848005676269532, 0.14789836120605468, 0.1480865936279297, 0.14784326171875, 0.14831619262695311, 0.14758604431152345, 0.14750770568847657, 0.14658851623535157, 0.14835856628417968, 0.14697532653808593, 0.1471689910888672, 0.14694169616699218, 0.1480169219970703, 0.14691813659667968, 0.1474839630126953, 0.14739491271972657, 0.14707746887207032, 0.14667529296875, 0.14730076599121095, 0.14723481750488282, 0.1471488037109375, 0.15061308288574218, 0.1486094970703125, 0.14693148803710937, 0.14759388732910156, 0.1472321014404297, 0.14830189514160155, 0.14901901245117188, 0.14791897583007813, 0.15040725708007813, 0.14729624938964844, 0.14744773864746094, 0.14781773376464843, 0.14736262512207032, 0.14729830932617188, 0.14947689819335938, 0.14788761901855468, 0.1534693145751953, 0.14803414916992189, 0.14881178283691407, 0.1475869140625, 0.14767120361328126, 0.1481378173828125, 0.14832768249511719, 0.14793002319335938, 0.14743693542480468, 0.14770597839355468, 0.1475609588623047, 0.1477587890625, 0.1484244842529297, 0.14875013732910156, 0.14908390808105468, 0.1475675811767578, 0.14795960998535157, 0.14779820251464842, 0.147884033203125, 0.14974156188964843, 0.14844313049316407, 0.14859400939941406, 0.14816668701171876, 0.1525418243408203, 0.14775631713867188, 0.14879408264160157, 0.1486499786376953, 0.1480863037109375, 0.14727334594726563, 0.14913807678222657, 0.1481320343017578, 0.14815008544921876, 0.14999725341796874, 0.1487689208984375, 0.14874412536621093, 0.14794537353515624, 0.14800486755371095, 0.14750003051757812, 0.14859161376953126, 0.14857574462890624, 0.14916610717773438, 0.14779644775390624, 0.150594970703125, 0.14797410583496093, 0.1477945556640625, 0.1479393310546875, 0.14780953979492187, 0.14748133850097656, 0.1477323913574219, 0.14845960998535157, 0.1515045166015625, 0.14927705383300782, 0.14822195434570312, 0.1475955810546875, 0.14736073303222658, 0.14834870910644532, 0.14761788940429688, 0.1487163848876953, 0.14767056274414062, 0.1479680633544922, 0.14792950439453126, 0.14748179626464844, 0.14781523132324217, 0.14765391540527345, 0.1474829406738281, 0.14795103454589845, 0.15259552001953125, 0.1477237091064453, 0.15151356506347657, 0.14978460693359374, 0.14766265869140624, 0.14806646728515624, 0.14749900817871095, 0.14808883666992187, 0.14742930603027343, 0.14795712280273438, 0.14746054077148438, 0.14747251892089844, 0.14825894165039064, 0.1478795166015625, 0.15302287292480468, 0.14773861694335938, 0.14829542541503907, 0.14837718200683594, 0.1483047332763672, 0.14787770080566406, 
0.14766694641113282, 0.1479354248046875, 0.14835897827148437, 0.15031295776367187, 0.14961640930175782, 0.14819964599609375, 0.15107276916503906, 0.14854121398925782, 0.1479989471435547, 0.14794857788085938, 0.14719683837890624, 0.1489407958984375, 0.14776524353027343, 0.14995571899414062, 0.14830848693847656, 0.14791676330566406, 0.14835977172851564, 0.1479432373046875, 0.14829061889648437, 0.14799679565429688, 0.14796678161621094, 0.14732194519042968, 0.1478480987548828, 0.14728807067871094, 0.14728192138671875, 0.14818460083007812, 0.1482019500732422, 0.14867840576171876, 0.148005126953125, 0.14785276794433594, 0.1476463623046875, 0.14957017517089843, 0.14779833984375, 0.15020748901367187, 0.14775981140136718, 0.14796371459960939, 0.14879353332519532, 0.14796937561035156, 0.14819941711425783, 0.14805430603027345, 0.14881613159179688, 0.14934439086914061, 0.14756658935546876, 0.14806198120117187, 0.14744166564941405, 0.14842083740234374, 0.14799462890625, 0.15256288146972657, 0.14773536682128907, 0.14821331787109376, 0.1478967742919922, 0.14782438659667968, 0.14814031982421874, 0.14718153381347657, 0.14828492736816407, 0.14772274780273437, 0.14858589172363282, 0.1476077423095703, 0.14801344299316407, 0.14795088195800782, 0.1508175354003906, 0.14805538940429688, 0.14769013977050782, 0.14830723571777343, 0.14774345397949218, 0.14719984436035155, 0.14779408264160157, 0.14698495483398438, 0.14694195556640624, 0.1468661804199219, 0.150607421875, 0.14746054077148438, 0.14744781494140624, 0.14705258178710937, 0.14755226135253907, 0.14880770874023438, 0.14892970275878906, 0.14781114196777342, 0.1479147491455078, 0.14701560974121095, 0.14914469909667968, 0.14727264404296875, 0.1476680908203125, 0.1469502410888672, 0.1481510772705078, 0.148411865234375, 0.1482920379638672, 0.147660888671875, 0.14749696350097657, 0.14743142700195314, 0.14783078002929687, 0.14713856506347656, 0.14787350463867188, 0.14672259521484374, 0.14746617126464845, 0.1469354248046875, 0.14742979431152345, 0.14938780212402344, 0.14902793884277343, 0.14690966796875, 0.14723878479003907, 0.14667398071289062, 0.1466534423828125, 0.1474042510986328, 0.14775555419921876, 0.14860882568359374, 0.14994245910644532, 0.14847946166992188, 0.14758470153808595, 0.14778839111328124, 0.14767567443847657, 0.148459228515625, 0.1481359405517578, 0.14786151123046876, 0.14961045837402343, 0.1476157684326172, 0.14771568298339843, 0.14723727416992188, 0.147957763671875, 0.147267578125, 0.14786968994140626, 0.148827392578125, 0.14713218688964844, 0.14775395202636718, 0.147736572265625, 0.14780192565917968, 0.1469806671142578, 0.15005938720703124, 0.14787962341308594, 0.147619384765625, 0.14770867919921876, 0.14687408447265626, 0.14737619018554687, 0.14690447998046874, 0.14707180786132812, 0.14710374450683594, 0.1467664031982422, 0.1470930938720703, 0.14736178588867188, 0.14683558654785156, 0.14697593688964844, 0.14683197021484376, 0.14781858825683594, 0.14817893981933594, 0.14711190795898438, 0.1482137908935547, 0.14846054077148438, 0.1469096374511719, 0.14629875183105467, 0.14689712524414061, 0.1462379150390625, 0.14734278869628906, 0.14659414672851562, 0.14619456481933593, 0.1465713653564453, 0.14635629272460937, 0.14650367736816405, 0.14663388061523439, 0.14703631591796876, 0.14609266662597656, 0.14650367736816405, 0.14651708984375, 0.14598851013183595, 0.14611856079101562, 0.14893679809570312, 0.147736572265625, 0.1479413757324219, 0.14753753662109376, 0.14764802551269532, 0.1474589080810547, 0.14752153015136718, 0.14738432312011718, 
0.14751744079589843, 0.14752700805664062, 0.14809971618652343, 0.14817660522460938, 0.147687744140625, 0.14728565979003908, 0.14753952026367187, 0.14727154541015625, 0.14715177917480468, 0.1506996765136719, 0.1476050567626953, 0.14755101013183594, 0.14756156921386718, 0.1479730224609375, 0.14717706298828126, 0.1469628448486328, 0.1469276123046875, 0.14639295959472656, 0.14678848266601563, 0.14681321716308593, 0.1477321319580078, 0.14706080627441406, 0.14892013549804686, 0.147242431640625, 0.1476507873535156, 0.1466425323486328, 0.14672557067871095, 0.14731471252441405, 0.14752153015136718, 0.14672691345214844, 0.15097036743164063, 0.14743942260742188, 0.1471203155517578, 0.15069996643066405, 0.14652415466308594, 0.14648326110839843, 0.1471091766357422, 0.14706944274902345, 0.14655302429199218, 0.14708714294433595, 0.146467041015625, 0.14792410278320312, 0.1471005401611328, 0.1477439422607422, 0.14734185791015625, 0.14652275085449218, 0.14722377014160157, 0.14695248413085937, 0.14678623962402343, 0.14647727966308594, 0.14653353881835937, 0.1462156219482422, 0.14591973876953124, 0.14618167114257813, 0.146374755859375, 0.14671337890625, 0.14699853515625, 0.14702021789550782, 0.14626194763183595, 0.14982815551757814, 0.14671757507324218, 0.14598381042480468, 0.14593222045898438, 0.14643910217285155, 0.1466385955810547, 0.1464617919921875, 0.14648617553710938, 0.1459996795654297, 0.14592434692382814, 0.146762939453125, 0.1464713897705078, 0.14786726379394532, 0.14831234741210939, 0.1488326416015625, 0.14705389404296876, 0.14693618774414063, 0.1541041259765625, 0.14793318176269532, 0.14828901672363282, 0.1483284454345703, 0.14770860290527343, 0.14852064514160157, 0.14651519775390626, 0.14685224914550782, 0.14657760620117188, 0.14690304565429688, 0.14700361633300782, 0.14789657592773436, 0.14898538208007814, 0.14869923400878907, 0.14670469665527344, 0.14613090515136717, 0.14549200439453125, 0.14548173522949218, 0.14490010070800782, 0.14539689636230468, 0.1448835906982422, 0.1483737335205078, 0.1475629119873047, 0.14669654846191407, 0.14581106567382812, 0.14578034973144532, 0.14567706298828126, 0.14653372192382813, 0.1462864990234375, 0.14665805053710937, 0.14665318298339844, 0.14647698974609374, 0.14642387390136719, 0.14603846740722656, 0.14508175659179687, 0.14660240173339845, 0.146323974609375, 0.14844313049316407, 0.14630706787109374, 0.14712979125976564, 0.1468780517578125, 0.14747337341308595, 0.14692950439453126, 0.14630313110351562, 0.14582521057128905, 0.1452672576904297, 0.14557594299316406, 0.14541413879394532, 0.1464934387207031, 0.1465641326904297, 0.148885986328125, 0.14697244262695314, 0.14647923278808594, 0.14551043701171876, 0.14575823974609375, 0.14594915771484376, 0.1470300750732422, 0.1467342987060547, 0.14950274658203125, 0.14676786804199218, 0.14647212219238281, 0.14567408752441408, 0.14571533203125, 0.1458461151123047, 0.14637362670898438, 0.1461964874267578, 0.14517657470703124, 0.1504215087890625, 0.1454898223876953, 0.1462205810546875, 0.1459717712402344, 0.14654229736328125, 0.14626031494140626, 0.14602029418945311, 0.14583938598632812, 0.1453841552734375, 0.1456619567871094]",tokens/s,6.764810259507128,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,14016.106496,9019.71968,0.0,8751.415296,8749.162496,s,1,31.16703515625,31.16703515625,0.0,31.16703515625,31.16703515625,31.16703515625,31.16703515625,[31.16703515625],,kWh,0.0007002599188958205,7.723139200886418e-05,0.00023481129895999708,0.0010123026098646818,,MB,3552.669696,9409.789952,0.0,8986.29632,8843.8656,s,10,1.1860337295532228,0.11860337295532228,0.00036292135391698284,0.11865356826782227,0.1190838035583496,0.11913647956848145,0.11917862037658691,"[0.11877097320556641, 0.11907209777832031, 0.1187577896118164, 0.11820588684082031, 0.11881324768066406, 0.11918915557861329, 0.11819452667236328, 0.11854934692382812, 0.11842291259765625, 0.11805779266357422]",tokens/s,2158.4546343082066,kWh,3.540078580471962e-06,3.9040808893911926e-07,2.3341551471080997e-06,6.2646418165191806e-06,tokens/kWh,40864267.66251117,MB,3556.872192,9409.789952,0.0,8986.29632,8913.014784,s,10,62.6527958984375,6.26527958984375,0.014779773464531545,6.258895263671875,6.280024951171875,6.291793725585937,6.301208745117187,"[6.3035625, 6.25422412109375, 6.25474755859375, 6.27740966796875, 6.2566396484375, 6.2706201171875, 6.26479248046875, 6.25300927734375, 6.25765771484375, 6.2601328125]",tokens/s,10.055417175974927,kWh,0.00018315348075327646,2.020248452225317e-05,9.543858973769171e-05,0.0002987945550132214,tokens/kWh,210847.2157305956,,s,630,62.6500216598511,0.09944447882516044,0.0011025928365699926,0.09917630386352538,0.10020718536376953,0.10114470520019532,0.10388863182067871,"[0.09943122863769531, 0.09975228881835937, 0.0994486083984375, 0.09914736175537109, 0.09907798767089844, 0.09943302154541016, 0.09940544128417969, 0.09944879913330078, 0.09920735931396485, 0.09923324584960938, 0.09917247772216797, 0.09916793823242187, 0.09976313781738282, 0.09931116485595703, 0.1005836181640625, 0.10113641357421875, 0.09985465240478515, 0.10001593780517579, 0.09991177368164063, 0.10029379272460938, 0.09953981018066406, 0.09959219360351562, 0.09997516632080078, 0.09985004425048828, 0.1002416000366211, 0.10051097869873046, 0.09998822021484376, 0.09935871887207032, 0.09949922943115234, 0.09982383728027344, 0.09963330841064454, 0.099578369140625, 0.09939724731445312, 0.10982550048828126, 0.10046546936035156, 0.09967772674560547, 0.10401967620849609, 0.100031005859375, 0.09982998657226562, 0.09953218841552734, 0.09964553833007812, 0.10003260803222656, 0.10002217864990234, 0.09941417694091798, 0.09916655731201172, 0.09925984191894531, 0.09967814636230468, 0.09982220458984375, 0.09956761932373047, 0.09912934112548828, 0.10087423706054688, 0.10080847930908203, 0.10011856079101562, 0.09956352233886719, 0.10053196716308593, 0.09945065307617187, 0.10119385528564454, 0.10034639739990234, 0.10055801391601563, 0.09992582702636718, 0.09995523071289063, 0.09992845153808594, 0.10109142303466796, 0.0996078109741211, 0.10013158416748047, 0.09975926208496094, 
0.10103250885009765, 0.09976351928710937, 0.09937814331054687, 0.09929058837890625, 0.09935517120361329, 0.09926246643066407, 0.09963228607177735, 0.09949884796142579, 0.09883033752441406, 0.09889791870117187, 0.09893593597412109, 0.09875958251953125, 0.09921043395996093, 0.09884547424316406, 0.09927862548828124, 0.0988034210205078, 0.09864415740966796, 0.09933625793457031, 0.09904563140869141, 0.09909248352050781, 0.09893251037597656, 0.09913571166992187, 0.09905872344970704, 0.09935971069335937, 0.09928265380859375, 0.1011118392944336, 0.09979644775390625, 0.09925421142578125, 0.09873494720458985, 0.09890966033935547, 0.09956201934814453, 0.09924137878417968, 0.098859619140625, 0.0988096923828125, 0.09844751739501953, 0.09927680206298828, 0.0984466552734375, 0.09896006774902344, 0.09868492889404297, 0.10202931213378906, 0.0990044174194336, 0.09899116516113281, 0.09912006378173828, 0.09841868591308593, 0.09869926452636718, 0.09900012969970703, 0.09878870391845704, 0.10020336151123047, 0.09895724487304687, 0.09948985290527344, 0.09904038238525391, 0.10008889770507813, 0.10043373107910156, 0.09954508972167969, 0.09890569305419922, 0.09912361907958984, 0.09875846099853515, 0.09913142395019531, 0.09890013122558594, 0.09909657287597656, 0.09849113464355469, 0.09956320190429688, 0.09888966369628906, 0.09909900665283203, 0.09920079803466797, 0.10217084503173827, 0.10010198211669921, 0.0988141098022461, 0.1005728302001953, 0.09946758270263673, 0.09966595458984374, 0.09946112060546874, 0.09944268798828125, 0.09879551696777343, 0.09920511627197266, 0.09894265747070312, 0.09870368194580079, 0.09860915374755859, 0.09878083038330078, 0.09859664154052734, 0.0989241943359375, 0.09864076995849609, 0.10154569244384766, 0.09964131164550781, 0.0994942398071289, 0.09900032043457031, 0.09966553497314454, 0.09898390197753906, 0.09878982543945312, 0.0986562271118164, 0.09902681732177734, 0.09916429138183594, 0.09928857421875, 0.09916896057128906, 0.09910409545898438, 0.0990516128540039, 0.09963686370849609, 0.1011514892578125, 0.10038272094726562, 0.10036758422851562, 0.0992673568725586, 0.09965894317626953, 0.09913206481933594, 0.09884073638916016, 0.0990531234741211, 0.0987918701171875, 0.09937296295166016, 0.09850675201416016, 0.0988628158569336, 0.0985317153930664, 0.09890735626220704, 0.09861404418945313, 0.10111180877685547, 0.09951398468017578, 0.09929714965820312, 0.09875917053222656, 0.0987031021118164, 0.0989771499633789, 0.09901254272460938, 0.09842160034179688, 0.09871984100341796, 0.09848636627197266, 0.09969654083251953, 0.0988919677734375, 0.10289328002929687, 0.09981571197509766, 0.09865161895751953, 0.09868704223632813, 0.09886563110351562, 0.09890338897705078, 0.09884076690673828, 0.0994708480834961, 0.09876598358154297, 0.09971897888183594, 0.09905292510986329, 0.09917475128173828, 0.09861257934570312, 0.10646141052246094, 0.09942054748535156, 0.09952496337890625, 0.09922313690185547, 0.09977897644042968, 0.0990412826538086, 0.09975183868408204, 0.09912445068359375, 0.10005939483642579, 0.09908493041992188, 0.09921331024169922, 0.09868045043945313, 0.09925663757324218, 0.10291001892089843, 0.10005299377441407, 0.09872793579101563, 0.09944258880615234, 0.09948563385009766, 0.09908854675292969, 0.09875251007080078, 0.09917366027832031, 0.09875702667236329, 0.09885462188720703, 0.0989681625366211, 0.0993812484741211, 0.09898188781738282, 0.09903513336181641, 0.09959750366210937, 0.10186214447021484, 0.09943654632568359, 0.09968851470947265, 0.10170909118652344, 0.09933897399902344, 
0.0989469451904297, 0.10011046600341797, 0.09973554992675782, 0.09912319946289062, 0.09934438323974609, 0.09966384124755859, 0.09907968139648438, 0.0992425308227539, 0.09909180450439453, 0.09968000030517578, 0.09914659118652344, 0.0997040023803711, 0.10190937805175782, 0.09989119720458985, 0.09950198364257813, 0.10074758148193359, 0.10150899505615234, 0.09925465393066406, 0.09886086273193359, 0.09899052429199219, 0.09975014495849609, 0.09931136322021485, 0.09905964660644531, 0.09904307556152343, 0.0999240951538086, 0.09888582611083985, 0.09881132507324218, 0.09984169769287109, 0.09927772521972657, 0.09884492492675781, 0.09921920013427735, 0.09887907409667969, 0.09871196746826172, 0.0990730209350586, 0.10010726165771484, 0.09899827575683594, 0.09882828521728515, 0.09895116424560547, 0.09912934112548828, 0.0985907211303711, 0.09938534545898438, 0.09874018859863282, 0.09908585357666015, 0.09886361694335938, 0.09942015838623047, 0.0993175048828125, 0.09893504333496093, 0.09855494689941406, 0.09911763000488281, 0.0990355224609375, 0.09965516662597657, 0.09901516723632812, 0.0990145263671875, 0.1014062042236328, 0.10041165161132813, 0.0993177261352539, 0.0993120346069336, 0.0991173095703125, 0.09961385345458984, 0.09948220825195313, 0.09912319946289062, 0.09940582275390625, 0.09899622344970703, 0.09951875305175781, 0.0994110107421875, 0.09923600006103515, 0.09932777404785156, 0.09861808013916015, 0.09956352233886719, 0.09890838623046876, 0.0997332763671875, 0.09926831817626953, 0.09954742431640624, 0.09920921325683593, 0.09932969665527344, 0.09932399749755859, 0.10011625671386719, 0.09921993255615234, 0.09984210968017578, 0.09963970947265625, 0.09962905883789062, 0.0989358367919922, 0.09916719818115234, 0.09920102691650391, 0.09938524627685547, 0.09960457611083984, 0.09899417877197265, 0.09925126647949219, 0.09882495880126953, 0.10009327697753906, 0.09905673980712891, 0.09878502655029296, 0.09876175689697266, 0.1093150405883789, 0.09989308929443359, 0.09973168182373048, 0.10384786987304688, 0.09959014129638671, 0.09889702606201171, 0.09965894317626953, 0.09898973083496093, 0.0989609603881836, 0.09825472259521484, 0.0991258544921875, 0.09875788879394531, 0.09887513732910157, 0.09941709136962891, 0.09906739044189453, 0.0986107177734375, 0.09929417419433594, 0.10279065704345704, 0.0997913589477539, 0.09943596649169922, 0.09989910125732422, 0.09964220428466797, 0.09951356506347656, 0.09939369964599609, 0.09955391693115234, 0.09886124420166016, 0.09920800018310547, 0.09994547271728516, 0.09880780792236328, 0.0987914276123047, 0.09893257904052734, 0.09889603424072266, 0.09937430572509766, 0.09893462371826171, 0.10035456085205079, 0.0991707534790039, 0.09972898864746094, 0.0990799331665039, 0.09954105377197266, 0.09899273681640625, 0.10051081848144532, 0.0987350082397461, 0.09911030578613281, 0.0986704330444336, 0.09929993438720704, 0.09876290893554687, 0.09891798400878907, 0.0989163818359375, 0.09916588592529296, 0.09916544342041016, 0.09899394989013671, 0.09879759979248047, 0.0986304931640625, 0.09896742248535156, 0.09879801940917969, 0.09923113250732422, 0.09884630584716797, 0.09907279968261719, 0.10390528106689453, 0.10041120147705078, 0.09911519622802735, 0.09940496063232422, 0.09874658966064453, 0.09919728088378907, 0.09963465881347656, 0.09933500671386719, 0.09890608215332031, 0.09881021118164063, 0.0987208023071289, 0.09908902740478516, 0.09846918487548828, 0.10866553497314453, 0.10035769653320313, 0.09940831756591798, 0.09964749145507812, 0.09899622344970703, 0.09853084564208985, 
0.09911958312988281, 0.09844124603271484, 0.09924809265136719, 0.09904537963867187, 0.09919257354736329, 0.09899571228027344, 0.09906047821044922, 0.09862553405761719, 0.09937680053710937, 0.09869709014892578, 0.09967225646972656, 0.09893654632568359, 0.0990316162109375, 0.09862886047363281, 0.09879424285888672, 0.09915750122070313, 0.09890812683105468, 0.09818342590332031, 0.09921564483642578, 0.09901821136474609, 0.099672607421875, 0.09945401763916016, 0.09933001708984375, 0.10348438262939454, 0.09948569488525391, 0.10264780426025391, 0.0999629135131836, 0.09896752166748046, 0.09934003448486328, 0.09919718170166016, 0.09896710205078126, 0.09840684509277343, 0.09925862121582031, 0.09828530883789062, 0.0988590087890625, 0.10076048278808594, 0.09893199920654297, 0.09880239868164062, 0.10165862274169922, 0.09940582275390625, 0.09886307525634766, 0.09902896118164063, 0.09991808319091797, 0.09955104064941406, 0.0991778564453125, 0.09916617584228515, 0.09873887634277344, 0.09878482818603515, 0.09906531524658203, 0.09864083099365234, 0.09852722930908203, 0.09847808074951171, 0.09906813049316407, 0.09887312316894531, 0.09853558349609375, 0.09983296203613282, 0.09882598114013671, 0.09914876556396485, 0.09926806640625, 0.09912313842773438, 0.09858313751220703, 0.09893682861328125, 0.09974575805664063, 0.09907324981689453, 0.09887184143066406, 0.09910249328613281, 0.0983741455078125, 0.09932176208496094, 0.09944278717041016, 0.0995962905883789, 0.09913343811035157, 0.09895120239257812, 0.09914335632324218, 0.09901200103759765, 0.09857727813720703, 0.09902489471435547, 0.0985959701538086, 0.09893708801269531, 0.0994228515625, 0.09938662719726563, 0.0990014419555664, 0.10255481719970704, 0.10194525146484375, 0.1004877471923828, 0.09896959686279297, 0.09905561828613281, 0.0985025634765625, 0.09906425476074218, 0.10128956604003907, 0.09978681945800781, 0.09923891448974609, 0.0990750732421875, 0.09871071624755859, 0.0986728973388672, 0.09850118255615234, 0.09921536254882812, 0.09856610870361328, 0.09869884490966797, 0.09903670501708985, 0.0994881591796875, 0.09904489898681641, 0.09934390258789062, 0.09906249237060546, 0.09929142761230468, 0.09918669128417969, 0.09863372802734376, 0.09874345397949219, 0.09908665466308594, 0.09920771026611327, 0.09893462371826171, 0.09931196594238281, 0.09874208068847656, 0.09888358306884766, 0.09904742431640624, 0.09895948791503906, 0.09858854675292969, 0.09852722930908203, 0.09914777374267578, 0.09894624328613282, 0.09879039764404297, 0.09924598693847657, 0.10109327697753906, 0.09994630432128906, 0.0990742416381836, 0.10104013061523437, 0.0989017562866211, 0.0991193618774414, 0.09957341003417969, 0.09916655731201172, 0.09888768005371094, 0.09962931060791015, 0.0991552963256836, 0.09908060455322265, 0.09891840362548829, 0.0997232666015625, 0.09896463775634766, 0.09892131042480469, 0.09880287933349609, 0.10209343719482422, 0.09943408203125, 0.09912496185302734, 0.10119872283935546, 0.10281983947753906, 0.09893199920654297, 0.10000399780273438, 0.09891481781005859, 0.09917855834960937, 0.09903103637695312, 0.09935564422607422, 0.09888396453857422, 0.09891088104248047, 0.09909859466552734, 0.09952767944335937, 0.09847500610351563, 0.09956082916259766, 0.10126127624511719, 0.09902342224121094, 0.09866966247558594, 0.09895763397216797, 0.09857625579833984, 0.09910291290283203, 0.09952217864990234, 0.09924674987792968, 0.09949177551269531, 0.09871798706054688, 0.09863993835449218, 0.09928256225585938, 0.09922393798828125, 0.09899378967285156, 0.09920559692382812, 
0.09874217224121094, 0.09933414459228515, 0.09919904327392579, 0.09956140899658203, 0.0991457290649414, 0.09898313903808593, 0.09916400146484375, 0.09942521667480468, 0.09886681365966797, 0.10225836944580079, 0.09991986846923828, 0.09967686462402343, 0.09885286712646485, 0.09982502746582031, 0.10035072326660156, 0.0998603515625, 0.09966182708740234, 0.09908214569091797, 0.09910486602783203, 0.09942345428466796, 0.09897596740722656, 0.09918217468261718, 0.09947634887695313, 0.0988570556640625, 0.09898188781738282, 0.09899622344970703, 0.09828966522216796, 0.09916758728027343, 0.09881037139892578, 0.0991337890625, 0.09894073486328125, 0.09897164916992188, 0.0997109146118164, 0.09892870330810546, 0.09910272216796875, 0.09922560119628906, 0.09845760345458984, 0.09928083038330078, 0.09919213104248047, 0.09990220642089843, 0.09939762878417968, 0.09912115478515625, 0.09901401519775391, 0.09943103790283203, 0.09922354888916016, 0.09895526123046874, 0.09935612487792969, 0.09959024047851563, 0.09885740661621094, 0.09947885131835937, 0.09927750396728516, 0.10066124725341796, 0.09889292907714843, 0.1042000961303711]",tokens/s,10.055862445195801,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 63567 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,14068.785152,7826.440192,0.0,7423.91808,7411.122688,s,1,31.281576171875,31.281576171875,0.0,31.281576171875,31.281576171875,31.281576171875,31.281576171875,[31.281576171875],,kWh,0.0006986173352083141,7.705552035690005e-05,0.0002344412986640121,0.0010101141542292263,,MB,1360.150528,8279.425024,0.0,7855.931392,7802.664448,s,10,1.146432189941406,0.11464321899414061,0.0003597562497143943,0.11456543731689453,0.11512906646728516,0.11528823089599609,0.11541556243896485,"[0.11467536163330078, 0.11451699066162109, 0.11432367706298828, 0.11416486358642577, 0.11544739532470703, 0.11436323547363281, 0.11454402923583984, 0.11458684539794922, 0.11471609497070312, 0.11509369659423828]",tokens/s,2233.0147587105357,kWh,3.386829001245174e-06,3.7350428502813773e-07,2.2422814873104727e-06,6.002614773583785e-06,tokens/kWh,42648080.82081176,MB,1416.58112,8281.522176,0.0,7855.931392,7739.936768,s,10,52.35908544921875,5.235908544921875,0.010178370027459308,5.239252197265625,5.24714814453125,5.2489068359374995,5.2503137890625,"[5.2413486328125, 5.22217236328125, 5.24278173828125, 5.22356494140625, 5.25066552734375, 5.22391015625, 5.24675732421875, 5.22710205078125, 5.243626953125, 
5.23715576171875]",tokens/s,12.032295724703118,kWh,0.00015289196024458938,1.686447626164176e-05,7.864794989148773e-05,0.00024840438639771886,tokens/kWh,253618.70985293735,,s,630,52.35661626434329,0.08310574010213218,0.0008719917757053033,0.08293159866333008,0.08377255096435546,0.08453054084777832,0.08666472557067871,"[0.08282521820068359, 0.08292352294921874, 0.08234803009033204, 0.08220262145996093, 0.08261631774902344, 0.08489539337158203, 0.08266377258300782, 0.08346860504150391, 0.08238457489013672, 0.08226201629638671, 0.0830318374633789, 0.08327394866943359, 0.08313865661621093, 0.08417475128173828, 0.08266957092285156, 0.08283955383300781, 0.08269123077392578, 0.08344435119628907, 0.08264521789550781, 0.0826976318359375, 0.0823276138305664, 0.08222774505615234, 0.08312448120117187, 0.08241161346435547, 0.08228569793701172, 0.08380847930908203, 0.08243436431884765, 0.08243119812011719, 0.08221295928955077, 0.08290137481689454, 0.08211872100830078, 0.08398182678222656, 0.08297958374023437, 0.0830528335571289, 0.08370352172851563, 0.08330662536621093, 0.08361519622802735, 0.08351494598388672, 0.08285874938964843, 0.08563932800292968, 0.08372013092041015, 0.08617574310302735, 0.08322978973388671, 0.08334559631347656, 0.08433324432373047, 0.08333110046386719, 0.08351334381103516, 0.08297471618652344, 0.08254013061523438, 0.08254300689697265, 0.08263065338134766, 0.08291123199462891, 0.08256438446044922, 0.0835263671875, 0.08548566436767578, 0.08322243499755859, 0.08241712188720703, 0.0850898208618164, 0.08248627471923828, 0.08287260437011719, 0.08305430603027343, 0.08425875091552734, 0.08472105407714844, 0.082853759765625, 0.08282864379882812, 0.08234255981445313, 0.08255487823486328, 0.08559603118896485, 0.0827741470336914, 0.08282112121582032, 0.08246502685546875, 0.0830830078125, 0.08293350219726563, 0.08247154998779296, 0.0827501449584961, 0.08251840209960938, 0.0824972152709961, 0.08270527648925781, 0.08305174255371094, 0.0827676773071289, 0.08277680206298828, 0.08301805114746094, 0.08243913269042968, 0.08251897430419922, 0.08339868927001953, 0.08264070129394531, 0.08263289642333985, 0.08291645050048828, 0.08276876831054687, 0.08265321350097657, 0.08290908813476562, 0.08305423736572265, 0.08306972503662109, 0.08317916870117187, 0.08302381134033203, 0.08317343902587891, 0.08324060821533204, 0.08264329528808594, 0.08278630065917969, 0.08308857727050781, 0.08309433746337891, 0.08279654693603515, 0.08255020904541016, 0.08233763122558593, 0.08329939270019532, 0.0832138900756836, 0.08218019104003907, 0.08262223815917968, 0.08367955017089844, 0.08279849243164063, 0.08276377868652343, 0.082716064453125, 0.08273187255859375, 0.08240243530273438, 0.0826822738647461, 0.0830730209350586, 0.08272860717773438, 0.08318447875976563, 0.08315465545654296, 0.08310169219970703, 0.08428710174560547, 0.08309561920166016, 0.08259375762939453, 0.08242819213867188, 0.08315910339355469, 0.08231756591796875, 0.08302591705322265, 0.08305420684814453, 0.08326300811767579, 0.08301805114746094, 0.08317906951904297, 0.0835400619506836, 0.08323980712890625, 0.08307917022705077, 0.083019775390625, 0.0828375015258789, 0.08269948577880859, 0.08248921966552734, 0.0822690887451172, 0.08195276641845703, 0.08376262664794921, 0.08223785400390625, 0.08238617706298829, 0.08239942169189453, 0.08240815734863281, 0.08258255767822266, 0.08246121978759766, 0.08234255981445313, 0.0825087661743164, 0.08258595275878906, 0.08260655975341796, 0.0831541748046875, 0.08327788543701171, 0.08352009582519532, 0.08258723449707031, 
0.08287100982666015, 0.08259171295166015, 0.08929261016845703, 0.08708505249023438, 0.08438329315185547, 0.08350867462158203, 0.08328294372558594, 0.08467250823974609, 0.08328752136230469, 0.08358147430419922, 0.0837754898071289, 0.08344892883300781, 0.08340956878662109, 0.0827394561767578, 0.08279065704345703, 0.08311577606201172, 0.08255078125, 0.08230838775634766, 0.08232182312011718, 0.08358505249023437, 0.08448233795166016, 0.08271459197998046, 0.08273923492431641, 0.08261007690429688, 0.08378582763671875, 0.083453857421875, 0.0832359390258789, 0.08334188842773438, 0.08288915252685547, 0.08335155487060547, 0.08242585754394531, 0.08638499450683594, 0.08245827484130859, 0.08260124969482421, 0.08265827178955078, 0.08308723449707031, 0.08293334197998047, 0.08332342529296875, 0.08299520111083984, 0.08307469177246093, 0.08287696075439453, 0.08284758758544922, 0.08277811431884766, 0.08257126617431641, 0.08207360076904296, 0.08214937591552735, 0.08232454681396484, 0.082538818359375, 0.0823609619140625, 0.0824299545288086, 0.08314060974121094, 0.08268521881103516, 0.0827356185913086, 0.08307266998291016, 0.08282374572753906, 0.08289663696289062, 0.08321459197998046, 0.08292352294921874, 0.08296575927734375, 0.0829939193725586, 0.08282316589355469, 0.08309315490722656, 0.08315532684326171, 0.08321430206298829, 0.08272895812988282, 0.08277401733398437, 0.08242758178710938, 0.08307917022705077, 0.08311430358886719, 0.08392447662353515, 0.08388864135742187, 0.0835149154663086, 0.08234236907958985, 0.0824928970336914, 0.0825346221923828, 0.08238726043701172, 0.08250358581542969, 0.08329948425292968, 0.08256403350830079, 0.08288050842285156, 0.08573977661132813, 0.08321603393554687, 0.083165283203125, 0.0831283187866211, 0.0823473892211914, 0.08248172760009766, 0.08241567993164063, 0.08228025817871094, 0.08261888122558594, 0.08268972778320313, 0.0822824935913086, 0.08245657348632812, 0.08314076995849609, 0.08448627471923828, 0.08313622283935547, 0.08381443023681641, 0.08270240020751952, 0.08325708770751954, 0.08292745971679688, 0.08287055969238281, 0.08286617279052734, 0.08275302124023437, 0.08313292694091796, 0.08285721588134766, 0.08305331420898437, 0.08419884490966797, 0.08325382232666016, 0.08258719635009766, 0.08283590698242188, 0.083142333984375, 0.08313887786865234, 0.08248934173583984, 0.08306889343261718, 0.08380210876464844, 0.08328524780273437, 0.08267241668701172, 0.08281702423095703, 0.08308128356933593, 0.08322354888916016, 0.08309228515625, 0.08398796844482422, 0.08654083251953125, 0.08313251495361328, 0.08264463806152343, 0.08350550079345703, 0.08667052459716797, 0.08377222442626953, 0.08302387237548828, 0.08367842864990234, 0.08443116760253906, 0.08413152313232422, 0.08373715209960937, 0.08307119750976563, 0.08321343994140624, 0.08266947174072266, 0.08244038391113281, 0.08338921356201172, 0.08665052795410157, 0.08311759948730468, 0.08262502288818359, 0.08264070129394531, 0.08336624145507812, 0.08377734375, 0.08307318115234374, 0.0832841567993164, 0.08301728057861328, 0.08322252655029297, 0.0828686752319336, 0.0831666259765625, 0.08799068450927734, 0.08253254699707031, 0.08256217956542969, 0.08238988494873047, 0.08345782470703125, 0.08309101104736329, 0.08302828979492187, 0.08314275360107422, 0.08238921356201172, 0.08238288116455078, 0.08220060729980469, 0.08289238739013671, 0.08327120208740234, 0.08268838500976562, 0.08247551727294922, 0.08305049896240234, 0.08233955383300781, 0.08220066833496094, 0.08264313507080077, 0.08210841369628906, 0.08237660980224609, 0.08286246490478516, 
0.08308092498779297, 0.08294400024414063, 0.08313890838623048, 0.08275727844238281, 0.08285388946533204, 0.08330035400390624, 0.08292556762695312, 0.08303206634521484, 0.08309372711181641, 0.08297977447509766, 0.08302678680419921, 0.0824560317993164, 0.08250422668457032, 0.0822824935913086, 0.08266339111328125, 0.08225523376464844, 0.08250621032714844, 0.08290528106689453, 0.082757568359375, 0.08255430603027344, 0.08263340759277343, 0.08265926361083985, 0.08281292724609375, 0.08247872161865234, 0.08313219451904297, 0.08275004577636719, 0.08381568145751953, 0.08334976196289062, 0.08341657257080078, 0.08337100982666015, 0.08339046478271485, 0.08346214294433593, 0.08348262023925782, 0.08329420471191407, 0.08352297973632812, 0.08346070098876954, 0.08351849365234375, 0.08325424194335937, 0.08321024322509765, 0.08330646514892578, 0.08311196899414063, 0.08308470153808593, 0.08296083068847657, 0.08287862396240234, 0.08273490905761718, 0.08289875030517578, 0.08263731384277344, 0.08235750579833985, 0.08251795196533203, 0.08243885040283203, 0.08330239868164062, 0.08347964477539062, 0.08293846130371094, 0.08254073333740235, 0.08243199920654297, 0.08250982666015624, 0.08273935699462891, 0.08253218841552734, 0.0823214111328125, 0.08340214538574219, 0.08312687683105469, 0.08324729919433593, 0.08288851165771484, 0.08262560272216797, 0.08272364807128907, 0.082725341796875, 0.08532329559326172, 0.08613043212890625, 0.08481126403808593, 0.0835183334350586, 0.08293376159667969, 0.08453497314453125, 0.0829507827758789, 0.08330003356933594, 0.08308723449707031, 0.08340646362304688, 0.08424089813232422, 0.08519270324707032, 0.08299929809570313, 0.08353791809082031, 0.08336761474609375, 0.08343788909912109, 0.08299881744384766, 0.08332537841796875, 0.08422431945800782, 0.08324681854248046, 0.08325529479980469, 0.08314675140380859, 0.08364774322509766, 0.08338470458984375, 0.08262319946289062, 0.08266694641113281, 0.08284182739257813, 0.08326348876953125, 0.08273260498046875, 0.08382061004638672, 0.08690930938720703, 0.08294355010986328, 0.08242018890380859, 0.08265084838867187, 0.08464118194580078, 0.08266560363769532, 0.08284966278076172, 0.08264176177978516, 0.08276175689697265, 0.08288377380371094, 0.08304867553710937, 0.08308297729492188, 0.0828076171875, 0.08318569946289063, 0.08266697692871093, 0.08286182403564453, 0.08328189086914063, 0.08288134765625, 0.08263680267333984, 0.0832142105102539, 0.0836779556274414, 0.08345132446289062, 0.08334502410888672, 0.08339961242675781, 0.08334960174560548, 0.08333507537841797, 0.08452512359619141, 0.08335968017578126, 0.08323836517333984, 0.08295273590087891, 0.08319286346435546, 0.08310269165039062, 0.08279859161376953, 0.08301948547363282, 0.08293199920654297, 0.08293785858154297, 0.0829276123046875, 0.08263270568847657, 0.08280403137207032, 0.0833628158569336, 0.08280998229980469, 0.08259798431396484, 0.08255709075927735, 0.08238262176513672, 0.08277251434326172, 0.08240054321289063, 0.08271161651611328, 0.08292111968994141, 0.08333491516113281, 0.08313587188720703, 0.08329424285888672, 0.08304659271240235, 0.08337065887451171, 0.08243177795410156, 0.08249954986572265, 0.08248140716552735, 0.08284690856933594, 0.08307817840576172, 0.08276557159423828, 0.08264678192138672, 0.08377986907958984, 0.08319181060791016, 0.0832020492553711, 0.08332492828369141, 0.08290303802490234, 0.08276134490966797, 0.08258598327636718, 0.08254198455810546, 0.08335135650634766, 0.08236332702636719, 0.08267743682861328, 0.08288297271728516, 0.08305023956298828, 0.08268927764892578, 
0.08415663909912109, 0.08260662078857423, 0.08247529602050781, 0.08265904235839844, 0.08263257598876952, 0.08239730834960937, 0.08270057678222656, 0.0822635498046875, 0.08364387512207032, 0.08352947235107422, 0.08279961395263671, 0.08300748443603516, 0.0827694091796875, 0.08320256042480469, 0.08652390289306641, 0.08542822265625, 0.08283785247802734, 0.08312207794189454, 0.08330806732177734, 0.0843658218383789, 0.0829551010131836, 0.08334047698974609, 0.08302515411376953, 0.08288505554199219, 0.08282931518554687, 0.08292988586425781, 0.08360118103027343, 0.08324294281005859, 0.08347654724121094, 0.08298003387451172, 0.08318985748291016, 0.0880909423828125, 0.08346870422363281, 0.082761474609375, 0.08267391967773438, 0.08380518341064454, 0.0837193603515625, 0.08260755157470703, 0.08252249908447265, 0.08274534606933594, 0.08273206329345703, 0.08245142364501953, 0.08274329376220703, 0.08264534759521484, 0.08609142303466796, 0.08279763031005859, 0.08243500518798828, 0.08240946960449219, 0.08290003204345703, 0.08261933135986328, 0.08269647979736328, 0.08274301147460937, 0.08272064208984375, 0.08261644744873047, 0.08247090911865235, 0.08248320007324218, 0.08266342163085938, 0.08315084838867187, 0.08253440093994141, 0.08290303802490234, 0.08300953674316407, 0.08266854095458985, 0.08268883514404297, 0.08306217956542969, 0.08302262115478516, 0.08311398315429687, 0.08326758575439454, 0.083052734375, 0.08334143829345703, 0.08511408233642578, 0.08281961822509766, 0.08313875579833985, 0.08335340881347657, 0.08323705291748047, 0.08300115203857422, 0.0830506591796875, 0.0826655044555664, 0.08273305511474609, 0.0823381118774414, 0.08272649383544922, 0.08220706939697266, 0.08238256072998047, 0.0831220474243164, 0.08512518310546875, 0.08241942596435547, 0.08246307373046875, 0.082142333984375, 0.08239833831787109, 0.08545458984375, 0.08251763153076172, 0.08266998291015625, 0.08279631805419922, 0.08314457702636718, 0.08295868682861328, 0.08302591705322265, 0.08317337799072265, 0.08251545715332032, 0.08211235046386718, 0.08249922943115234, 0.08250879669189454, 0.08272905731201172, 0.08293119812011719, 0.08236697387695313, 0.08254988861083984, 0.08301046752929687, 0.08304627227783203, 0.08314617919921875, 0.08312230682373047, 0.08291372680664062, 0.08296857452392578, 0.08287801361083984, 0.0828809585571289, 0.08301270294189453, 0.08309843444824219, 0.0827495346069336, 0.08294809722900391, 0.08324095916748046, 0.08281292724609375, 0.08385536193847656, 0.08291715240478516, 0.08262895965576172, 0.08327565002441406, 0.08271590423583984, 0.08258329772949219, 0.0827914276123047, 0.08333740997314452, 0.08302301025390625, 0.08364080047607422, 0.08288658905029297, 0.08406454467773437, 0.08314262390136719, 0.08282726287841796, 0.08620992279052735, 0.09071846771240234]",tokens/s,12.032863178536855,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,3191.812096,1350.434816,0.0,947.912704,879.697408,s,1,10.689001953125,10.689001953125,0.0,10.689001953125,10.689001953125,10.689001953125,10.689001953125,[10.689001953125],,kWh,9.625699644158581e-05,1.0610166165376894e-05,3.0257246427994877e-05,0.00013712440903495757,,MB,3242.55744,1516.109824,0.0,1092.616192,1018.207232,s,10,0.8615022888183593,0.08615022888183595,0.00026312813005895585,0.08613697814941407,0.08642394638061524,0.08657722282409668,0.08669984397888184,"[0.08585078430175781, 0.08602333068847656, 0.08628582763671876, 0.08617123413085938, 0.08620384216308594, 0.08638988494873047, 0.08578915405273438, 0.08595500946044922, 0.08673049926757813, 0.08610272216796876]",tokens/s,2971.553335640359,kWh,2.517955763901018e-06,2.776555819273579e-07,1.1432193053790783e-06,3.938830651207454e-06,tokens/kWh,64993908.76871613,MB,3246.751744,1662.910464,0.0,1239.416832,1018.209792,s,10,52.80603466796875,5.280603466796874,0.00744157281307721,5.27969873046875,5.2876439453124995,5.2916447265625,5.2948453515625,"[5.2749853515625, 5.27547705078125, 5.2794111328125, 5.2770126953125, 5.2867548828125, 5.279986328125, 5.26719580078125, 5.2956455078125, 5.283681640625, 5.28588427734375]",tokens/s,11.930454614918235,kWh,0.00015523395992694095,1.7122849908015632e-05,5.293049062022149e-05,0.0002252873004551781,tokens/kWh,279642.92648858886,,s,630,52.80007298278812,0.08380963965521919,0.0009220305509765502,0.08361991882324218,0.08460800399780273,0.08522856140136718,0.08826011627197268,"[0.08339295959472656, 0.08347650909423829, 0.08327190399169922, 0.08575794982910157, 0.08345597076416016, 0.08308262634277344, 0.08386822509765625, 0.08440656280517578, 0.08346809387207031, 0.08541401672363282, 0.08694976043701172, 0.08373248291015625, 0.08357814025878907, 0.08295088195800782, 0.08340652465820313, 0.08316127777099609, 0.08345817565917969, 0.08306489562988281, 0.08270780944824219, 0.08318656158447266, 0.08361891174316406, 0.08345049285888671, 0.08415548706054687, 0.08319500732421875, 0.08321385955810547, 0.08266777801513672, 0.08288460540771485, 0.083525634765625, 0.08416015625, 0.08523117065429688, 0.0887608642578125, 0.08377305603027344, 0.08394636535644531, 0.08356018829345703, 0.0835788803100586, 0.08465430450439453, 0.08292912292480469, 0.08293974304199218, 0.08321241760253906, 0.08300758361816406, 0.08376448059082031, 0.08350208282470703, 0.0836126708984375, 0.08292454528808593, 0.08371164703369141, 0.08338671875, 0.08424447631835938, 0.08401715087890625, 0.08388813018798828, 0.08311516571044922, 0.08342819213867188, 0.08306687927246094, 0.08360345458984375, 0.08364435577392579, 0.08305260467529296, 0.08315494537353516, 0.0833201904296875, 0.08329894256591797, 0.08318511962890625, 0.08476732635498047, 0.0838658218383789, 0.08354940795898437, 0.0840115203857422, 0.08318329620361328, 0.08363088226318359, 0.08286418914794921, 0.08294537353515626, 0.08293628692626953, 0.08443097686767578, 0.08319999694824219, 0.08491827392578125, 0.08444732666015625, 0.08384735870361328, 0.08403734588623046, 0.08357273864746094, 0.08379151916503906, 0.08337347412109375, 0.08316352081298828, 0.0835384979248047, 0.08412569427490234, 0.08348166656494141, 0.08345414733886719, 0.08513613128662109, 0.08468479919433594, 0.0852643814086914, 0.08495104217529297, 0.08450662231445312, 0.08404991912841797, 0.08372000122070312, 0.08308988952636719, 0.08508777618408203, 0.08331283569335937, 0.08314064025878906, 0.0844697265625, 0.08352355194091797, 0.08393730926513672, 
0.08326374053955078, 0.08332870483398437, 0.08346125030517579, 0.08400697326660156, 0.08550835418701172, 0.08343958282470704, 0.08383139038085938, 0.08347856140136718, 0.08321654510498047, 0.08350678253173828, 0.08321167755126953, 0.08365676879882812, 0.08349890899658204, 0.08364867401123047, 0.08328585815429687, 0.08321321868896485, 0.08325939178466797, 0.08272854614257813, 0.0830568618774414, 0.08375059509277344, 0.08359577941894532, 0.08375052642822266, 0.08392320251464844, 0.08363836669921874, 0.08361373138427734, 0.08384508514404297, 0.08362957000732422, 0.0832558364868164, 0.08325920104980469, 0.08407469177246094, 0.08355059051513672, 0.08363827514648438, 0.0834557113647461, 0.0831385269165039, 0.0832265625, 0.08344140625, 0.08374646759033202, 0.08328028869628906, 0.08312866973876953, 0.0831976318359375, 0.08322886657714844, 0.08366255950927734, 0.0835157470703125, 0.08309363555908203, 0.08365878295898438, 0.0840101089477539, 0.08347734069824218, 0.08332457733154297, 0.08395986938476563, 0.08425244903564454, 0.08443357086181641, 0.09057500457763672, 0.08409279632568359, 0.08411775970458985, 0.08412528228759765, 0.08431756591796875, 0.08394137573242187, 0.0832437744140625, 0.08323891448974609, 0.08296390533447266, 0.08480210876464844, 0.08337129974365234, 0.08328422546386718, 0.08334111785888672, 0.08309113311767578, 0.08349568176269531, 0.08351766204833984, 0.08298700714111328, 0.08380982208251953, 0.08457234954833984, 0.08455519866943359, 0.08451516723632813, 0.08531199645996093, 0.08408700561523437, 0.08353072357177735, 0.0860967025756836, 0.08362598419189453, 0.08378572845458984, 0.08362393951416015, 0.08351046752929688, 0.08325107574462891, 0.08412364959716796, 0.08340493011474609, 0.08347686767578125, 0.0841404800415039, 0.08376319885253906, 0.08344313812255859, 0.08338070678710938, 0.08310384368896484, 0.08305458831787109, 0.08355667114257813, 0.08301331329345703, 0.08320636749267578, 0.08286553955078126, 0.08343417358398438, 0.08372195434570312, 0.08392733001708984, 0.08339161682128907, 0.08325791931152343, 0.08315660858154297, 0.0835959701538086, 0.08351849365234375, 0.08307113647460937, 0.08309126281738281, 0.08323318481445313, 0.08310160064697265, 0.08343775939941406, 0.08385126495361328, 0.08426547241210937, 0.08385142517089844, 0.08329609680175781, 0.08740460968017578, 0.08404914855957031, 0.08393993377685546, 0.08374076843261719, 0.08505561828613281, 0.08398834991455079, 0.0834491195678711, 0.08330313873291016, 0.08344780731201172, 0.08363164520263672, 0.0863722915649414, 0.0838967056274414, 0.08353513336181641, 0.08442963409423829, 0.08375302124023437, 0.083836669921875, 0.08393138885498047, 0.08376687622070313, 0.0837371826171875, 0.08370521545410156, 0.08452748870849609, 0.08338642883300781, 0.08419267272949219, 0.08371596527099609, 0.08384595489501953, 0.08384297943115235, 0.08414524841308593, 0.08312435150146484, 0.0835118408203125, 0.08362608337402344, 0.08313488006591797, 0.08335094451904297, 0.08365910339355469, 0.0834703369140625, 0.08343727874755859, 0.0841833267211914, 0.08415232086181641, 0.08346173095703124, 0.08331078338623046, 0.08324937438964844, 0.0839649887084961, 0.08341926574707031, 0.08389920043945312, 0.0833986587524414, 0.08340502166748047, 0.08342918395996093, 0.08409737396240234, 0.0836710433959961, 0.08413561248779297, 0.08328838348388672, 0.08333251190185546, 0.08320416259765626, 0.08376322937011718, 0.08321075439453125, 0.08307071685791016, 0.08414205169677734, 0.0847608642578125, 0.08603145599365235, 0.09038531494140625, 0.08526866912841796, 
0.08460678100585937, 0.08376432037353515, 0.08392307281494141, 0.08352969360351563, 0.0834563217163086, 0.08350739288330078, 0.08388639831542968, 0.08397209930419922, 0.08360559844970702, 0.08366508483886718, 0.08402009582519532, 0.08494364929199219, 0.08367520141601563, 0.08330393218994141, 0.08323123168945312, 0.08378982543945312, 0.08363558197021484, 0.08319254302978515, 0.08385116577148438, 0.08326866912841797, 0.08365984344482422, 0.0834610595703125, 0.08334809875488282, 0.08434105682373047, 0.08439177703857421, 0.08339472198486328, 0.08376729583740235, 0.08352886199951172, 0.08390128326416016, 0.0833795166015625, 0.0839947509765625, 0.08332243347167968, 0.08336803436279297, 0.08359145355224609, 0.08307081604003906, 0.08389328002929687, 0.0841928939819336, 0.08360908508300781, 0.08395353698730469, 0.0839373779296875, 0.08390547180175781, 0.08378659057617187, 0.08416710662841796, 0.0837984619140625, 0.08439529418945313, 0.08354684448242188, 0.08366284942626953, 0.0841402587890625, 0.08303119659423829, 0.08335001373291015, 0.08345561981201172, 0.08332144165039063, 0.08387174224853515, 0.08485273742675781, 0.08400028991699218, 0.08381279754638672, 0.08323484802246094, 0.08586854553222656, 0.08555734252929688, 0.0847850570678711, 0.0840447998046875, 0.08591053009033203, 0.0844352035522461, 0.0835907211303711, 0.0844392318725586, 0.08361695861816407, 0.08490707397460938, 0.08648678588867187, 0.08417619323730469, 0.08338451385498047, 0.08394588470458984, 0.08359740447998047, 0.08327782440185547, 0.08340275573730468, 0.08346733093261718, 0.08335577392578125, 0.08310403442382812, 0.08343167877197266, 0.08321414184570312, 0.08374447631835938, 0.08767359924316406, 0.08342499542236329, 0.0837729263305664, 0.08395257568359375, 0.0834450912475586, 0.08363263702392579, 0.08408185577392578, 0.08362095642089844, 0.08363798522949219, 0.08447180938720703, 0.08302591705322265, 0.08278630065917969, 0.0832020492553711, 0.08409638214111329, 0.08426764678955079, 0.08400262451171875, 0.08341693115234375, 0.08302611541748046, 0.08297283172607423, 0.083187744140625, 0.08294806671142578, 0.08323065948486329, 0.08304032135009766, 0.08278809356689452, 0.08274742126464844, 0.08264886474609374, 0.08355023956298828, 0.08387145233154297, 0.0831966094970703, 0.08316652679443359, 0.08374752044677734, 0.08292899322509766, 0.0842040023803711, 0.08448969268798828, 0.08392377471923829, 0.09114595031738282, 0.08405359649658203, 0.08461901092529298, 0.0841468505859375, 0.08418265533447265, 0.0838024673461914, 0.08353823852539062, 0.08366239929199219, 0.08348831939697265, 0.08373433685302735, 0.08366355133056641, 0.0839908447265625, 0.08419631958007813, 0.08409171295166015, 0.08396006774902344, 0.08373836517333984, 0.08351443481445313, 0.0837940444946289, 0.08381136322021485, 0.08337795257568359, 0.08327561950683594, 0.08324662780761719, 0.08322252655029297, 0.08282332611083984, 0.08307145690917969, 0.08316928100585938, 0.08327391815185547, 0.08309286499023437, 0.08274345397949219, 0.08343170928955078, 0.0836280288696289, 0.08329766082763672, 0.08402598571777344, 0.08327366638183593, 0.0834493408203125, 0.08315142059326172, 0.08297833251953125, 0.08313699340820313, 0.08327782440185547, 0.0833446044921875, 0.08288902282714844, 0.0829628143310547, 0.08326358032226562, 0.08315046691894531, 0.08370003509521484, 0.08379398345947266, 0.08336998748779297, 0.08294313812255859, 0.08289571380615235, 0.0830212173461914, 0.08300300598144532, 0.08288355255126953, 0.08310972595214844, 0.08349302673339844, 0.08342848205566407, 
0.08332985687255859, 0.08348678588867188, 0.08358707427978515, 0.08338227081298828, 0.08267680358886718, 0.08605414581298829, 0.08427468872070312, 0.0838453140258789, 0.08388198089599609, 0.08414761352539063, 0.0836878433227539, 0.08345961761474609, 0.08385926055908204, 0.08364937591552735, 0.08518463897705078, 0.08532342529296875, 0.08374703979492187, 0.08367308807373047, 0.08367715454101562, 0.08377142333984375, 0.08369091033935547, 0.0839851531982422, 0.08332067108154297, 0.08371727752685547, 0.08338019561767578, 0.08324559783935546, 0.08328431701660156, 0.08288460540771485, 0.08300310516357422, 0.08301331329345703, 0.0842390365600586, 0.08417667388916016, 0.08388371276855469, 0.08415837097167969, 0.083567138671875, 0.0840189437866211, 0.08394777679443359, 0.08397824096679687, 0.08442880249023438, 0.08507952117919922, 0.08482585906982422, 0.08438658905029296, 0.08427129364013672, 0.08387564849853515, 0.08366508483886718, 0.08349267578125, 0.08350643157958984, 0.08408345794677734, 0.08405187225341797, 0.08385340881347657, 0.08392851257324219, 0.08347296142578126, 0.08334464263916015, 0.08369420623779297, 0.08412598419189453, 0.08403337860107422, 0.08329011535644532, 0.08371405029296874, 0.0839925765991211, 0.08339046478271485, 0.08427129364013672, 0.08499795532226563, 0.08452207946777343, 0.08925814056396485, 0.08671727752685547, 0.08420499420166015, 0.08411977386474609, 0.08318134307861329, 0.08345231628417969, 0.08309097290039062, 0.08303247833251953, 0.083560546875, 0.08363212585449219, 0.08336326599121094, 0.0846215362548828, 0.08309980773925782, 0.0832718734741211, 0.08350508880615234, 0.08390799713134765, 0.0835672607421875, 0.08398966217041015, 0.08356233978271485, 0.0839136962890625, 0.08446099090576172, 0.08399913787841796, 0.08849967956542969, 0.08387042999267579, 0.08338819122314453, 0.0834327392578125, 0.08354886627197265, 0.08347074890136719, 0.08344086456298828, 0.08400784301757812, 0.08470301055908203, 0.08495846557617187, 0.08511353302001953, 0.08518013000488281, 0.0842511978149414, 0.08392060852050781, 0.08366854095458984, 0.08369811248779296, 0.0843605728149414, 0.08368806457519531, 0.08332854461669922, 0.08380258941650391, 0.0833064956665039, 0.084168701171875, 0.08383026885986328, 0.0837938232421875, 0.08365526580810546, 0.08351334381103516, 0.08316928100585938, 0.08351913452148438, 0.08356489562988281, 0.08387932586669922, 0.08359382629394531, 0.08349878692626952, 0.08331696319580079, 0.08319152069091797, 0.08369385528564453, 0.08336707305908203, 0.08319471740722656, 0.08278972625732423, 0.0848288345336914, 0.08468233489990235, 0.08419171142578125, 0.08560777282714843, 0.08365708923339844, 0.08376992034912109, 0.0838222427368164, 0.08391046142578125, 0.08499833679199219, 0.08556339263916016, 0.08370333099365235, 0.08357698822021484, 0.08482643127441407, 0.08350099182128906, 0.08395561981201172, 0.08390249633789063, 0.0833345947265625, 0.08316368103027344, 0.08333328247070312, 0.08385536193847656, 0.08312767791748046, 0.08362201690673828, 0.08351289367675781, 0.0832542724609375, 0.08576812744140624, 0.08306390380859376, 0.08365071868896484, 0.08426108551025391, 0.08404239654541015, 0.0836209259033203, 0.08345069122314454, 0.08371199798583985, 0.08350105285644531, 0.08328498840332031, 0.08425984191894531, 0.08356864166259766, 0.083214111328125, 0.08306915283203126, 0.08334249877929688, 0.08310870361328125, 0.08308940887451172, 0.08291487884521484, 0.0832619857788086, 0.08638044738769532, 0.08346185302734375, 0.08322099304199218, 0.08522537231445312, 0.0833815689086914, 
0.08378201293945313, 0.08324729919433593, 0.08343142700195312, 0.08336383819580079, 0.08348876953125, 0.08308121490478515, 0.08314988708496093, 0.08462841796875, 0.0842335968017578, 0.0859368667602539, 0.08457817840576172, 0.08911670684814453, 0.08440831756591796, 0.08436524963378907, 0.08397011566162109, 0.08484864044189454, 0.08323881530761719, 0.08368287658691406, 0.08367158508300782, 0.08341027069091797, 0.08375263977050781, 0.08296259307861328]",tokens/s,11.931801689087989,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,26318.712832,13989.96992,0.0,13587.447808,13583.186432,s,1,53.512828125,53.512828125,0.0,53.512828125,53.512828125,53.512828125,53.512828125,[53.512828125],,kWh,0.0013542991612500068,0.00014938093056629913,0.0004392834069820023,0.0019429634987983082,,MB,1332.056064,14386.331648,0.0,13962.838016,13923.483136,s,10,1.9093886108398437,0.19093886108398436,0.0003040668460382427,0.19099217224121093,0.19129284057617185,0.1913105972290039,0.19132480255126955,"[0.19119081115722655, 0.19063095092773438, 0.19111862182617187, 0.19039155578613282, 0.19115750122070313, 0.19072451782226563, 0.1912888946533203, 0.19086572265625, 0.19132835388183594, 0.19069168090820313]",tokens/s,1340.743306766654,kWh,5.629516950480488e-06,6.20547243194092e-07,3.7175617347308824e-06,9.967625928405462e-06,tokens/kWh,25683146.80333843,MB,1368.817664,14390.525952,0.0,13964.935168,13852.182528,s,10,93.57234960937498,9.3572349609375,0.020973525335330586,9.35282373046875,9.38316298828125,9.391182568359374,9.397598232421874,"[9.37440234375, 9.381380859375, 9.345634765625, 9.3367001953125, 9.339369140625, 9.3992021484375, 9.362095703125, 9.3600126953125, 9.33394921875, 9.3396025390625]",tokens/s,6.732758155908061,kWh,0.00027096903133618644,2.988966752727507e-05,0.0001370216341898701,0.00043788033305333163,tokens/kWh,143874.9248241001,,s,630,93.56987982177733,0.14852361876472595,0.001222921795477969,0.1482891387939453,0.14987278442382812,0.15068186416625975,0.15320537399291992,"[0.14962092590332032, 0.14910435485839843, 0.1492704620361328, 0.15098281860351562, 0.148748291015625, 0.14891416931152343, 0.14914300537109376, 0.1492274932861328, 0.14937554931640626, 0.14885635375976564, 0.1495979766845703, 0.15041151428222657, 0.1492177276611328, 0.14865408325195312, 0.14796389770507812, 0.14788983154296875, 0.14888380432128906, 0.14729420471191407, 0.14729180908203124, 0.15045257568359374, 0.14846156311035155, 0.14828472900390624, 0.14810797119140626, 0.14802738952636718, 0.15400755310058595, 0.14814157104492187, 0.14834739685058593, 0.1485312042236328, 0.148057373046875, 0.14888829040527343, 0.14752902221679687, 0.14875308227539064, 0.1482936248779297, 0.14928076171875, 0.14806367492675782, 0.14900682067871093, 0.1485207977294922, 0.14781056213378907, 0.14847795104980469, 0.14859638977050782, 0.14781190490722657, 0.14840911865234374, 0.14878105163574218, 0.14974771118164062, 0.14946266174316405, 0.14863398742675782, 0.1486807098388672, 0.1481414337158203, 0.14917027282714843, 0.14840066528320311, 0.1485597381591797, 0.14779753112792968, 0.1509771270751953, 0.14868803405761719, 0.1483880615234375, 0.14882879638671875, 0.14903091430664062, 0.14807391357421876, 0.14828726196289063, 0.1481408386230469, 0.14848371887207032, 0.14809260559082033, 0.14948013305664062, 0.14821331787109376, 0.14807084655761718, 0.14781234741210938, 0.14751335144042968, 
0.14796592712402343, 0.1479451904296875, 0.1479453125, 0.14767152404785155, 0.1484668731689453, 0.14918739318847657, 0.14890567016601564, 0.14938963317871093, 0.14927052307128907, 0.14837702941894532, 0.1476980743408203, 0.1477736053466797, 0.14770527648925783, 0.14792515563964845, 0.14858895874023437, 0.14979798889160156, 0.14772079467773438, 0.14729248046875, 0.15256576538085936, 0.15019772338867188, 0.14797030639648437, 0.14873770141601564, 0.14851545715332032, 0.15327001953125, 0.1479068145751953, 0.14881721496582032, 0.1480975341796875, 0.14809475708007813, 0.14888111877441407, 0.14884320068359375, 0.14817689514160157, 0.14873545837402344, 0.14880796813964844, 0.14867222595214843, 0.14936294555664062, 0.15197532653808593, 0.14997357177734374, 0.14856367492675782, 0.15182044982910156, 0.14836947631835937, 0.1496268768310547, 0.14934646606445312, 0.14989129638671875, 0.14886236572265624, 0.15010028076171875, 0.14992979431152345, 0.14838143920898436, 0.14897225952148438, 0.1481359405517578, 0.14853074645996095, 0.14846144104003905, 0.1504072265625, 0.15050393676757812, 0.14824847412109374, 0.14802134704589845, 0.14845951843261718, 0.14946917724609374, 0.14912294006347657, 0.14907148742675783, 0.14905584716796874, 0.1485455322265625, 0.1519718475341797, 0.14889549255371093, 0.14853350830078124, 0.147884033203125, 0.14767625427246095, 0.14744642639160158, 0.14819967651367189, 0.14851072692871095, 0.14785049438476563, 0.1475018310546875, 0.1482355499267578, 0.14786224365234374, 0.14805564880371094, 0.1479490509033203, 0.14802627563476561, 0.14803897094726562, 0.14839407348632813, 0.1481365509033203, 0.14855372619628907, 0.1481890869140625, 0.14920918273925782, 0.14711807250976563, 0.14781240844726562, 0.15133689880371093, 0.14861721801757813, 0.14941798400878906, 0.14859642028808595, 0.14894522094726562, 0.15164210510253906, 0.14871133422851562, 0.14844248962402343, 0.14755503845214843, 0.14788566589355467, 0.14725514221191408, 0.14775724792480469, 0.14776358032226564, 0.14729011535644532, 0.14847747802734376, 0.1484292755126953, 0.14831202697753906, 0.14780198669433595, 0.14817631530761718, 0.14721034240722655, 0.14806661987304687, 0.14751776123046875, 0.1469210205078125, 0.14708758544921874, 0.14802496337890625, 0.14849224853515625, 0.14764710998535155, 0.14769970703125, 0.14821955871582032, 0.14781385803222657, 0.14728614807128906, 0.14782745361328126, 0.14894688415527343, 0.15025555419921874, 0.14922105407714845, 0.15001849365234374, 0.1483345947265625, 0.1487419891357422, 0.1486396484375, 0.14835292053222657, 0.14906796264648436, 0.14802259826660155, 0.14826156616210937, 0.1523095703125, 0.14815440368652344, 0.1474244842529297, 0.14736671447753907, 0.14723660278320314, 0.15011593627929687, 0.1479664611816406, 0.1480747833251953, 0.14816665649414062, 0.14726921081542968, 0.14882806396484374, 0.14731507873535157, 0.14730003356933594, 0.1468645782470703, 0.14736895751953125, 0.14781484985351562, 0.14730706787109374, 0.14735324096679686, 0.1470610809326172, 0.14766249084472657, 0.14871382141113282, 0.14754611206054688, 0.14829568481445313, 0.149142822265625, 0.1498672332763672, 0.14800413513183594, 0.14865481567382813, 0.14787350463867188, 0.14921116638183593, 0.14946287536621095, 0.149561767578125, 0.1482977294921875, 0.14795365905761718, 0.14850361633300782, 0.1483027801513672, 0.14825584411621093, 0.1473870086669922, 0.1473450164794922, 0.1474566650390625, 0.1485516815185547, 0.14723583984375, 0.14856069946289063, 0.14868220520019532, 0.14914195251464843, 0.14823861694335938, 
0.14792262268066406, 0.1477307586669922, 0.1477459259033203, 0.14742393493652345, 0.1477060546875, 0.14823782348632814, 0.1476240692138672, 0.1479407958984375, 0.14760032653808594, 0.14742938232421876, 0.1492576904296875, 0.14998733520507812, 0.1482872314453125, 0.14920352172851561, 0.14819964599609375, 0.14778746032714843, 0.14803181457519532, 0.14722402954101563, 0.1468687744140625, 0.14746182250976564, 0.14809452819824218, 0.1581923828125, 0.14781986999511718, 0.14813046264648438, 0.14709706115722657, 0.1479009552001953, 0.1478224334716797, 0.14853955078125, 0.1483345947265625, 0.14824844360351563, 0.14714028930664064, 0.1484739532470703, 0.147276123046875, 0.1477775421142578, 0.1480396728515625, 0.14859036254882813, 0.14776956176757813, 0.14760960388183594, 0.14695834350585937, 0.1472283477783203, 0.14801132202148437, 0.14945893859863282, 0.14741708374023438, 0.14803321838378905, 0.15099664306640626, 0.14780482482910157, 0.14938522338867188, 0.14784512329101562, 0.14823161315917968, 0.14853587341308594, 0.1503002166748047, 0.14774931335449218, 0.1471918029785156, 0.14701097106933594, 0.1478047637939453, 0.14811856079101562, 0.14730543518066405, 0.1468661804199219, 0.1466361541748047, 0.14753240966796874, 0.1474394836425781, 0.14726541137695312, 0.14852735900878905, 0.1533050537109375, 0.14809027099609376, 0.1474976043701172, 0.14742848205566406, 0.14778253173828124, 0.14864505004882814, 0.14860986328125, 0.14850250244140625, 0.14804176330566407, 0.1477710418701172, 0.1483695068359375, 0.14930355834960937, 0.148432861328125, 0.151408447265625, 0.14893075561523436, 0.14903855895996093, 0.15009046936035156, 0.14907305908203125, 0.14903372192382813, 0.14958390808105468, 0.14914749145507813, 0.14925155639648438, 0.14887004089355468, 0.14969856262207032, 0.14921932983398437, 0.15290336608886718, 0.15098707580566406, 0.1488240661621094, 0.14862950134277345, 0.1480312042236328, 0.1481444091796875, 0.14900837707519532, 0.14790655517578125, 0.1485373077392578, 0.14745928955078125, 0.1481931915283203, 0.1481143341064453, 0.14859674072265625, 0.1485823974609375, 0.14953836059570313, 0.14913497924804686, 0.1492488250732422, 0.14858172607421874, 0.14922157287597657, 0.1500401611328125, 0.14869389343261719, 0.14941183471679687, 0.15125856018066405, 0.1483118133544922, 0.14932249450683593, 0.1486642303466797, 0.14876687622070311, 0.14874179077148436, 0.1506279296875, 0.15012736511230468, 0.14948503112792969, 0.1486906280517578, 0.14864662170410156, 0.14888153076171876, 0.14940567016601564, 0.1483612518310547, 0.1507631072998047, 0.14783938598632812, 0.14981529235839844, 0.14907527160644532, 0.14924386596679687, 0.1489845428466797, 0.1501890869140625, 0.1489881591796875, 0.1489348449707031, 0.14891827392578125, 0.1483978271484375, 0.14989596557617188, 0.1493011474609375, 0.14867593383789063, 0.1494801940917969, 0.1492085723876953, 0.14890211486816407, 0.15119747924804688, 0.14985061645507813, 0.14962074279785156, 0.1491210174560547, 0.149115966796875, 0.14945376586914064, 0.1485980224609375, 0.148785888671875, 0.14910057067871094, 0.14925564575195313, 0.14863002014160157, 0.14875007629394532, 0.14901458740234375, 0.15208265686035155, 0.14826211547851562, 0.1495314178466797, 0.1486636505126953, 0.14936111450195313, 0.14914988708496094, 0.14913536071777345, 0.15246336364746094, 0.1478448028564453, 0.1504730224609375, 0.1487626190185547, 0.14942108154296874, 0.1492244110107422, 0.15009689331054688, 0.14902169799804688, 0.14833807373046876, 0.14840620422363282, 0.14845404052734376, 0.14839193725585936, 
0.14922752380371093, 0.14698086547851563, 0.14685296630859376, 0.14962371826171875, 0.148102783203125, 0.14796188354492187, 0.1479785919189453, 0.14861053466796875, 0.14963970947265626, 0.14776524353027343, 0.14862710571289062, 0.14754829406738282, 0.14819129943847656, 0.1476630096435547, 0.14760470581054688, 0.14642256164550782, 0.1466544647216797, 0.14735337829589842, 0.14685693359375, 0.14737149047851564, 0.14789651489257813, 0.14733340454101562, 0.14724844360351563, 0.14772883605957032, 0.14797833251953124, 0.14835734558105468, 0.14759933471679687, 0.14755430603027345, 0.14744989013671875, 0.14767082214355468, 0.14677090454101563, 0.14814413452148437, 0.14713241577148437, 0.14662042236328124, 0.1470586853027344, 0.14999346923828125, 0.14789427185058593, 0.1482977294921875, 0.14933999633789063, 0.14785533142089843, 0.148385986328125, 0.14871142578125, 0.14728807067871094, 0.14734335327148437, 0.14738432312011718, 0.14814617919921874, 0.14702796936035156, 0.15067546081542968, 0.14942204284667968, 0.1474190673828125, 0.14717074584960937, 0.147355712890625, 0.1476675567626953, 0.14785516357421874, 0.1476999053955078, 0.14828314208984375, 0.14756668090820313, 0.1477797393798828, 0.1492987823486328, 0.14874024963378907, 0.14891238403320312, 0.14814198303222656, 0.14889366149902344, 0.14798585510253906, 0.14798098754882813, 0.14906982421875, 0.15409939575195314, 0.14880799865722658, 0.14811135864257813, 0.15111167907714843, 0.14867984008789062, 0.14727565002441406, 0.153861083984375, 0.1480744934082031, 0.15068710327148438, 0.15064947509765625, 0.14809686279296874, 0.14718377685546874, 0.14641766357421876, 0.15222579956054688, 0.14809706115722657, 0.1476460723876953, 0.15059292602539062, 0.14841746520996094, 0.14739430236816406, 0.14739199829101562, 0.14843565368652345, 0.15304710388183593, 0.14870323181152345, 0.1497845764160156, 0.14793113708496095, 0.14808268737792968, 0.14871888732910157, 0.14807113647460937, 0.14799871826171876, 0.14721824645996093, 0.14707321166992188, 0.14814317321777343, 0.14708755493164063, 0.1477127685546875, 0.1498439636230469, 0.1501648712158203, 0.148566650390625, 0.14770941162109374, 0.14848249816894532, 0.1480145263671875, 0.1499757080078125, 0.14950399780273438, 0.14776524353027343, 0.14806346130371092, 0.1483835906982422, 0.14821676635742187, 0.14731210327148436, 0.15367593383789063, 0.14791690063476562, 0.1473907470703125, 0.14662611389160157, 0.14698249816894532, 0.1470018310546875, 0.1477626495361328, 0.14799964904785157, 0.1477918701171875, 0.14688665771484374, 0.14733311462402343, 0.1480253448486328, 0.14717747497558595, 0.14775091552734376, 0.14754173278808594, 0.1480583953857422, 0.1473269805908203, 0.14720140075683594, 0.1476566467285156, 0.14803829956054687, 0.1477857666015625, 0.14706871032714844, 0.15024490356445314, 0.14786221313476564, 0.14890599060058593, 0.14804518127441407, 0.14807688903808594, 0.14870457458496095, 0.1521180725097656, 0.14787298583984376, 0.14751417541503906, 0.1474561309814453, 0.14747200012207032, 0.14715122985839843, 0.1475924530029297, 0.14929791259765626, 0.1482659912109375, 0.14882508850097656, 0.14834028625488282, 0.14820573425292968, 0.1491254119873047, 0.14762342834472655, 0.14910064697265624, 0.14837965393066407, 0.14847573852539062, 0.14808079528808593, 0.1482178497314453, 0.14794752502441405, 0.14735565185546876, 0.1477242889404297, 0.147378173828125, 0.1476485137939453, 0.14715267944335939, 0.14759138488769533, 0.15047445678710938, 0.14856211853027343, 0.14875657653808594, 0.14806211853027343, 0.14843449401855469, 
0.14834751892089842, 0.14780201721191405, 0.14817280578613282, 0.14817205810546874, 0.14847254943847657, 0.148291015625, 0.14771168518066408, 0.1486011199951172, 0.1498707275390625, 0.14904066467285157, 0.14908502197265625, 0.14760108947753905, 0.14774249267578124, 0.1477576904296875, 0.15150694274902343, 0.14834483337402343, 0.14739865112304687, 0.14813583374023437, 0.14772972106933593, 0.14749993896484376, 0.14732601928710937, 0.14850469970703126, 0.14815650939941405, 0.14791871643066407, 0.14765718078613282, 0.1480543975830078, 0.14756442260742186, 0.14874624633789063, 0.14705010986328124, 0.15074240112304688, 0.14772682189941405, 0.14783238220214845, 0.14815536499023438, 0.1479516143798828, 0.14779493713378905, 0.15066624450683594, 0.148911865234375, 0.14819354248046876, 0.14842662048339844, 0.14830921936035157, 0.14764738464355467, 0.14768333435058595, 0.1483345947265625, 0.14735360717773438, 0.1480130615234375, 0.14800419616699217]",tokens/s,6.732935867823723,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,14728.306688,10142.810112,0.0,9747.562496,9611.730944,s,1,34.2201796875,34.2201796875,0.0,34.2201796875,34.2201796875,34.2201796875,34.2201796875,[34.2201796875],,kWh,0.0007770563281250058,8.570774036808844e-05,0.00029328690129595825,0.0011560509697890525,,MB,4534.358016,10528.68608,0.0,10112.466944,9989.953536,s,10,1.3355377807617186,0.1335537780761719,0.0007079238234458477,0.1334502716064453,0.13443924865722656,0.13473276214599608,0.13496757293701173,"[0.13502627563476563, 0.1336026611328125, 0.1330015106201172, 0.1329153594970703, 0.13297628784179688, 0.1338626251220703, 0.13385903930664061, 0.1343740234375, 0.13329788208007812, 0.13262211608886718]",tokens/s,1916.8308353956963,kWh,3.955573593243509e-06,4.3622652555131006e-07,2.6355989553242327e-06,7.027399074119053e-06,tokens/kWh,36428840.499867566,MB,4534.358016,10530.783232,0.0,10114.564096,9989.956096,s,10,80.05053466796876,8.005053466796877,0.02531685858535615,8.001396240234374,8.034038037109374,8.038192846679687,8.041516694335938,"[7.952193359375, 7.99734033203125, 8.03311474609375, 7.99462353515625, 8.0054521484375, 8.03254443359375, 7.98501123046875, 8.01132421875, 8.04234765625, 7.9965830078125]",tokens/s,7.870028633950982,kWh,0.00023490137661509009,2.591078800471422e-05,0.00012195066738027532,0.00038276283200007954,tokens/kWh,164592.78365875114,,s,630,80.04725710296626,0.12705913825867668,0.0013620803168982011,0.12683967971801757,0.12818678588867188,0.12899595718383788,0.1319671975708008,"[0.12649116516113282, 0.12545555114746093, 0.12538333129882812, 0.12505817413330078, 0.12551462554931642, 0.12662364959716796, 0.12541961669921875, 0.12550348663330077, 0.12609331512451172, 0.12630445098876952, 0.1256118392944336, 0.12536217498779298, 0.12581068420410157, 0.12666675567626953, 0.12659302520751953, 0.12585747528076172, 0.12592508697509766, 0.1260813446044922, 0.12538655853271485, 
0.12597090911865233, 0.12633497619628906, 0.1263250274658203, 0.12574281311035157, 0.12630016326904298, 0.12559359741210938, 0.12646195220947265, 0.13100604248046874, 0.1264193572998047, 0.12596189117431641, 0.126712158203125, 0.12619481658935547, 0.1268048629760742, 0.1268652801513672, 0.126609375, 0.12605878448486327, 0.12571772766113282, 0.12595820617675782, 0.12764841461181642, 0.12527814483642577, 0.12552191925048828, 0.1249053726196289, 0.1253561248779297, 0.126959228515625, 0.1260752639770508, 0.12618697357177736, 0.1267790069580078, 0.12664720153808592, 0.12654796600341797, 0.12597862243652344, 0.12626739501953124, 0.1260481948852539, 0.12601094055175782, 0.12568831634521485, 0.12668041229248048, 0.12605715179443358, 0.12601446533203126, 0.1261987533569336, 0.12671385955810546, 0.1270456314086914, 0.12625263977050782, 0.12705219268798829, 0.12717884826660156, 0.12660294342041015, 0.12774195098876953, 0.1273446044921875, 0.12821098327636718, 0.12652134704589843, 0.12745113372802735, 0.1281640625, 0.12818409729003907, 0.1298101806640625, 0.13154920959472657, 0.12657481384277344, 0.1287598114013672, 0.1274464340209961, 0.1335650177001953, 0.12803890991210937, 0.12666265869140625, 0.13019287109375, 0.12668163299560548, 0.1264739532470703, 0.1282596435546875, 0.125899169921875, 0.12760665893554687, 0.1260835189819336, 0.12578819274902345, 0.12661551666259765, 0.12666284942626954, 0.12578797149658202, 0.1274449920654297, 0.12633087921142577, 0.12660921478271484, 0.12563670349121095, 0.12578966522216797, 0.1257890853881836, 0.12621526336669922, 0.1261164779663086, 0.12610963439941406, 0.1271539535522461, 0.1268800354003906, 0.12502425384521484, 0.12535142517089845, 0.12551980590820314, 0.12851402282714844, 0.1266951675415039, 0.12675312042236328, 0.1258604507446289, 0.12747299194335937, 0.12592918395996094, 0.12617132568359374, 0.12609337615966798, 0.1262816619873047, 0.1256678695678711, 0.1273079071044922, 0.12688531494140626, 0.1264911346435547, 0.12661331176757812, 0.12654003143310547, 0.12668873596191407, 0.1259727325439453, 0.12722000122070312, 0.12524063873291016, 0.12676390075683594, 0.12656118774414063, 0.12633590698242186, 0.12491161346435548, 0.12725308990478515, 0.12686150360107423, 0.12669747161865236, 0.12809429931640626, 0.1320928955078125, 0.1259980163574219, 0.12838706970214844, 0.12708573150634767, 0.1264293746948242, 0.12899754333496094, 0.1289272918701172, 0.12822569274902343, 0.1265669403076172, 0.12716441345214843, 0.12686675262451172, 0.12775494384765626, 0.12632592010498048, 0.1285804443359375, 0.12642892456054688, 0.127219970703125, 0.12674588775634765, 0.1269378204345703, 0.12834121704101562, 0.1308168029785156, 0.1271329574584961, 0.1267453155517578, 0.1269188766479492, 0.12718057250976564, 0.12930400085449217, 0.12838557434082032, 0.13739004516601563, 0.1263863983154297, 0.12533539581298828, 0.12798902130126952, 0.1272814712524414, 0.12999722290039062, 0.13082829284667968, 0.12764524841308594, 0.1275457305908203, 0.12752041625976562, 0.12770883178710937, 0.1266604766845703, 0.12734681701660155, 0.1268087387084961, 0.12625289916992188, 0.126281982421875, 0.1268875198364258, 0.12698870086669922, 0.12653135681152344, 0.12620646667480467, 0.12657023620605468, 0.12632870483398437, 0.12506492614746093, 0.12639065551757814, 0.126382080078125, 0.12764125061035156, 0.12761328125, 0.12553116607666015, 0.1279333724975586, 0.1272393569946289, 0.1266146240234375, 0.12666646575927734, 0.12677731323242186, 0.1271214065551758, 0.12734668731689452, 0.13081382751464843, 
0.1256736297607422, 0.12571180725097655, 0.12634700775146485, 0.12658735656738282, 0.12598652648925782, 0.12669120025634767, 0.1273002243041992, 0.12740338897705078, 0.1284917755126953, 0.12654332733154297, 0.12675564575195314, 0.12715436553955078, 0.1259306869506836, 0.12638905334472655, 0.12634127807617188, 0.12785852813720702, 0.1263677444458008, 0.1265864028930664, 0.1260157470703125, 0.12689020538330079, 0.12804710388183593, 0.12587213134765626, 0.12829029846191406, 0.12704560089111328, 0.1268045120239258, 0.12610559844970703, 0.126023681640625, 0.12735641479492188, 0.12574934387207032, 0.127048095703125, 0.12612403106689454, 0.12627731323242186, 0.1257616958618164, 0.12623200225830078, 0.1289940185546875, 0.12815359497070314, 0.12667001342773437, 0.1251233901977539, 0.12625305938720705, 0.1297257537841797, 0.1290185546875, 0.12845465087890626, 0.12857139587402344, 0.12759468841552735, 0.1264392318725586, 0.1261480941772461, 0.12576588439941405, 0.12654208374023437, 0.12618256378173828, 0.12730387115478517, 0.12676493072509765, 0.12622108459472656, 0.12669276428222656, 0.1265320281982422, 0.12790595245361328, 0.12589430236816407, 0.12678179168701173, 0.12693910217285156, 0.12657052612304687, 0.12602108764648437, 0.1269273910522461, 0.12696275329589843, 0.12670047760009764, 0.12592908477783202, 0.12537657928466797, 0.1253288345336914, 0.12832850646972657, 0.12700998687744142, 0.12632685089111328, 0.12740665435791015, 0.1276246109008789, 0.12834060668945313, 0.1271968994140625, 0.12635596466064453, 0.12646342468261718, 0.1258317413330078, 0.12493824005126954, 0.1279273910522461, 0.12657350158691405, 0.12676297760009766, 0.12626700592041015, 0.12670365142822265, 0.12760918426513673, 0.12791216278076173, 0.12755126190185548, 0.12724649810791017, 0.1270208969116211, 0.12647628784179688, 0.12739552307128907, 0.1270572509765625, 0.12701590728759765, 0.12668723297119142, 0.12617113494873047, 0.1276570587158203, 0.12700569915771484, 0.12662364959716796, 0.12676464080810546, 0.12714556884765624, 0.12687648010253907, 0.12641484832763672, 0.12619366455078124, 0.1271398391723633, 0.1267116470336914, 0.12721974182128906, 0.12651238250732422, 0.12714278411865235, 0.126823486328125, 0.12666483306884765, 0.12629856109619142, 0.1274493408203125, 0.12670169830322264, 0.12700582122802734, 0.12966761779785158, 0.12836457824707032, 0.1275149459838867, 0.12661555480957032, 0.12851405334472657, 0.1298041229248047, 0.127401123046875, 0.12813209533691405, 0.1283524169921875, 0.1269390411376953, 0.12801164245605468, 0.12712989044189454, 0.1270214080810547, 0.12682879638671876, 0.1270023651123047, 0.1269844512939453, 0.12798566436767578, 0.1280307159423828, 0.1323351287841797, 0.12742610931396484, 0.12845257568359375, 0.12756473541259766, 0.1275014114379883, 0.128787353515625, 0.1275533142089844, 0.128272705078125, 0.1274059829711914, 0.12705792236328126, 0.12691267395019531, 0.12571222686767577, 0.12474285125732422, 0.13084072875976563, 0.12620252990722655, 0.1264353256225586, 0.12670172882080077, 0.12781346893310547, 0.12749199676513673, 0.12737750244140625, 0.127421630859375, 0.12795785522460937, 0.12852940368652344, 0.12772988891601564, 0.12761110687255858, 0.1274043197631836, 0.12747545623779297, 0.12663017272949217, 0.1277496337890625, 0.1275144958496094, 0.12865350341796875, 0.1280047607421875, 0.12893798828125, 0.12760889434814454, 0.1281383056640625, 0.1283162841796875, 0.12760387420654296, 0.12738601684570314, 0.12795539093017577, 0.1274286117553711, 0.12753715515136718, 0.12708175659179688, 
0.12740476989746094, 0.12761027526855467, 0.12720829010009765, 0.12727616119384766, 0.12790557098388672, 0.12633773040771484, 0.12668534088134767, 0.12684083557128906, 0.12672525024414064, 0.12667378997802733, 0.12666883087158204, 0.1268080291748047, 0.12755165100097657, 0.12623046112060546, 0.1278726043701172, 0.12691305541992187, 0.13165945434570311, 0.1277740478515625, 0.12698095703125, 0.12597862243652344, 0.12592127990722657, 0.1281249237060547, 0.12802653503417968, 0.12632073974609376, 0.12982791137695313, 0.12613053131103516, 0.12624246215820312, 0.12543670654296876, 0.1250057907104492, 0.12553644561767577, 0.12720681762695313, 0.12676080322265626, 0.1259362564086914, 0.12617945861816407, 0.1264754867553711, 0.12457561492919922, 0.12570057678222657, 0.12595244598388672, 0.12682444763183592, 0.1258711395263672, 0.12613075256347656, 0.12583773040771484, 0.12711116790771484, 0.12596224212646484, 0.12602095794677734, 0.12560185241699218, 0.1262200927734375, 0.1257520980834961, 0.12574489593505858, 0.12815589904785157, 0.1264005126953125, 0.1257850875854492, 0.12623145294189453, 0.12673414611816405, 0.1271995162963867, 0.1265519027709961, 0.12600768280029298, 0.12684060668945313, 0.12770918273925783, 0.1269078369140625, 0.13011820983886718, 0.12739584350585936, 0.12715817260742188, 0.12646409606933592, 0.1264353256225586, 0.12657244873046875, 0.12594390106201173, 0.12697420501708984, 0.12629376220703126, 0.12624636840820314, 0.12814306640625, 0.12722486114501952, 0.12750771331787109, 0.12684092712402345, 0.12763590240478515, 0.12678758239746094, 0.12680806732177735, 0.1280128936767578, 0.1276006393432617, 0.12689817810058593, 0.12746342468261718, 0.1271357421875, 0.1271357421875, 0.12662783813476564, 0.12694937896728514, 0.12766617584228515, 0.12811468505859375, 0.12797456359863282, 0.12768956756591796, 0.1278908462524414, 0.12688236999511718, 0.12651113891601562, 0.12710486602783203, 0.12768067169189454, 0.13018316650390624, 0.12640460968017578, 0.12690870666503906, 0.12695318603515626, 0.12619065856933595, 0.13767366027832031, 0.12683058929443358, 0.1276804504394531, 0.13040771484375, 0.12647459411621093, 0.12775801849365234, 0.12641542053222657, 0.12630786895751953, 0.12656707000732423, 0.12717874908447266, 0.12827381896972656, 0.12689043426513671, 0.126210205078125, 0.12740560150146485, 0.12616486358642579, 0.12706400299072265, 0.12647081756591796, 0.126501953125, 0.1275381088256836, 0.12665184020996093, 0.12670550537109376, 0.12683747100830078, 0.12617298889160156, 0.1270540771484375, 0.12573693084716797, 0.12631993865966798, 0.12690089416503905, 0.12744703674316407, 0.12767964935302734, 0.1268376007080078, 0.12544796752929688, 0.1254832305908203, 0.1262449264526367, 0.12742649841308593, 0.1262998046875, 0.1261182403564453, 0.12658287811279298, 0.12662774658203124, 0.12582281494140626, 0.12650717163085937, 0.1262808303833008, 0.1264625930786133, 0.12685699462890626, 0.12705391693115234, 0.12739798736572266, 0.1268019485473633, 0.1265739517211914, 0.12800099182128907, 0.12821670532226562, 0.12783673858642577, 0.12719699096679687, 0.1272845458984375, 0.1279086380004883, 0.1270885467529297, 0.12740755462646483, 0.12698271942138672, 0.1275555877685547, 0.12719087982177735, 0.12695977783203125, 0.12734003448486328, 0.12680652618408203, 0.1269614715576172, 0.12716464233398436, 0.12683875274658202, 0.12712754821777345, 0.12732621002197267, 0.12679759979248048, 0.12772310638427734, 0.1277221145629883, 0.12638800048828125, 0.12651881408691407, 0.1279813461303711, 0.12848016357421874, 
0.131235107421875, 0.1263458251953125, 0.1275141143798828, 0.12699404907226564, 0.13524044799804688, 0.13757008361816406, 0.1280619201660156, 0.12792217254638671, 0.12945408630371094, 0.12721766662597656, 0.12676505279541014, 0.12680397033691407, 0.12685517120361328, 0.12711526489257813, 0.12762079620361327, 0.12784877014160156, 0.1269516830444336, 0.12766531372070314, 0.12742716979980467, 0.12698419189453125, 0.12757401275634767, 0.12773094177246094, 0.12768118286132812, 0.1276655044555664, 0.12768720245361329, 0.1276539535522461, 0.12869573974609375, 0.12751305389404297, 0.1261647644042969, 0.12559529876708983, 0.12651602935791015, 0.12801632690429687, 0.12506527709960938, 0.12434786987304687, 0.12576131439208985, 0.12668211364746093, 0.12708223724365234, 0.12663926696777345, 0.1268580780029297, 0.12575926208496094, 0.12725062561035155, 0.12702342224121094, 0.12741990661621094, 0.12759664154052736, 0.12732166290283203, 0.1270052490234375, 0.12753510284423827, 0.12722994995117187, 0.12735874938964845, 0.12684925079345702, 0.12648652648925782, 0.1266841583251953, 0.1280369873046875, 0.12664630126953125, 0.12703215789794922, 0.12669068908691405, 0.12759465789794922, 0.1266990737915039, 0.12612086486816407, 0.1264680938720703, 0.12721695709228514, 0.12626998138427734, 0.12640681457519531, 0.126536865234375, 0.1272369613647461, 0.12636160278320313, 0.12615042877197266, 0.1269697952270508, 0.12692483520507813, 0.12669773101806642, 0.1266728973388672, 0.12652543640136718, 0.12617436981201172, 0.12767654418945312, 0.1273947525024414, 0.1271703338623047, 0.1276374740600586, 0.12987091064453124, 0.12684796905517579, 0.12650847625732423, 0.12627410888671875, 0.12585158538818358, 0.12682041931152344, 0.1270203552246094, 0.1270074234008789, 0.12680985260009767, 0.13043122863769532, 0.12649267578125, 0.12600633239746092, 0.12930758666992187, 0.1258138885498047, 0.12746431732177735, 0.1279815673828125, 0.12647792053222656]",tokens/s,7.870350875228854,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,7904.325632,4726.849536,0.0,4324.327424,4324.229632,s,1,19.678267578125,19.678267578125,0.0,19.678267578125,19.678267578125,19.678267578125,19.678267578125,[19.678267578125],,kWh,0.00036773720859167343,4.055722211759645e-05,0.00011900148409002553,0.0005272959147992955,,MB,1703.919616,5058.199552,0.0,4634.70592,4579.358208,s,10,0.5627925796508788,0.05627925796508789,0.00020275353563124158,0.0562708969116211,0.056428320312499995,0.05659752006530762,0.05673287986755371,"[0.05639072036743164, 0.056078495025634764, 0.05609747314453125, 0.05634121704101563, 0.05624720001220703, 0.05629459381103516, 0.056381759643554685, 0.056151039123535154, 0.05604336166381836, 
0.056766719818115235]",tokens/s,4548.745119539534,kWh,1.6699579551665616e-06,1.8416675943702592e-07,1.097321512777099e-06,2.951446227380686e-06,tokens/kWh,86737138.432365,MB,1712.238592,5060.296704,0.0,4634.70592,4519.089152,s,10,26.080097900390626,2.608009790039062,0.008913860438076852,2.6055440673828123,2.6192439453125,2.619385888671875,2.619499443359375,"[2.617414794921875, 2.600658935546875, 2.606550537109375, 2.593564697265625, 2.616010498046875, 2.61921240234375, 2.61952783203125, 2.60453759765625, 2.598369873046875, 2.604250732421875]",tokens/s,24.156351038489163,kWh,7.660550230025272e-05,8.449492064891773e-06,3.892196050262507e-05,0.00012397695486776958,tokens/kWh,508158.9563737396,,s,630,26.078191844940207,0.04139395530942887,0.0005585993066193567,0.041262895584106445,0.04167123031616211,0.041923864936828606,0.04405609390258789,"[0.041619007110595706, 0.04149270248413086, 0.041271232604980466, 0.04122937774658203, 0.041415233612060544, 0.04131248092651367, 0.041445537567138674, 0.04131852722167969, 0.041375232696533204, 0.04115033721923828, 0.0411627197265625, 0.041046463012695315, 0.041113697052001956, 0.04118732833862305, 0.04114585494995117, 0.041127647399902344, 0.04112259292602539, 0.04118937683105469, 0.041111614227294924, 0.0443309440612793, 0.04160102462768555, 0.04139177703857422, 0.04223587036132812, 0.04143142318725586, 0.04156550216674805, 0.041564865112304686, 0.0416069450378418, 0.04144675064086914, 0.04146265411376953, 0.04145967864990235, 0.04156118392944336, 0.041476993560791015, 0.04162771224975586, 0.041509246826171874, 0.04154534530639648, 0.04139750289916992, 0.04270975875854492, 0.041444961547851565, 0.041640350341796875, 0.044426815032958984, 0.04162355041503906, 0.04151055908203125, 0.04180774307250976, 0.04152822494506836, 0.041562110900878906, 0.04144348907470703, 0.04155344009399414, 0.04145769500732422, 0.04145590209960937, 0.04164812850952149, 0.04159020614624023, 0.04161955261230469, 0.041421054840087894, 0.041348991394042967, 0.04147619247436524, 0.04131676864624023, 0.041432926177978516, 0.041218048095703126, 0.04155801773071289, 0.041369503021240234, 0.04135654449462891, 0.04131676864624023, 0.04134716796875, 0.04115740966796875, 0.04116064071655273, 0.041054527282714845, 0.041277408599853516, 0.041080543518066406, 0.04114828872680664, 0.041831615447998044, 0.04137670516967774, 0.04103577423095703, 0.04124671936035156, 0.04126924896240235, 0.04131427383422852, 0.04140854263305664, 0.04120912170410156, 0.04154032135009766, 0.04118527984619141, 0.041280513763427736, 0.04122550582885742, 0.041549537658691404, 0.041236320495605466, 0.04110351943969726, 0.04112179183959961, 0.04110540771484375, 0.041188575744628905, 0.041057056427001956, 0.04132592010498047, 0.04147267150878906, 0.041491840362548826, 0.041181438446044924, 0.041192127227783204, 0.04121977615356445, 0.041191425323486325, 0.04102703857421875, 0.04120787048339844, 0.04128438568115234, 0.041428672790527345, 0.04114556884765625, 0.0411759033203125, 0.04152313613891601, 0.041215999603271485, 0.04142822265625, 0.04116902542114258, 0.04163756942749024, 0.04114566421508789, 0.041248382568359374, 0.041297088623046874, 0.04125369644165039, 0.04129075241088867, 0.04107571029663086, 0.04120576095581055, 0.041193473815917966, 0.041380928039550784, 0.04119372940063477, 0.04131881713867187, 0.041616863250732425, 0.041192161560058595, 0.041128032684326174, 0.04106208038330078, 0.041291423797607425, 0.04149929428100586, 0.04148223876953125, 0.04170479965209961, 0.04141532897949219, 0.041952606201171874, 
0.041373695373535156, 0.041409278869628904, 0.041109504699707033, 0.041113086700439457, 0.04122214508056641, 0.04121651077270508, 0.04123648071289063, 0.043498912811279294, 0.04164668655395508, 0.041316352844238284, 0.04121225738525391, 0.041504417419433594, 0.041637889862060545, 0.041371295928955075, 0.04114425659179687, 0.041073055267333985, 0.04113929748535156, 0.041242752075195316, 0.041253662109375, 0.04142844772338867, 0.04141507339477539, 0.04138819122314453, 0.04122627258300781, 0.04118451309204101, 0.04130464172363281, 0.04152556610107422, 0.041365024566650394, 0.044542240142822265, 0.04132044982910156, 0.04119087982177734, 0.04134064102172851, 0.04116355133056641, 0.041166881561279296, 0.04116889572143555, 0.04119571304321289, 0.04132863998413086, 0.04134089660644531, 0.04146774291992188, 0.04115865707397461, 0.04108835220336914, 0.041153182983398436, 0.04111782455444336, 0.04119321441650391, 0.04125948715209961, 0.04163923263549805, 0.04145801544189453, 0.04118937683105469, 0.041842689514160154, 0.041314559936523436, 0.04137887954711914, 0.04114236831665039, 0.0411956787109375, 0.04116320037841797, 0.04112297439575195, 0.0410939826965332, 0.041118751525878905, 0.041129150390625, 0.04112771224975586, 0.041189537048339844, 0.04120163345336914, 0.041142143249511716, 0.041213375091552734, 0.041072959899902346, 0.04107843017578125, 0.04119267272949219, 0.04129667282104492, 0.04122745513916016, 0.041339710235595704, 0.041560352325439455, 0.04127920150756836, 0.041010814666748045, 0.04137984085083008, 0.04130806350708008, 0.04109161758422852, 0.04111763381958008, 0.04110496139526367, 0.04118368148803711, 0.04099423980712891, 0.04111523056030274, 0.04110208129882813, 0.041033119201660154, 0.04128041458129883, 0.041027488708496096, 0.04106393432617188, 0.041165313720703124, 0.04108915328979492, 0.04098239898681641, 0.041387744903564457, 0.041099552154541016, 0.04103702545166016, 0.041100128173828125, 0.04124870300292969, 0.041368896484375, 0.04112793731689453, 0.04108153533935547, 0.04139212799072266, 0.04168918228149414, 0.04112169647216797, 0.041048385620117187, 0.041105087280273435, 0.0411096305847168, 0.041017215728759764, 0.041115646362304685, 0.04111529541015625, 0.04106703948974609, 0.04105606460571289, 0.04106768035888672, 0.04105424118041992, 0.04106227111816406, 0.041044929504394534, 0.04098867034912109, 0.04100246429443359, 0.041064769744873046, 0.04114873504638672, 0.04118518447875977, 0.04170307159423828, 0.04131875228881836, 0.041137569427490236, 0.041151073455810545, 0.041373695373535156, 0.04114364624023437, 0.041331039428710935, 0.040998401641845705, 0.04114425659179687, 0.04106252670288086, 0.04126499176025391, 0.04134918212890625, 0.041145599365234375, 0.04256639862060547, 0.04345436859130859, 0.04141091156005859, 0.04127926254272461, 0.04137500762939453, 0.04114070510864258, 0.0411544303894043, 0.04116105651855469, 0.04116275024414062, 0.041166847229003906, 0.04164028930664063, 0.041497760772705075, 0.041437919616699216, 0.04131520080566406, 0.041470783233642575, 0.04158588790893555, 0.04122073745727539, 0.0410912971496582, 0.04335619354248047, 0.04130319976806641, 0.046212959289550784, 0.04187142562866211, 0.04148729705810547, 0.04128345489501953, 0.04123046493530273, 0.04121811294555664, 0.0412437744140625, 0.04119020843505859, 0.04145084762573242, 0.04118185424804687, 0.04121136093139648, 0.04132137680053711, 0.04110707092285156, 0.041205184936523434, 0.04116332626342773, 0.04123433685302735, 0.041433441162109376, 0.04114230346679688, 0.04141955184936524, 
0.041411422729492185, 0.04119766235351562, 0.04110063934326172, 0.04115353775024414, 0.041191070556640626, 0.04119087982177734, 0.04406451034545898, 0.04117916870117187, 0.04123484802246094, 0.04127891159057617, 0.04119801712036133, 0.04125254440307617, 0.04123855972290039, 0.041599742889404295, 0.04142860794067383, 0.041328319549560545, 0.04132044982910156, 0.042019519805908206, 0.04160518264770508, 0.0414535026550293, 0.041221248626708985, 0.04126828765869141, 0.04151776123046875, 0.041569599151611326, 0.041433792114257816, 0.04159078216552734, 0.041201984405517575, 0.04123616027832031, 0.0412303352355957, 0.041082878112792966, 0.041181182861328124, 0.041624832153320315, 0.04148643112182617, 0.04146448135375977, 0.04137318420410156, 0.04139468765258789, 0.04123068618774414, 0.041258655548095706, 0.04173411178588867, 0.04151708984375, 0.04124195098876953, 0.041288352966308596, 0.04168294525146484, 0.04170342254638672, 0.04148223876953125, 0.04128988647460938, 0.04123376083374024, 0.04140419387817383, 0.041579105377197265, 0.04158240127563476, 0.04137811279296875, 0.041366592407226566, 0.041325824737548825, 0.04135084915161133, 0.04134707260131836, 0.04137779235839844, 0.04147609710693359, 0.042272544860839846, 0.04147145462036133, 0.04141094589233398, 0.041503231048583986, 0.041335807800292966, 0.04143590545654297, 0.04134924697875977, 0.04153865432739258, 0.041417633056640625, 0.04177459335327149, 0.04147071838378906, 0.04171049499511719, 0.041392833709716796, 0.04154998397827148, 0.041473438262939456, 0.041566272735595704, 0.04157484817504883, 0.042096672058105467, 0.041636928558349606, 0.04141350555419922, 0.04150268936157227, 0.04665683364868164, 0.04188873672485351, 0.04178243255615234, 0.04156694412231445, 0.04187673568725586, 0.041837310791015624, 0.04182220840454102, 0.04166656112670898, 0.04149068832397461, 0.04141644668579102, 0.04138915252685547, 0.041489215850830076, 0.04168918228149414, 0.041453342437744144, 0.0412562255859375, 0.041479103088378905, 0.04135116958618164, 0.04145481491088867, 0.04330806350708008, 0.04319788742065429, 0.0417459831237793, 0.04162854385375977, 0.041586559295654295, 0.04155801773071289, 0.04158185577392578, 0.041437919616699216, 0.04140851211547852, 0.04136671829223633, 0.04174016189575196, 0.041757217407226564, 0.04171408081054687, 0.04170927810668945, 0.04173756790161133, 0.041619873046875, 0.041482463836669925, 0.04232460784912109, 0.042592159271240236, 0.042293022155761716, 0.04216012954711914, 0.04237849426269531, 0.04174515151977539, 0.04176867294311523, 0.04143094253540039, 0.041339263916015626, 0.04144025421142578, 0.04126822280883789, 0.04114227294921875, 0.04109062576293945, 0.04129183959960937, 0.041175743103027344, 0.041164478302001956, 0.041670654296875, 0.04135558319091797, 0.041443264007568356, 0.0411454086303711, 0.04115769577026367, 0.04126268768310547, 0.04129596710205078, 0.041189311981201175, 0.041240577697753904, 0.041443649291992186, 0.04134265518188476, 0.041180862426757815, 0.04153577423095703, 0.04159641647338867, 0.0413803825378418, 0.04126134490966797, 0.041311969757080076, 0.04136665725708008, 0.04137779235839844, 0.04125900650024414, 0.04114022445678711, 0.04121193695068359, 0.041244640350341796, 0.04114636611938476, 0.04149612808227539, 0.04120380783081055, 0.041124191284179684, 0.04157827377319336, 0.04130364990234375, 0.04232854461669922, 0.0415827522277832, 0.0413175048828125, 0.04144627380371094, 0.04120150375366211, 0.04129983901977539, 0.04144976043701172, 0.041184543609619144, 0.04131078338623047, 
0.0411440315246582, 0.041245121002197266, 0.0411926383972168, 0.04118204879760742, 0.04121187210083008, 0.04124671936035156, 0.04117504119873047, 0.0411255989074707, 0.04113670349121094, 0.041163776397705076, 0.04154032135009766, 0.041166847229003906, 0.041078784942626956, 0.041095169067382815, 0.04108492660522461, 0.04118707275390625, 0.041091102600097656, 0.041117919921875, 0.04132659149169922, 0.04112284851074219, 0.041255905151367185, 0.04133583831787109, 0.04103071975708008, 0.04105411148071289, 0.041109504699707033, 0.040958206176757814, 0.04115840148925781, 0.04113334274291992, 0.041126720428466795, 0.041282657623291016, 0.04122195053100586, 0.043166721343994144, 0.04216944122314453, 0.04158534240722656, 0.043237342834472656, 0.04143539047241211, 0.04123433685302735, 0.04112598419189453, 0.04126310348510742, 0.04127334213256836, 0.041150592803955076, 0.04177196884155274, 0.04141318511962891, 0.04132195281982422, 0.04151270294189453, 0.04106947326660156, 0.04110732650756836, 0.04254105758666992, 0.041098369598388675, 0.04187615966796875, 0.04390111923217774, 0.0413040657043457, 0.04133251190185547, 0.041242847442626955, 0.041164798736572264, 0.04129132843017578, 0.04113043212890625, 0.04124812698364258, 0.0411363525390625, 0.0415624008178711, 0.041189502716064454, 0.04116070556640625, 0.041124961853027345, 0.04112003326416016, 0.040993408203125, 0.04126310348510742, 0.04111702346801758, 0.04124934387207031, 0.04116284942626953, 0.04111955261230469, 0.04104816055297852, 0.041049217224121096, 0.04102838516235351, 0.04109097671508789, 0.04105683135986328, 0.040963935852050784, 0.0410599365234375, 0.041337120056152345, 0.04120598220825195, 0.0411295051574707, 0.041099262237548825, 0.041156864166259764, 0.04150067138671875, 0.04095795059204101, 0.04103071975708008, 0.041048351287841796, 0.04102006530761719, 0.04100096130371094, 0.04158371353149414, 0.04100732803344727, 0.041215904235839845, 0.04105401611328125, 0.04106243133544922, 0.04200057601928711, 0.04097488021850586, 0.04088800048828125, 0.041013790130615235, 0.0409747200012207, 0.04097177505493164, 0.04092707061767578, 0.040935295104980465, 0.04130857467651367, 0.04143417739868164, 0.04136851119995117, 0.04109718322753906, 0.04121193695068359, 0.04103500747680664, 0.04167951965332031, 0.04090275192260742, 0.04091494369506836, 0.041011390686035154, 0.041156192779541016, 0.041373695373535156, 0.04130428695678711, 0.041276927947998046, 0.041154399871826175, 0.041228702545166016, 0.04111324691772461, 0.04103843307495117, 0.04110918426513672, 0.041158912658691406, 0.041236286163330076, 0.04120947265625, 0.04111836624145508, 0.04106991958618164, 0.04114905548095703, 0.04123980712890625, 0.04113808059692383, 0.04111974334716797, 0.041336673736572266, 0.04106563186645508, 0.04103286361694336, 0.04108972930908203, 0.04114432144165039, 0.04101529693603516, 0.04118924713134765, 0.04108915328979492, 0.041441024780273436, 0.04109062576293945, 0.041108352661132816, 0.04135715103149414, 0.04116873550415039, 0.041165279388427733, 0.04113568115234375, 0.041128032684326174, 0.04111974334716797, 0.04117299270629883, 0.044035488128662106, 0.04153200149536133, 0.04117913436889648, 0.04122828674316406, 0.041852928161621096, 0.04167641448974609, 0.04136495971679687, 0.04115727996826172, 0.04107059097290039, 0.04153164672851563, 0.041120929718017576, 0.04116976165771485, 0.041024993896484375, 0.04109161758422852, 0.041259326934814454, 0.041377471923828124, 0.041043872833251956, 0.041621601104736325, 0.0412421760559082, 0.041000606536865235, 
0.041269054412841795, 0.046633792877197267]",tokens/s,24.15811662656495,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,15981.961216,10162.66752,0.0,9860.808704,9842.786304,s,1,36.29084765625,36.29084765625,0.0,36.29084765625,36.29084765625,36.29084765625,36.29084765625,[36.29084765625],,kWh,0.0008390498792666563,9.254650124423646e-05,0.0002616190981840072,0.0011932154786948998,,MB,4521.69728,11318.198272,0.0,10890.510336,10339.079168,s,10,6.619237121582032,0.6619237121582031,0.00474585908132618,0.6608150024414062,0.6676619689941407,0.6694010223388672,0.6707922650146485,"[0.6672755126953125, 0.6606759033203125, 0.6711400756835938, 0.6672438354492187, 0.6558547973632812, 0.658312255859375, 0.6568683471679687, 0.6609541015625, 0.6612008666992187, 0.65971142578125]",tokens/s,386.7515172787989,kWh,1.9438482936978828e-05,2.1427885569464264e-06,8.435388692750075e-06,3.001666018667533e-05,tokens/kWh,8528597.066026712,MB,4525.8752,11320.295424,0.0,10892.607488,10341.047808,s,10,100.4824091796875,10.04824091796875,0.06294827329988352,10.03284912109375,10.131389257812499,10.14391630859375,10.15393794921875,"[10.156443359375, 10.12860546875, 10.0948232421875, 10.094025390625, 10.025169921875, 9.97828125, 10.0405283203125, 9.9964326171875, 10.003814453125, 9.96428515625]",tokens/s,6.269754130530485,kWh,0.0002921956864801046,3.223210552726303e-05,0.0001303319167654504,0.00045475970877281784,tokens/kWh,138534.7003805753,,s,630,100.48042831420901,0.15949274335588728,0.001734761335726313,0.15932787322998046,0.16155320587158203,0.1622549690246582,0.1652786442565918,"[0.16196595764160157, 0.16126290893554687, 0.16931634521484376, 0.16210377502441406, 0.16048681640625, 0.16058035278320312, 0.1583061065673828, 0.1702362823486328, 0.15927296447753905, 0.16079798889160157, 0.16551190185546874, 0.15971737670898437, 0.1614028778076172, 0.16070246887207032, 0.15963667297363282, 0.16085824584960937, 0.1599249267578125, 0.1614520263671875, 0.16070249938964845, 0.1606082305908203, 0.16054246520996093, 0.16071090698242188, 0.16094618225097657, 0.16154237365722657, 0.16172940063476562, 0.16129525756835938, 0.16117555236816405, 0.16132301330566406, 0.1606781768798828, 0.16110893249511718, 0.16233346557617187, 0.16145613098144532, 0.16289315795898437, 0.1629313659667969, 0.16220979309082031, 0.16075120544433594, 0.16080323791503906, 0.1619435577392578, 0.16093504333496095, 0.16112652587890625, 0.1608805694580078, 0.16342918395996095, 0.16220979309082031, 0.16059341430664062, 0.15983424377441408, 0.16072947692871092, 0.16022674560546876, 0.15996780395507812, 0.15966822814941406, 0.16044032287597657, 0.15954534912109375, 0.15905996704101563, 0.16016998291015624, 0.16065330505371095, 0.16095146179199218, 0.16170889282226564, 0.15940415954589843, 0.16076377868652345, 0.16214630126953125, 0.1603832244873047, 0.15992396545410156, 0.1604464569091797, 0.1598279724121094, 
0.16067776489257812, 0.16170742797851562, 0.16536003112792969, 0.16078662109375, 0.16168960571289062, 0.16252517700195312, 0.15934873962402343, 0.16042189025878906, 0.16169573974609375, 0.16002371215820313, 0.1607422332763672, 0.15945709228515625, 0.16024783325195313, 0.16120230102539063, 0.1595371856689453, 0.1596927947998047, 0.1593314208984375, 0.16032643127441407, 0.16001353454589845, 0.16039004516601563, 0.15950563049316407, 0.15952566528320314, 0.15927296447753905, 0.16031539916992188, 0.16034611511230468, 0.15971737670898437, 0.16070246887207032, 0.16311091613769532, 0.16065676879882812, 0.16085055541992188, 0.16112435913085937, 0.15990988159179687, 0.16098822021484374, 0.15998252868652343, 0.1604464569091797, 0.16155033874511718, 0.16064306640625, 0.16072012329101562, 0.16227778625488282, 0.16044886779785156, 0.16055667114257813, 0.16074111938476562, 0.16084442138671876, 0.1615069122314453, 0.16255162048339844, 0.1651755828857422, 0.16215875244140626, 0.1606739501953125, 0.1611705322265625, 0.1612027587890625, 0.16091375732421875, 0.16107264709472657, 0.16044432067871095, 0.16050764465332032, 0.1599332733154297, 0.16005885314941407, 0.1591893768310547, 0.16010572814941407, 0.15968150329589845, 0.16188589477539062, 0.16030335998535156, 0.16135910034179687, 0.1591057891845703, 0.1634596405029297, 0.16078044128417968, 0.16179200744628905, 0.15975013732910157, 0.15990374755859374, 0.16050994873046875, 0.16039631652832032, 0.16071746826171876, 0.16083180236816405, 0.15980543518066406, 0.16059379577636718, 0.15988339233398438, 0.1614720916748047, 0.15949046325683594, 0.16003001403808595, 0.16242758178710937, 0.15958790588378907, 0.15999635314941407, 0.1601925048828125, 0.1606177978515625, 0.16074946594238282, 0.16081776428222655, 0.16164883422851561, 0.16078623962402344, 0.16143379211425782, 0.1602100830078125, 0.15967718505859374, 0.16049183654785157, 0.15940333557128905, 0.15920994567871094, 0.16062054443359375, 0.1597337646484375, 0.1601781768798828, 0.16065093994140625, 0.158911865234375, 0.16353286743164064, 0.1586960906982422, 0.15927273559570312, 0.15956121826171876, 0.15931219482421874, 0.1605655059814453, 0.1585725402832031, 0.16043251037597656, 0.1593507843017578, 0.16068751525878905, 0.16038768005371093, 0.1600284423828125, 0.15926815795898439, 0.1608338623046875, 0.16002639770507812, 0.1601974334716797, 0.1599877166748047, 0.1593579864501953, 0.15897084045410156, 0.16291168212890625, 0.15906259155273436, 0.15901286315917967, 0.15939741516113282, 0.15862828063964843, 0.16141314697265624, 0.15898188781738282, 0.15988540649414062, 0.15952093505859374, 0.1622337646484375, 0.16060211181640624, 0.1612926025390625, 0.1606986541748047, 0.16047698974609376, 0.16150752258300782, 0.16, 0.1600614471435547, 0.15963340759277345, 0.15970454406738283, 0.16532073974609374, 0.15998617553710937, 0.15937945556640626, 0.15946957397460937, 0.1598279724121094, 0.1592524871826172, 0.15890771484375, 0.15957267761230468, 0.1611961212158203, 0.16011459350585938, 0.15962503051757812, 0.1593489227294922, 0.15918675231933593, 0.15932191467285156, 0.15985708618164063, 0.1601259765625, 0.16113487243652344, 0.16091314697265624, 0.16017414855957032, 0.1601892547607422, 0.15917465209960938, 0.15978839111328125, 0.15994741821289063, 0.1600555877685547, 0.1610358123779297, 0.1609299774169922, 0.1597412109375, 0.16034844970703124, 0.16062460327148437, 0.16082377624511718, 0.1595897674560547, 0.1599981689453125, 0.1603356475830078, 0.1609876403808594, 0.16010403442382812, 0.16050025939941406, 0.16056704711914063, 
0.16133555603027344, 0.16285491943359376, 0.16042393493652343, 0.16100674438476562, 0.15904351806640624, 0.1590322265625, 0.15901890563964843, 0.16038819885253905, 0.1607362518310547, 0.15972966003417968, 0.15972352600097656, 0.15935282897949218, 0.16036659240722656, 0.15910038757324219, 0.15837603759765626, 0.1596666259765625, 0.15822026062011718, 0.15770013427734375, 0.15721827697753907, 0.15787251281738282, 0.15792547607421875, 0.15738887023925782, 0.1577852783203125, 0.15874949645996095, 0.15923507690429686, 0.15923507690429686, 0.1582335968017578, 0.15826422119140626, 0.15752105712890624, 0.16109869384765624, 0.15784451293945312, 0.15806767272949218, 0.1590497283935547, 0.15819517517089843, 0.15837420654296874, 0.15800137329101563, 0.1580187530517578, 0.158997314453125, 0.15980851745605468, 0.1574835205078125, 0.15814707946777343, 0.15747247314453125, 0.15866499328613282, 0.15917240905761718, 0.15912570190429687, 0.16023551940917968, 0.15918080139160157, 0.15845170593261718, 0.15835955810546876, 0.15836671447753906, 0.16018435668945313, 0.15984722900390624, 0.15932432556152343, 0.15990127563476564, 0.15944908142089845, 0.1598201904296875, 0.1585473327636719, 0.1606080322265625, 0.15970594787597656, 0.16000408935546875, 0.15809928894042968, 0.16386451721191406, 0.16198690795898438, 0.15833689880371093, 0.158566650390625, 0.16050326538085938, 0.16253952026367188, 0.15880172729492187, 0.15796844482421876, 0.1582364501953125, 0.16111474609375, 0.16713679504394532, 0.15927285766601562, 0.15956845092773436, 0.15975424194335938, 0.1583943634033203, 0.1587077178955078, 0.1587314910888672, 0.15851805114746093, 0.15966738891601562, 0.15801837158203125, 0.15903062438964843, 0.15871043395996093, 0.15918284606933594, 0.16118783569335937, 0.16157901000976563, 0.15942393493652343, 0.15780848693847657, 0.16166336059570313, 0.16076007080078125, 0.15804220581054687, 0.15782687377929688, 0.1576798095703125, 0.15812815856933593, 0.15785749816894531, 0.1582881317138672, 0.158320068359375, 0.1582285461425781, 0.1581448974609375, 0.1584805145263672, 0.15742771911621095, 0.16340377807617187, 0.1569764862060547, 0.15786665344238282, 0.15661875915527343, 0.15798838806152343, 0.1570473327636719, 0.15814361572265626, 0.1580567321777344, 0.15766773986816407, 0.15793919372558593, 0.15744064331054688, 0.15845368957519532, 0.15866044616699218, 0.16008213806152344, 0.15744613647460937, 0.15827290344238282, 0.1583294677734375, 0.158593017578125, 0.15700323486328124, 0.16205673217773436, 0.15706317138671874, 0.15735005187988282, 0.1565895690917969, 0.15791305541992187, 0.16103462219238282, 0.15755030822753907, 0.1583250274658203, 0.15821395874023436, 0.1574827880859375, 0.16221224975585938, 0.15737065124511718, 0.15690304565429689, 0.15670640563964844, 0.15663267517089843, 0.1573200988769531, 0.15690736389160156, 0.1576900177001953, 0.15789833068847656, 0.15833743286132812, 0.1579677734375, 0.15711497497558594, 0.15834262084960937, 0.1574303741455078, 0.15753794860839843, 0.15746406555175782, 0.15745053100585937, 0.16168199157714844, 0.1590614013671875, 0.15937596130371093, 0.1590470733642578, 0.15895315551757813, 0.1604940185546875, 0.15926835632324218, 0.15934121704101561, 0.15810166931152345, 0.15816925048828126, 0.15804208374023437, 0.15848042297363282, 0.15806588745117187, 0.15897004699707032, 0.15887420654296874, 0.15887571716308593, 0.15790483093261717, 0.15861526489257813, 0.15783705139160156, 0.1579586944580078, 0.1622118377685547, 0.15870748901367188, 0.15858067321777344, 0.15848602294921876, 
0.15789932250976563, 0.15736175537109376, 0.16162197875976564, 0.1609888916015625, 0.16184620666503907, 0.16011456298828125, 0.15905616760253907, 0.1578453369140625, 0.16180947875976562, 0.15826165771484374, 0.15939637756347655, 0.15724922180175782, 0.1605753936767578, 0.1639729309082031, 0.16080137634277344, 0.1591826171875, 0.1580832977294922, 0.15740237426757814, 0.16223513793945313, 0.162538818359375, 0.1593555908203125, 0.160606201171875, 0.15954483032226563, 0.159127197265625, 0.16227824401855467, 0.16008790588378907, 0.1612571258544922, 0.16010905456542968, 0.16037887573242188, 0.16017768859863282, 0.15890071105957032, 0.15972966003417968, 0.15880397033691407, 0.15839642333984374, 0.16376611328125, 0.16015580749511718, 0.1588607635498047, 0.15945567321777343, 0.16019448852539062, 0.1622711944580078, 0.1604322509765625, 0.1602043914794922, 0.1615897216796875, 0.1597665252685547, 0.1596600341796875, 0.15758537292480468, 0.15729391479492189, 0.15694508361816406, 0.15785098266601563, 0.1572150115966797, 0.1582923583984375, 0.15796365356445313, 0.15833958435058593, 0.15853164672851563, 0.1574339141845703, 0.157802490234375, 0.15799909973144532, 0.15804415893554688, 0.15751895141601563, 0.1570470428466797, 0.15929411315917968, 0.1575362548828125, 0.15763157653808593, 0.15807171630859376, 0.15749119567871095, 0.15731507873535155, 0.15954931640625, 0.1598629150390625, 0.1574686737060547, 0.15942771911621093, 0.15752896118164061, 0.15722119140625, 0.16096018981933594, 0.1573130187988281, 0.15754818725585937, 0.15955708312988282, 0.16010739135742189, 0.1662197723388672, 0.15871795654296875, 0.15891232299804686, 0.15927110290527344, 0.1571962890625, 0.1587281951904297, 0.1576565399169922, 0.15753680419921876, 0.1588940734863281, 0.15885107421875, 0.15803590393066405, 0.15768173217773437, 0.15829330444335937, 0.15791363525390625, 0.1576326141357422, 0.15757113647460938, 0.1572703094482422, 0.1581375732421875, 0.15802383422851562, 0.1575837097167969, 0.15900259399414063, 0.15755413818359376, 0.15815721130371094, 0.15905325317382812, 0.15983091735839844, 0.15880805969238282, 0.15909274291992187, 0.15903129577636718, 0.15815679931640625, 0.16240025329589844, 0.1584904327392578, 0.15865670776367188, 0.15832864379882813, 0.15769206237792968, 0.15854595947265626, 0.1580770263671875, 0.157556640625, 0.15875436401367188, 0.15862541198730468, 0.15845458984375, 0.15861964416503907, 0.15805030822753907, 0.1586728973388672, 0.15830426025390626, 0.15838348388671875, 0.15726451110839842, 0.1583873291015625, 0.15844149780273437, 0.1612126007080078, 0.1603365478515625, 0.15847247314453125, 0.15869500732421876, 0.1586525421142578, 0.16131590270996093, 0.1597159423828125, 0.1593319396972656, 0.15855078125, 0.15874575805664062, 0.1577091522216797, 0.16146432495117188, 0.15812748718261718, 0.1573684539794922, 0.158216064453125, 0.15824284362792967, 0.15679959106445313, 0.16302490234375, 0.16280966186523438, 0.15958444213867187, 0.15870559692382813, 0.15876101684570312, 0.15828378295898438, 0.15868435668945313, 0.157542724609375, 0.15770265197753905, 0.1589324188232422, 0.15765184020996093, 0.15750009155273437, 0.15735609436035156, 0.15685317993164063, 0.16166297912597657, 0.15893504333496095, 0.15793058776855468, 0.15834953308105468, 0.16029257202148436, 0.15799559020996093, 0.15729161071777345, 0.15763909912109375, 0.15724386596679688, 0.1573130187988281, 0.15710829162597656, 0.15698037719726562, 0.15817558288574218, 0.15882695007324218, 0.15853977966308594, 0.15848585510253907, 0.15824461364746092, 
0.15902537536621095, 0.15839506530761718, 0.1580416259765625, 0.15750396728515625, 0.15754985046386719, 0.15823088073730468, 0.1570082550048828, 0.15714927673339843, 0.15732931518554688, 0.15823635864257812, 0.15787181091308594, 0.16032421875, 0.15856559753417968, 0.158885986328125, 0.15844012451171874, 0.15839027404785155, 0.16050381469726563, 0.15884083557128906, 0.15805186462402343, 0.15711801147460938, 0.15824295043945313, 0.15735238647460936, 0.1621422119140625, 0.16195120239257813, 0.15808419799804688, 0.1586460418701172, 0.15863714599609374, 0.1582021484375, 0.15854042053222656, 0.15779974365234375, 0.15910765075683594, 0.15773692321777344, 0.15784565734863282, 0.157839111328125, 0.15810934448242187, 0.15756758117675781, 0.15705497741699218, 0.15749020385742188, 0.15734883117675783, 0.15791513061523438, 0.15705413818359376, 0.15674246215820312, 0.159633056640625, 0.15775779724121095, 0.15834521484375, 0.15782231140136718, 0.15705973815917967, 0.1574541473388672, 0.15707933044433595, 0.15793174743652344]",tokens/s,6.269877732108665,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise 
RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,12172.288,7099.84256,0.0,6704.594944,6690.791936,s,1,28.497341796875,28.497341796875,0.0,28.497341796875,28.497341796875,28.497341796875,28.497341796875,[28.497341796875],,kWh,0.0006255994436999998,6.900108954029212e-05,0.0002113479468559996,0.0009059484800962916,,MB,1412.722688,7313.752064,0.0,6897.532928,6816.50432,s,10,1.2305203475952147,0.12305203475952146,0.0007433982943910377,0.12308745574951171,0.12405353775024414,0.12416759986877442,0.12425884956359863,"[0.12309510040283203, 0.12346208190917969, 0.12428166198730468, 0.12237039947509766, 0.12226799774169922, 0.12353353881835938, 0.12194550323486328, 0.1230798110961914, 0.12402819061279297, 0.12245606231689453]",tokens/s,2080.420697636545,kWh,3.641560362294242e-06,4.015982918952097e-07,2.406383955283955e-06,6.449542609473406e-06,tokens/kWh,39692737.22201239,MB,1432.256512,7320.04352,0.0,6903.824384,6816.50688,s,10,75.43501416015624,7.543501416015625,0.011593906221494027,7.547277099609374,7.557584716796875,7.557750610351563,7.557883325195312,"[7.5575478515625, 7.5541923828125, 7.55791650390625, 7.5298935546875, 7.5258388671875, 7.53303369140625, 7.54868701171875, 7.5317109375, 7.550326171875, 7.5458671875]",tokens/s,8.351559378811086,kWh,0.00022198080847728922,2.448530608798314e-05,9.876297475791608e-05,0.0003452290893231884,tokens/kWh,182487.5190080583,,s,630,75.43231571197506,0.11973383446345252,0.0010853508873399532,0.11953097915649413,0.1208431755065918,0.12180508804321288,0.1233563981628418,"[0.12017369842529296, 0.11974950408935547, 0.12236595153808594, 0.11982217407226563, 0.11972799682617187, 0.11903129577636719, 0.11889539337158203, 0.11908914947509766, 0.11950438690185547, 0.11949842834472656, 0.12097618865966797, 0.12004380798339843, 0.11917692565917969, 0.11951513671875, 0.11868569946289062, 0.11904000091552734, 0.12015821075439453, 0.12295577239990234, 0.12003052520751953, 0.11928646087646484, 0.11939820861816407, 0.11965001678466797, 0.11915420532226563, 0.11925936126708984, 0.11907721710205078, 0.11928307342529297, 0.11932733154296875, 0.12049027252197266, 0.12084236907958984, 0.12067635345458984, 0.12052384185791015, 0.11941983795166015, 0.11929510498046875, 0.12006079864501953, 0.11981414031982422, 0.12062854766845703, 0.12023878479003906, 0.11937721252441406, 0.12015481567382813, 0.11939839935302735, 0.11896825408935546, 0.11968723297119141, 0.11998822021484375, 0.1203671646118164, 0.12025644683837891, 0.12007014465332032, 0.11990016174316406, 0.11966397094726562, 0.11902365112304687, 0.11949529266357421, 0.1200618896484375, 0.11942281341552734, 0.12074806213378907, 0.11966893005371093, 0.11984598541259765, 
0.11938841247558593, 0.12033478546142579, 0.12052912139892578, 0.12127046203613281, 0.12126217651367187, 0.12033200073242188, 0.12016230773925782, 0.12303523254394531, 0.11941024017333984, 0.11940252685546875, 0.12077526092529296, 0.1200986557006836, 0.12036691284179687, 0.1194967041015625, 0.11953171539306641, 0.12077875518798828, 0.11889254760742188, 0.11883929443359376, 0.11906861114501953, 0.1190393295288086, 0.11952928161621093, 0.12067731475830078, 0.11974038696289062, 0.11956428527832032, 0.11905433654785157, 0.11932672119140625, 0.11914176177978515, 0.11939084625244141, 0.11959091186523438, 0.12280012512207031, 0.12056371307373047, 0.11980159759521485, 0.11990857696533203, 0.1194486083984375, 0.11981680297851563, 0.11970396423339844, 0.11965644836425782, 0.12053404998779296, 0.12012140655517578, 0.12037213134765624, 0.11997296142578125, 0.12016323089599609, 0.12007395172119141, 0.11935718536376953, 0.12235443115234375, 0.11959478759765625, 0.12073779296875, 0.12017974090576172, 0.1204397735595703, 0.11925484466552734, 0.11946208190917969, 0.11925299072265624, 0.12060835266113282, 0.11953807830810546, 0.12065315246582031, 0.12032272338867188, 0.11942707061767578, 0.12098774719238281, 0.12001475524902344, 0.1195335693359375, 0.11951103973388671, 0.11990857696533203, 0.12049385833740234, 0.11943917083740234, 0.11993222045898437, 0.12033631896972656, 0.11973884582519531, 0.11930636596679688, 0.11972422027587891, 0.11981228637695313, 0.11934281921386719, 0.12033773040771484, 0.11980461120605469, 0.1191523208618164, 0.11911507415771484, 0.11897714996337891, 0.119283203125, 0.12200434875488281, 0.12114329528808594, 0.12062310028076172, 0.11924249267578126, 0.11996141052246094, 0.11985945892333984, 0.1215797119140625, 0.1196605453491211, 0.12184941101074219, 0.12103110504150391, 0.120495361328125, 0.11960297393798829, 0.12009552001953125, 0.12055526733398438, 0.119275390625, 0.12351084899902344, 0.11954585266113281, 0.11976345825195313, 0.12031568145751953, 0.12047113800048828, 0.12023872375488281, 0.11945369720458984, 0.11949385833740235, 0.11954640197753906, 0.11932051086425781, 0.12116204833984374, 0.1209170913696289, 0.12039020538330078, 0.11999807739257813, 0.12185993957519531, 0.11925987243652343, 0.1195502700805664, 0.11920777893066406, 0.11951827239990234, 0.12053148651123047, 0.12157357025146484, 0.11950511932373047, 0.11962572479248047, 0.11866019439697266, 0.11882125091552734, 0.1187369613647461, 0.11877011108398437, 0.11928099060058593, 0.1208941421508789, 0.11994111633300782, 0.11931238555908204, 0.11899903869628906, 0.11833753967285156, 0.11914854431152344, 0.1184686050415039, 0.12202754974365235, 0.11965692901611329, 0.11960137939453125, 0.12070304107666016, 0.11990313720703125, 0.11891180419921875, 0.1190606689453125, 0.11922438049316406, 0.12041939544677735, 0.12306905364990234, 0.119959228515625, 0.12003385925292968, 0.11974861145019532, 0.1189080352783203, 0.11911609649658203, 0.11900572967529296, 0.11905990600585938, 0.12029312133789062, 0.11942588806152343, 0.11945919799804687, 0.1195546875, 0.11861952209472656, 0.11865366363525391, 0.11871622467041015, 0.11912214660644531, 0.12035059356689454, 0.11973216247558593, 0.11871785736083984, 0.11942160034179687, 0.11896451568603515, 0.11820003509521484, 0.11908505249023438, 0.11866031646728516, 0.12065666961669921, 0.11917212677001954, 0.11984381103515625, 0.11957453155517578, 0.1189375991821289, 0.12100198364257812, 0.11918473815917968, 0.12328412628173828, 0.11903794860839843, 0.12036441802978516, 0.11977922821044922, 
0.11969510650634765, 0.11891158294677734, 0.11863283538818359, 0.11860921478271484, 0.1203944320678711, 0.11884483337402343, 0.11953024291992187, 0.11991584014892578, 0.11878627014160156, 0.11979603576660157, 0.11898473358154296, 0.11812246704101563, 0.11908441925048828, 0.11921193695068359, 0.11885027313232421, 0.12033023834228515, 0.11973139190673829, 0.1191673583984375, 0.11845244598388673, 0.11827235412597656, 0.11862207794189453, 0.11865702056884765, 0.11893059539794922, 0.1237982406616211, 0.12161446380615235, 0.11831478118896484, 0.11835533142089844, 0.11866796875, 0.11876319885253907, 0.11946809387207032, 0.11959532928466797, 0.11958806610107423, 0.11869459533691407, 0.1190832977294922, 0.11849612426757812, 0.11825456237792968, 0.11972169494628906, 0.11940675354003906, 0.12141094207763672, 0.12022438049316406, 0.11983609771728515, 0.11922489929199219, 0.11875113677978516, 0.11841065979003906, 0.11845868682861328, 0.1189458236694336, 0.11932911682128906, 0.11976294708251953, 0.11969478607177735, 0.11935174560546875, 0.11970572662353515, 0.1192973403930664, 0.11841782379150391, 0.11844841766357422, 0.12321382141113281, 0.12017638397216797, 0.12212454223632813, 0.12025577545166016, 0.11922505950927734, 0.11892940521240235, 0.11887411499023437, 0.11883312225341797, 0.11932879638671876, 0.11900284576416016, 0.12014006042480468, 0.11928959655761719, 0.11932844543457032, 0.11848063659667969, 0.11837312316894531, 0.1185567398071289, 0.11860157012939453, 0.11929821014404297, 0.11971788787841797, 0.1192959976196289, 0.1189862060546875, 0.11897100830078125, 0.11919757080078125, 0.12073561859130859, 0.11951651000976563, 0.12201042938232422, 0.11860326385498046, 0.12077107238769531, 0.1192591323852539, 0.12309404754638673, 0.1184176025390625, 0.11821036529541015, 0.11983548736572265, 0.1207410888671875, 0.11881292724609376, 0.12162969970703125, 0.12009926605224609, 0.1191410903930664, 0.11902960205078125, 0.11987776184082032, 0.11907660675048828, 0.118953857421875, 0.12059871673583984, 0.12020451354980469, 0.12092076873779296, 0.11985849761962891, 0.11920880126953125, 0.1196933135986328, 0.11943321228027344, 0.12038758087158204, 0.11987145233154296, 0.12205856323242187, 0.11998175811767578, 0.11914498901367188, 0.11816918182373047, 0.11828470611572266, 0.11852767944335937, 0.11904156494140625, 0.11864351654052735, 0.12085244750976562, 0.12006809234619141, 0.11981756591796874, 0.11900982666015625, 0.1187957763671875, 0.11894438171386719, 0.11927552032470704, 0.12119654083251953, 0.12207308959960937, 0.12013565063476563, 0.11995343780517578, 0.11986249542236328, 0.12001769256591797, 0.11947366333007813, 0.11882476806640625, 0.1192925796508789, 0.11859561920166016, 0.11991808319091797, 0.12048454284667968, 0.11933602905273437, 0.11914307403564453, 0.1182449951171875, 0.11882694244384766, 0.11893196868896484, 0.11976258850097657, 0.1197202911376953, 0.1200721893310547, 0.11922796630859375, 0.11962207794189453, 0.11897264099121094, 0.11866429138183594, 0.11856079864501953, 0.11910524749755859, 0.11958550262451172, 0.11937814331054687, 0.11987331390380859, 0.11903612518310547, 0.11957817840576172, 0.11868972778320312, 0.11839830780029297, 0.11922908782958984, 0.11951103973388671, 0.11991180419921875, 0.11948300933837891, 0.11889459228515625, 0.12053094482421875, 0.12058624267578125, 0.11993836975097656, 0.12243174743652344, 0.12015010833740235, 0.12074752044677735, 0.11937059020996094, 0.11965644836425782, 0.11974156951904297, 0.11881084442138672, 0.12347583770751953, 0.1205355224609375, 
0.12010323333740235, 0.11951097869873047, 0.12101229095458985, 0.11928985595703125, 0.11854163360595703, 0.11886825561523437, 0.11954576110839844, 0.1188496322631836, 0.11849565124511718, 0.12003763580322266, 0.12017024230957031, 0.12015535736083985, 0.11934194946289063, 0.11935958099365235, 0.11947103881835938, 0.12020758056640625, 0.12015068817138672, 0.12155494689941407, 0.11986637115478516, 0.11898982238769532, 0.11916006469726563, 0.11862818908691407, 0.11830326080322266, 0.11834323120117188, 0.11905625915527343, 0.11838559722900391, 0.11991871643066407, 0.11871132659912109, 0.1193071060180664, 0.11882064056396484, 0.11860537719726562, 0.11870649719238281, 0.11976943969726563, 0.12995289611816407, 0.12005875396728516, 0.1201295394897461, 0.12266057586669922, 0.11874470520019531, 0.11855699157714844, 0.11855907440185547, 0.11875910186767578, 0.11998258972167969, 0.12102227020263671, 0.11961753845214844, 0.11972799682617187, 0.11947325134277344, 0.1183814697265625, 0.11890013122558593, 0.11888495635986328, 0.11874864196777343, 0.11902210998535157, 0.11976454162597656, 0.11972819519042968, 0.11898210906982422, 0.11856710052490234, 0.118293212890625, 0.11903929901123046, 0.11930284881591798, 0.12033392333984375, 0.12022621154785157, 0.11950694274902343, 0.11888374328613281, 0.11922492980957031, 0.11878387451171875, 0.11865094757080077, 0.11992070770263671, 0.11991859436035156, 0.11967203521728516, 0.11921692657470703, 0.12027699279785156, 0.11898473358154296, 0.12137042999267578, 0.11994332885742187, 0.12132147216796875, 0.12020313262939453, 0.12175091552734375, 0.11989708709716797, 0.11976809692382813, 0.11936835479736328, 0.11899107360839843, 0.11905007934570312, 0.12322611236572266, 0.11964006042480468, 0.12085043334960938, 0.11978956604003907, 0.11939020538330078, 0.11895193481445313, 0.11870003509521485, 0.11848499298095704, 0.11873484802246094, 0.11853756713867188, 0.11924137878417969, 0.12106969451904297, 0.11973033905029297, 0.1193919677734375, 0.11925122833251953, 0.11961417388916015, 0.11893981170654297, 0.11966345977783203, 0.11872627258300782, 0.12338591766357422, 0.11997443389892579, 0.11978892517089844, 0.11907727813720703, 0.11898880004882813, 0.11859964752197266, 0.1188288345336914, 0.1185159683227539, 0.11902928161621094, 0.11888880157470703, 0.11925107574462891, 0.118171875, 0.11781660461425782, 0.11955455780029296, 0.11839488220214844, 0.12229798126220703, 0.12013196563720703, 0.11974041748046875, 0.11957657623291015, 0.11893071746826171, 0.11974291229248046, 0.11915926361083984, 0.11900294494628906, 0.1192366714477539, 0.1213306884765625, 0.12013254547119141, 0.1199636459350586, 0.11930009460449219, 0.11869120025634766, 0.11899897766113281, 0.11831366729736328, 0.11905795288085938, 0.12022422027587891, 0.12071731567382812, 0.119046142578125, 0.11970511627197265, 0.11934767913818359, 0.11867750549316407, 0.11878399658203125, 0.11980111694335938, 0.11917759704589843, 0.12096656036376953, 0.11997641754150391, 0.11966524505615235, 0.11906985473632813, 0.11888713836669922, 0.11882003021240234, 0.11921862030029297, 0.11992054748535157, 0.12004771423339844, 0.11992237091064453, 0.11963158416748047, 0.11954070281982422, 0.1223392333984375, 0.12478288269042968, 0.11962745666503906, 0.12157520294189453, 0.12046371459960938, 0.11999215698242187, 0.12306857299804687, 0.11988172912597657, 0.11947007751464844, 0.11947212982177734, 0.12280422210693359, 0.11980729675292968, 0.12030950164794922, 0.11969631958007812, 0.11939609527587891, 0.12063081359863281, 0.12204105377197266, 
0.1188331527709961, 0.11879241943359375, 0.11958943939208984, 0.12048393249511719, 0.12021334075927734, 0.11963362884521485, 0.1195976333618164, 0.11908271789550781, 0.118724609375, 0.1191805419921875, 0.1185719985961914, 0.12153040313720703, 0.12006124877929687, 0.1194986572265625, 0.12027094268798828, 0.11915644836425782, 0.11922710418701171, 0.11937324523925781, 0.12075065612792969, 0.12003123474121094, 0.12056390380859375, 0.11997574615478515, 0.11982848358154297, 0.11911081695556641, 0.11942082977294922, 0.119384765625, 0.11925939178466796, 0.11914403533935547, 0.12470108795166016, 0.12011724853515625, 0.12056781005859375, 0.11986310577392578, 0.11982428741455078, 0.11924864196777343, 0.11916342163085937, 0.11943116760253907, 0.12045516967773437, 0.1199774398803711, 0.11952003479003906, 0.12041600036621093, 0.1200396499633789, 0.11951395416259766, 0.11895606231689453, 0.11920851135253906, 0.12040841674804688, 0.11894371032714844, 0.11964627075195312, 0.1196748504638672, 0.1196640625, 0.11950342559814453, 0.11921193695068359, 0.11898067474365234, 0.11929814147949219, 0.12081065368652344, 0.11950307464599609, 0.12139167785644531, 0.1194617919921875, 0.11941487884521484, 0.1190277099609375, 0.12060374450683593, 0.11904691314697266, 0.12010675048828125, 0.11995331573486329, 0.11949100494384765]",tokens/s,8.351858140025069,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,19008.323584,9680.32256,0.0,9277.800448,9256.005632,s,1,40.1634765625,40.1634765625,0.0,40.1634765625,40.1634765625,40.1634765625,40.1634765625,[40.1634765625],,kWh,0.0009520982686833312,0.0001050163868328573,0.0003213588681980073,0.0013784735237141958,,MB,2182.361088,9866.969088,0.0,9443.475456,9414.503424,s,10,1.6638313903808595,0.16638313903808594,0.001984479444760709,0.16580045318603515,0.16801620483398436,0.1697295913696289,0.17110030059814452,"[0.16723114013671875, 0.1676354522705078, 0.17144297790527344, 0.16478697204589843, 0.16489913940429687, 0.16686991882324217, 0.16573770141601563, 0.16481205749511718, 0.16455282592773438, 0.1658632049560547]",tokens/s,1538.6174433300018,kWh,4.826221819330497e-06,5.322511646677185e-07,3.206610033409849e-06,8.565083017408066e-06,tokens/kWh,29888793.778144814,MB,2199.105536,9883.746304,0.0,9458.15552,9358.127616,s,10,102.8934501953125,10.28934501953125,0.037229436585284756,10.28347412109375,10.3324421875,10.346754296875,10.358203984375,"[10.290201171875, 10.3246435546875, 10.258732421875, 10.3010703125, 10.36106640625, 10.2767470703125, 10.32926171875, 10.2472060546875, 10.244619140625, 10.25990234375]",tokens/s,6.122838711347836,kWh,0.00029965533456025457,3.3053650562293215e-05,0.00013616000145978978,0.0004688689865823375,tokens/kWh,134365.89282481075,,s,630,102.8900487213136,0.16331753765287854,0.0018267661698116223,0.16301532745361327,0.16562399139404296,0.16642240600585936,0.16917306091308595,"[0.16282608032226562, 
0.16336997985839843, 0.16048947143554687, 0.16084378051757814, 0.1619251251220703, 0.1638973388671875, 0.1611336669921875, 0.1632431640625, 0.1610280303955078, 0.16307347106933595, 0.16207705688476562, 0.16316166687011718, 0.16181497192382813, 0.16719366455078125, 0.16069932556152344, 0.1629388732910156, 0.16256816101074217, 0.16182858276367187, 0.16516064453125, 0.16221658325195312, 0.1626746826171875, 0.16251446533203126, 0.16273228454589844, 0.16301692199707032, 0.16248582458496094, 0.1644703369140625, 0.16440205383300782, 0.16407276916503907, 0.164586181640625, 0.16488858032226564, 0.1641160888671875, 0.1659224395751953, 0.1651862030029297, 0.164999267578125, 0.16407887268066407, 0.16460003662109374, 0.16296730041503907, 0.16186968994140624, 0.16233970642089843, 0.17337539672851562, 0.16192063903808593, 0.16247856140136718, 0.16225074768066405, 0.16164370727539062, 0.16046368408203124, 0.16297080993652344, 0.16288645935058593, 0.16320921325683593, 0.16464675903320314, 0.16264761352539062, 0.16380902099609376, 0.16254637145996093, 0.16106224060058594, 0.160940673828125, 0.16515705871582032, 0.16254531860351562, 0.16355686950683593, 0.16138922119140625, 0.16353298950195314, 0.1623362274169922, 0.16468637084960938, 0.16923648071289063, 0.1671327667236328, 0.16311427307128906, 0.1636844482421875, 0.16611756896972657, 0.1653552703857422, 0.16496278381347657, 0.16478959655761719, 0.16461500549316407, 0.1636065673828125, 0.16358387756347656, 0.1651549377441406, 0.1660999755859375, 0.16498313903808592, 0.16540739440917968, 0.16484255981445312, 0.16300332641601561, 0.16562562561035157, 0.16407369995117188, 0.16836941528320312, 0.16409881591796874, 0.1638089599609375, 0.16217324829101562, 0.16228965759277345, 0.16311666870117186, 0.16293309020996094, 0.16357705688476562, 0.16387770080566405, 0.16367779541015626, 0.16178598022460938, 0.16324966430664062, 0.16130128479003905, 0.162234375, 0.16211785888671876, 0.1621596221923828, 0.16190953063964844, 0.16202342224121094, 0.1616609344482422, 0.16459776306152343, 0.16191424560546874, 0.16375001525878907, 0.16305612182617188, 0.1629854736328125, 0.1620804138183594, 0.16199356079101562, 0.16198451232910155, 0.16266822814941406, 0.1662684783935547, 0.16134835815429688, 0.1615257568359375, 0.1610826873779297, 0.16187008666992186, 0.1628594512939453, 0.16642025756835938, 0.16425372314453124, 0.166357177734375, 0.16742604064941408, 0.16392784118652343, 0.16532406616210937, 0.16393087768554687, 0.1670965118408203, 0.1679462432861328, 0.1651261444091797, 0.16714547729492188, 0.16401795959472656, 0.16131686401367187, 0.16174592590332032, 0.16247705078125, 0.1629839324951172, 0.16206192016601562, 0.16316018676757812, 0.16230633544921874, 0.16561357116699219, 0.1610749816894531, 0.16221372985839844, 0.1627262725830078, 0.16202546691894532, 0.1615667266845703, 0.16207872009277344, 0.16215858459472657, 0.16426188659667967, 0.16423904418945312, 0.16452787780761718, 0.1648236083984375, 0.161570556640625, 0.16277938842773437, 0.16249232482910156, 0.16233016967773437, 0.1650564727783203, 0.1624661407470703, 0.16246400451660156, 0.16259429931640626, 0.16222869873046875, 0.16216000366210936, 0.1631297302246094, 0.1621895751953125, 0.163420166015625, 0.16306790161132811, 0.16286720275878908, 0.1641185302734375, 0.16105868530273437, 0.1637827911376953, 0.16362457275390624, 0.16190658569335936, 0.16180067443847657, 0.16018534851074218, 0.16118060302734374, 0.1672458953857422, 0.1635365447998047, 0.16358845520019533, 0.1632235565185547, 0.16693766784667968, 
0.16147113037109376, 0.1637987518310547, 0.16066207885742187, 0.16247734069824218, 0.16374652099609374, 0.16284877014160157, 0.1629835205078125, 0.16140071105957032, 0.16197482299804689, 0.16641229248046874, 0.16403456115722656, 0.16268287658691405, 0.16281138610839843, 0.16243280029296875, 0.16141151428222655, 0.16089097595214844, 0.16220681762695313, 0.1624113006591797, 0.16284275817871094, 0.1623039093017578, 0.16642416381835937, 0.16607026672363281, 0.16194125366210937, 0.1615572509765625, 0.16067379760742187, 0.1616131591796875, 0.16273680114746095, 0.16211148071289064, 0.161617919921875, 0.16222732543945312, 0.16181657409667968, 0.16218789672851563, 0.16089244079589843, 0.1631116485595703, 0.16249174499511718, 0.161614501953125, 0.16349795532226563, 0.1625513916015625, 0.16264236450195313, 0.16351026916503905, 0.16478617858886718, 0.1644130859375, 0.16406492614746093, 0.1646865539550781, 0.1625843811035156, 0.16584658813476563, 0.16431983947753906, 0.16704920959472655, 0.16545074462890624, 0.16430387878417968, 0.16601087951660157, 0.1632174072265625, 0.16457884216308594, 0.1616237487792969, 0.16229196166992188, 0.16437303161621095, 0.16238946533203125, 0.1658876495361328, 0.16492633056640624, 0.16475932312011718, 0.16347978210449218, 0.16562380981445313, 0.16490435791015626, 0.1629354248046875, 0.16353276062011718, 0.1650380859375, 0.16551913452148437, 0.1624168701171875, 0.16272589111328126, 0.1659678649902344, 0.1640323486328125, 0.16313565063476562, 0.16348570251464845, 0.16247398376464844, 0.16366592407226563, 0.1634464569091797, 0.16364083862304687, 0.16262841796875, 0.16345703125, 0.16477798461914062, 0.16760812377929687, 0.16590867614746094, 0.16641023254394532, 0.16398130798339844, 0.163162109375, 0.1635674285888672, 0.1636409912109375, 0.16223397827148436, 0.17223362731933595, 0.16640818786621095, 0.16622918701171874, 0.16333468627929687, 0.1631294708251953, 0.16397868347167968, 0.16185775756835938, 0.16427430725097655, 0.1653702392578125, 0.1629654998779297, 0.16321484375, 0.1627423095703125, 0.16379747009277343, 0.162914306640625, 0.1640424041748047, 0.1626125793457031, 0.16200192260742188, 0.16331980895996093, 0.16378880310058594, 0.1706577911376953, 0.16256410217285155, 0.16442105102539062, 0.1632528076171875, 0.16265933227539062, 0.1640696563720703, 0.16273884582519532, 0.16339555358886718, 0.16645094299316407, 0.16429446411132811, 0.16555882263183594, 0.16578764343261718, 0.16536480712890625, 0.16391183471679688, 0.16413679504394532, 0.16510867309570312, 0.16328294372558594, 0.16245269775390625, 0.16712783813476562, 0.1628897247314453, 0.16312728881835936, 0.16505973815917968, 0.16590934753417969, 0.16480665588378907, 0.16505987548828124, 0.1642884521484375, 0.16598710632324218, 0.163212646484375, 0.16403427124023437, 0.16517161560058594, 0.1657472381591797, 0.1646959991455078, 0.16363063049316406, 0.16352876281738282, 0.16685696411132814, 0.16600802612304688, 0.16216348266601563, 0.16378880310058594, 0.16602316284179688, 0.16214950561523436, 0.16302499389648437, 0.16259767150878907, 0.1629015350341797, 0.16204217529296874, 0.16252703857421874, 0.16083798217773437, 0.1617100830078125, 0.1607352294921875, 0.1620125732421875, 0.1613336639404297, 0.1621195831298828, 0.16148509216308593, 0.16139468383789063, 0.16043417358398437, 0.16051715087890625, 0.16083004760742187, 0.1618740234375, 0.16202371215820313, 0.16273350524902344, 0.16256185913085938, 0.16578636169433594, 0.1607797088623047, 0.16259039306640624, 0.1616159973144531, 0.16307276916503907, 0.16092965698242187, 
0.1619454345703125, 0.16228950500488282, 0.1644027862548828, 0.16515977478027344, 0.16312503051757812, 0.1635166473388672, 0.16610917663574218, 0.16241868591308595, 0.16327008056640624, 0.16445677185058594, 0.16456935119628907, 0.16511795043945313, 0.1643171844482422, 0.16554803466796875, 0.16326451110839843, 0.1623019561767578, 0.16394650268554686, 0.16735337829589844, 0.16344163513183593, 0.16399119567871093, 0.16429481506347657, 0.16356985473632812, 0.1624637451171875, 0.1636188201904297, 0.16271565246582032, 0.16374989318847658, 0.1633454132080078, 0.16254591369628907, 0.16726092529296874, 0.165001220703125, 0.16330685424804686, 0.16335504150390626, 0.1638260498046875, 0.16218960571289062, 0.16277503967285156, 0.1650708465576172, 0.16408575439453124, 0.16516505432128906, 0.16394419860839843, 0.1649522247314453, 0.16331507873535156, 0.1650894012451172, 0.1637423095703125, 0.16148028564453126, 0.16452239990234374, 0.16392601013183594, 0.16308633422851562, 0.16337510681152342, 0.16317439270019532, 0.16301600646972655, 0.16318275451660155, 0.1620155487060547, 0.1655274200439453, 0.16418182373046875, 0.16656752014160156, 0.16315487670898438, 0.16580607604980468, 0.16396493530273437, 0.16237478637695313, 0.16280982971191407, 0.16415423583984376, 0.16428604125976562, 0.16901779174804688, 0.1678028869628906, 0.16615423583984376, 0.16453622436523438, 0.16343869018554688, 0.16354304504394532, 0.16484556579589843, 0.16687718200683593, 0.16599644470214844, 0.163440185546875, 0.16523114013671875, 0.16423475646972657, 0.1645470733642578, 0.16487423706054688, 0.16490669250488282, 0.16758201599121095, 0.16213095092773439, 0.1620018310546875, 0.1640894775390625, 0.16146060180664062, 0.16236863708496094, 0.1611348419189453, 0.16183920288085937, 0.1629231414794922, 0.16308018493652343, 0.1630146484375, 0.16443341064453126, 0.16226924133300782, 0.16357830810546875, 0.1617032928466797, 0.16448576354980468, 0.16356761169433592, 0.1630146484375, 0.16420761108398438, 0.16525209045410155, 0.16288153076171874, 0.1634212188720703, 0.16052268981933593, 0.16368489074707032, 0.16152166748046876, 0.16204591369628907, 0.16271363830566407, 0.160980224609375, 0.16086912536621092, 0.1614661407470703, 0.16043212890625, 0.16196835327148437, 0.16244248962402344, 0.16316220092773437, 0.16292710876464844, 0.16289920043945313, 0.162046630859375, 0.1648080291748047, 0.16243991088867188, 0.16320326232910157, 0.16375193786621095, 0.16317837524414064, 0.16299977111816405, 0.1627975311279297, 0.16192582702636718, 0.16489881896972655, 0.1675614776611328, 0.16251239013671875, 0.16285693359375, 0.16294070434570312, 0.1616324462890625, 0.16181263732910156, 0.1628100128173828, 0.1619230651855469, 0.16211666870117186, 0.16362797546386718, 0.16307133483886718, 0.16179164123535156, 0.1631508483886719, 0.163157958984375, 0.17363360595703126, 0.1610784912109375, 0.1606900177001953, 0.16106796264648438, 0.16585299682617188, 0.16172601318359375, 0.16128268432617188, 0.16035011291503906, 0.16035440063476564, 0.16180018615722655, 0.16357376098632812, 0.16215449523925782, 0.1645096893310547, 0.16062669372558594, 0.16026419067382813, 0.1607346954345703, 0.16283293151855469, 0.16279347229003907, 0.16099655151367187, 0.16254649353027345, 0.16157901000976563, 0.16164646911621094, 0.16236341857910155, 0.16200985717773436, 0.16243507385253905, 0.16191017150878906, 0.16331837463378907, 0.16126156616210938, 0.16047923278808593, 0.16084786987304686, 0.16090316772460939, 0.16002662658691405, 0.16076800537109376, 0.16026829528808595, 0.16140048217773437, 
0.160855712890625, 0.161425537109375, 0.16160362243652343, 0.16486647033691407, 0.16096620178222656, 0.1625134735107422, 0.16199449157714843, 0.1616894989013672, 0.1611116485595703, 0.16154086303710938, 0.1606983642578125, 0.1651445770263672, 0.16187767028808595, 0.16199920654296876, 0.16432333374023436, 0.16141311645507814, 0.16144178771972656, 0.16165263366699217, 0.16323799133300781, 0.1627150421142578, 0.16271830749511718, 0.16196380615234374, 0.1626474609375, 0.16241746520996095, 0.16157696533203125, 0.1625866241455078, 0.16995327758789064, 0.16549250793457032, 0.1646955261230469, 0.16372998046875, 0.16376553344726563, 0.16284559631347656, 0.16144998168945313, 0.16231015014648437, 0.1626132507324219, 0.16473046875, 0.16494770812988283, 0.16705142211914062, 0.16440745544433594, 0.16397859191894532, 0.1610878448486328, 0.1631761016845703, 0.16171519470214843, 0.1646612548828125, 0.16229887390136719, 0.16340071105957032, 0.16344796752929688, 0.16217135620117187, 0.1637109375, 0.16369544982910156, 0.16276454162597656, 0.16340812683105468, 0.16167085266113282, 0.16080650329589843, 0.1616596221923828, 0.16391949462890626, 0.16387423706054688, 0.1617499542236328, 0.16211935424804688, 0.16321568298339845, 0.16157901000976563, 0.16568479919433593, 0.16260960388183593, 0.16524691772460937, 0.1621094970703125, 0.1620193328857422, 0.16329522705078126, 0.16229580688476564, 0.16309158325195314, 0.16383680725097657, 0.16198042297363283, 0.1612349395751953, 0.1652889862060547, 0.16199369812011719, 0.1644031982421875, 0.1634569549560547, 0.16285292053222655, 0.16192410278320313, 0.16252415466308595, 0.16265420532226563, 0.16250405883789062, 0.16083407592773438, 0.162967041015625, 0.16127650451660155, 0.16174899291992187, 0.1621790771484375, 0.16745266723632812, 0.17014988708496093, 0.1623201904296875, 0.16382102966308593, 0.1621011199951172, 0.16307696533203125, 0.1615501708984375, 0.16127740478515626, 0.16200364685058594, 0.16101580810546876, 0.1623299560546875, 0.1613741455078125, 0.164917724609375, 0.16168780517578124, 0.16153961181640625, 0.1622302703857422, 0.16377667236328125, 0.1614903106689453, 0.16166188049316407, 0.1617960968017578, 0.16640614318847657, 0.1641360626220703, 0.16283103942871094, 0.1632659912109375, 0.1629529571533203, 0.1619484405517578]",tokens/s,6.123041128169829,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,1097.445376,709.820416,0.0,314.5728,299.62752,s,1,8.251154296875,8.251154296875,0.0,8.251154296875,8.251154296875,8.251154296875,8.251154296875,[8.251154296875],,kWh,3.137829248750374e-05,3.4540586563329293e-06,1.0579730685994493e-05,4.541208182983116e-05,,MB,1266.520064,818.87232,0.0,402.653184,359.612416,s,10,0.3170804176330566,0.03170804176330567,0.00022690988150239584,0.03174241638183594,0.031969792556762695,0.03197713623046875,0.0319830111694336,"[0.03198447990417481, 0.031940736770629884, 0.03196816062927246, 
0.03174582481384278, 0.03183510398864746, 0.031739007949829104, 0.03159184074401856, 0.0313450870513916, 0.03159705543518066, 0.03133312034606934]",tokens/s,8073.661625369045,kWh,9.299740253450393e-07,1.025596733217605e-07,4.2864949366239933e-07,1.4611831923291992e-06,tokens/kWh,175200482.28307578,MB,1300.754432,841.940992,0.0,425.721856,360.53248,s,10,19.3361962890625,1.93361962890625,0.010603024772909005,1.9284808349609377,1.9499725585937502,1.950110546875,1.9502209375,"[1.94994189453125, 1.9247216796875, 1.95024853515625, 1.9486453857421875, 1.9264437255859375, 1.92957958984375, 1.9292215576171876, 1.9277401123046876, 1.9248831787109375, 1.9247706298828124]",tokens/s,32.581382117865594,kWh,5.587296759256952e-05,6.162513693174058e-06,2.1699062652938576e-05,8.373454393868214e-05,tokens/kWh,752377.657256176,,s,630,19.330464256286625,0.030683276597280353,0.0004544658682586854,0.030625375747680664,0.031081328582763672,0.03126024866104126,0.03246809272766115,"[0.030281471252441405, 0.03037238311767578, 0.030021631240844726, 0.030042015075683593, 0.030056543350219726, 0.030045984268188476, 0.030284000396728517, 0.03070319938659668, 0.030895679473876954, 0.03063216018676758, 0.030739072799682618, 0.03137059211730957, 0.03217891311645508, 0.03151600074768066, 0.031154783248901367, 0.03147529602050781, 0.03094099235534668, 0.031064672470092772, 0.030953535079956053, 0.030599103927612305, 0.030621952056884765, 0.03084262466430664, 0.030508447647094726, 0.03057891273498535, 0.030730623245239258, 0.030427007675170897, 0.03045199966430664, 0.03071574401855469, 0.030662656784057617, 0.03100467109680176, 0.031178976058959963, 0.030877248764038086, 0.030712032318115236, 0.030627840042114256, 0.03081625556945801, 0.03078963279724121, 0.0307893123626709, 0.03063225555419922, 0.03073843193054199, 0.03081964874267578, 0.031077056884765624, 0.03114147186279297, 0.03092889595031738, 0.030979808807373048, 0.0310064640045166, 0.030968767166137695, 0.03106719970703125, 0.031080543518066408, 0.0310784969329834, 0.030921472549438476, 0.03143654441833496, 0.031029504776000978, 0.031117311477661135, 0.03136422348022461, 0.031000640869140624, 0.031144767761230468, 0.03504537582397461, 0.0311910400390625, 0.031229888916015625, 0.03147545623779297, 0.031080768585205077, 0.031006719589233397, 0.031122880935668944, 0.03089619255065918, 0.030849407196044922, 0.030539552688598634, 0.030498367309570312, 0.03055683135986328, 0.030631935119628906, 0.03061564826965332, 0.03057872009277344, 0.0304899845123291, 0.03042355155944824, 0.03028976058959961, 0.030458015441894533, 0.030522848129272463, 0.03063248062133789, 0.030864736557006837, 0.030747295379638672, 0.03077849578857422, 0.030429855346679687, 0.030340543746948244, 0.030425600051879883, 0.030222463607788085, 0.03029599952697754, 0.03016873550415039, 0.030132863998413088, 0.0302073917388916, 0.030236480712890625, 0.030417503356933592, 0.030826623916625977, 0.031233152389526366, 0.03082966423034668, 0.030920480728149412, 0.030965728759765623, 0.030707679748535156, 0.03134694480895996, 0.030478303909301757, 0.030318431854248047, 0.030552480697631838, 0.03049760055541992, 0.030303232192993163, 0.030392095565795897, 0.03041916847229004, 0.030457151412963866, 0.030312543869018556, 0.030615936279296874, 0.030663999557495117, 0.03063382339477539, 0.03064713668823242, 0.030658559799194338, 0.030457855224609375, 0.03060438346862793, 0.03019580841064453, 0.030532447814941407, 0.03044588851928711, 0.03038787269592285, 0.03029520034790039, 0.03022729682922363, 0.030104896545410157, 
0.03027014350891113, 0.030410560607910156, 0.03049478340148926, 0.030797952651977538, 0.030900447845458985, 0.030946271896362305, 0.030674367904663085, 0.03073683166503906, 0.030707839965820313, 0.030923999786376954, 0.031034143447875976, 0.03094528007507324, 0.030891008377075195, 0.030863872528076174, 0.030956031799316407, 0.030819583892822265, 0.03089036750793457, 0.030966144561767578, 0.03092633628845215, 0.031016799926757814, 0.031054431915283204, 0.03124995231628418, 0.031113759994506836, 0.031356576919555665, 0.031268672943115236, 0.03180803108215332, 0.031013151168823243, 0.03109449577331543, 0.031029247283935548, 0.030908416748046875, 0.030891263961791992, 0.031104927062988282, 0.031234912872314453, 0.03106800079345703, 0.031154336929321288, 0.031195199966430665, 0.031532991409301755, 0.031210559844970703, 0.03099535942077637, 0.030863391876220704, 0.030682912826538088, 0.03148342323303223, 0.030949344635009767, 0.031054271697998046, 0.030797887802124023, 0.03070787239074707, 0.030611520767211915, 0.03056764793395996, 0.030561248779296876, 0.030670591354370117, 0.030664800643920898, 0.030762496948242186, 0.03072617530822754, 0.030781024932861327, 0.03192508888244629, 0.03082838439941406, 0.0312291202545166, 0.030749664306640626, 0.031202655792236328, 0.030743200302124022, 0.03079529571533203, 0.030480159759521484, 0.030503616333007813, 0.030511104583740234, 0.03079167938232422, 0.030949695587158203, 0.03073606491088867, 0.030642175674438478, 0.031086368560791014, 0.030702655792236327, 0.030596416473388673, 0.030536096572875978, 0.030541952133178712, 0.03077324867248535, 0.030560192108154298, 0.030467872619628907, 0.030322975158691406, 0.03034832000732422, 0.03069593620300293, 0.03258620834350586, 0.030707296371459962, 0.031177120208740236, 0.030778879165649413, 0.030644927978515625, 0.030706527709960938, 0.03097923278808594, 0.03069523239135742, 0.0310435848236084, 0.030795936584472657, 0.03116534423828125, 0.030826847076416017, 0.03054243278503418, 0.030822656631469728, 0.03067840003967285, 0.03089446449279785, 0.03103116798400879, 0.03102511978149414, 0.030967071533203126, 0.03100364875793457, 0.031340831756591796, 0.03165920066833496, 0.03120518493652344, 0.03130380821228027, 0.030951904296875, 0.03096486473083496, 0.031349632263183595, 0.030934560775756837, 0.030840831756591795, 0.030859743118286133, 0.030934207916259764, 0.030817472457885742, 0.03090598487854004, 0.031024351119995117, 0.030904991149902344, 0.030974079132080078, 0.031005823135375976, 0.031054431915283204, 0.03107811164855957, 0.030912256240844725, 0.03127193641662598, 0.030947328567504883, 0.03090640068054199, 0.031299360275268556, 0.030877824783325195, 0.031035263061523436, 0.031025375366210937, 0.03077507209777832, 0.030889984130859374, 0.03091561508178711, 0.03074905586242676, 0.030786304473876952, 0.03094646453857422, 0.030737119674682616, 0.03072323226928711, 0.030710336685180663, 0.031189184188842773, 0.030703231811523436, 0.030824192047119142, 0.030362560272216798, 0.03031235122680664, 0.03057744026184082, 0.03088662338256836, 0.03024105644226074, 0.030525440216064452, 0.030300159454345704, 0.030365695953369142, 0.030600959777832032, 0.03062156867980957, 0.030501056671142578, 0.03046009635925293, 0.030681087493896485, 0.030351648330688475, 0.03039398384094238, 0.030701663970947264, 0.030621023178100587, 0.030500640869140627, 0.030448511123657228, 0.030320640563964843, 0.030369728088378907, 0.030604671478271485, 0.030321344375610352, 0.03015884780883789, 0.030277631759643556, 0.03046928024291992, 
0.030390975952148437, 0.030424703598022462, 0.030253887176513672, 0.030369504928588868, 0.030329183578491212, 0.03031376075744629, 0.030170976638793947, 0.030257856369018555, 0.030459423065185547, 0.030505279541015624, 0.030537727355957032, 0.03045487976074219, 0.03042755126953125, 0.030325056076049805, 0.03044937515258789, 0.030551904678344725, 0.03347520065307617, 0.03093708801269531, 0.030636032104492186, 0.030551647186279295, 0.031021087646484376, 0.030540159225463867, 0.030751903533935546, 0.03047305679321289, 0.030624959945678713, 0.03045984077453613, 0.030507583618164063, 0.030744928359985352, 0.03064169692993164, 0.030651968002319337, 0.030729087829589843, 0.0306527042388916, 0.03064089584350586, 0.03054207992553711, 0.03067568016052246, 0.03062579154968262, 0.030583040237426758, 0.03077299118041992, 0.030666175842285155, 0.030486623764038087, 0.03041267204284668, 0.030580415725708007, 0.03049776077270508, 0.03069126319885254, 0.030814207077026368, 0.030810111999511718, 0.031161535263061525, 0.033751873016357424, 0.030863359451293947, 0.03094233512878418, 0.03089308738708496, 0.031235776901245117, 0.03082255935668945, 0.03088912010192871, 0.030720767974853517, 0.030542112350463866, 0.030544960021972656, 0.030548736572265624, 0.03057254409790039, 0.03061257553100586, 0.03069635200500488, 0.030697471618652345, 0.030705663681030275, 0.03071574401855469, 0.030554655075073243, 0.03064713668823242, 0.03074502372741699, 0.030666112899780273, 0.030725088119506836, 0.03056025505065918, 0.030386560440063475, 0.030324575424194335, 0.03025708770751953, 0.03019513511657715, 0.030183839797973632, 0.030293407440185546, 0.030425504684448244, 0.030314687728881837, 0.03028963279724121, 0.030361888885498046, 0.030281919479370117, 0.03028665542602539, 0.030333951950073244, 0.030547584533691406, 0.03106217575073242, 0.03034339141845703, 0.03070534324645996, 0.03018783950805664, 0.03021571159362793, 0.03012041664123535, 0.030218175888061524, 0.030386240005493163, 0.030396703720092774, 0.030605152130126954, 0.030218143463134766, 0.030325632095336914, 0.03040009689331055, 0.030400480270385742, 0.03138179206848145, 0.030402559280395508, 0.03037139129638672, 0.03039267158508301, 0.030416704177856444, 0.030326400756835938, 0.030496831893920898, 0.03051375961303711, 0.030515296936035156, 0.03050281524658203, 0.030608800888061522, 0.03052400016784668, 0.030518272399902343, 0.0306997127532959, 0.030583616256713866, 0.03081785583496094, 0.030515935897827147, 0.03139590454101562, 0.030652063369750977, 0.030543872833251953, 0.030797695159912108, 0.030588640213012695, 0.03058527946472168, 0.030909984588623048, 0.030722496032714843, 0.03056230354309082, 0.03072380828857422, 0.030771488189697264, 0.03060121536254883, 0.030724096298217773, 0.03076313591003418, 0.03078335952758789, 0.03062326431274414, 0.03073891258239746, 0.030683135986328124, 0.030654464721679688, 0.030700927734375, 0.030556800842285157, 0.030552064895629883, 0.03059916877746582, 0.03072822380065918, 0.030592159271240236, 0.03066691207885742, 0.030616224288940428, 0.03076300811767578, 0.030756864547729492, 0.030799776077270507, 0.03052137565612793, 0.030517311096191407, 0.030530624389648438, 0.031003135681152344, 0.03063225555419922, 0.030403039932250978, 0.03086649513244629, 0.03036627197265625, 0.03024668884277344, 0.030417152404785156, 0.030633983612060548, 0.030448991775512694, 0.03015100860595703, 0.030436416625976563, 0.03111849594116211, 0.03123356819152832, 0.030482368469238283, 0.030579008102416993, 0.030422367095947266, 
0.030368127822875977, 0.03028611183166504, 0.03046112060546875, 0.03047097587585449, 0.03038934326171875, 0.030499359130859376, 0.03053001594543457, 0.030404512405395507, 0.030428768157958985, 0.030536096572875978, 0.03034511947631836, 0.03059312057495117, 0.030405855178833006, 0.031025951385498046, 0.030601568222045898, 0.0309529914855957, 0.030681215286254882, 0.03058483123779297, 0.030734336853027344, 0.030459680557250977, 0.030634208679199217, 0.030312320709228516, 0.03021836853027344, 0.03024892807006836, 0.030234655380249022, 0.03042416000366211, 0.030649248123168944, 0.03041279983520508, 0.03043971252441406, 0.030359039306640623, 0.030527999877929687, 0.030569375991821288, 0.030500991821289063, 0.030370208740234376, 0.030503231048583983, 0.030525407791137694, 0.030658559799194338, 0.03056025505065918, 0.03068012809753418, 0.030559167861938477, 0.030580448150634765, 0.030576351165771485, 0.03050044822692871, 0.030611583709716797, 0.030817119598388672, 0.030772319793701174, 0.030837663650512694, 0.03173904037475586, 0.0312390079498291, 0.030682720184326173, 0.030659231185913086, 0.030973695755004884, 0.030666751861572264, 0.03063199996948242, 0.030631423950195313, 0.030653087615966797, 0.030483488082885743, 0.030589920043945312, 0.030521343231201172, 0.030507328033447266, 0.030664384841918944, 0.030447616577148437, 0.030529535293579102, 0.03047756767272949, 0.030513919830322266, 0.030531871795654298, 0.030658559799194338, 0.030631647109985352, 0.030627552032470702, 0.030615455627441408, 0.030315103530883788, 0.031026975631713867, 0.03260416030883789, 0.030616704940795898, 0.030356128692626952, 0.03041878318786621, 0.030318944931030274, 0.030266847610473633, 0.03038470458984375, 0.030554239273071288, 0.03058470344543457, 0.030410943984985353, 0.03035081672668457, 0.030613536834716796, 0.030311071395874023, 0.03088921546936035, 0.030595712661743164, 0.03086089515686035, 0.030742719650268556, 0.03078758430480957, 0.03166550445556641, 0.03057142448425293, 0.031127328872680663, 0.03262051010131836, 0.03105753517150879, 0.0307142391204834, 0.030441471099853516, 0.030390592575073243, 0.030219968795776368, 0.03016499137878418, 0.030523584365844725, 0.030860992431640626, 0.03043984031677246, 0.030096511840820312, 0.03030112075805664, 0.030364799499511718, 0.030173919677734376, 0.030137855529785155, 0.03018998336791992, 0.030166463851928713, 0.030276159286499023, 0.030244768142700194, 0.030174816131591797, 0.030511135101318358, 0.03006502342224121, 0.02994790458679199, 0.02998886489868164, 0.02998201560974121, 0.03011859130859375, 0.02995712089538574, 0.030079072952270507, 0.030020511627197266, 0.03038640022277832, 0.030219520568847656, 0.03042153549194336, 0.03072982406616211, 0.030522943496704102, 0.03038822364807129, 0.030524255752563477, 0.030430335998535157, 0.03043008041381836, 0.030584800720214845, 0.03058684730529785, 0.03059244728088379, 0.030970624923706055, 0.030689407348632812, 0.03071308708190918, 0.03060326385498047, 0.03052400016784668, 0.030680992126464843, 0.030912031173706056, 0.03074640083312988, 0.030769855499267577, 0.03076710319519043, 0.030873600006103515, 0.030619647979736327, 0.030619455337524415, 0.030759008407592773, 0.03064841651916504, 0.03141427230834961, 0.03123404884338379, 0.0307587833404541, 0.03060758399963379, 0.03056835174560547, 0.030674943923950194, 0.030685184478759765, 0.030653791427612306, 0.030657024383544923, 0.030552511215209962, 0.03050499153137207, 0.030217248916625975, 0.03015331268310547, 0.030006336212158202, 0.02993257522583008, 
0.02990412712097168, 0.02996281623840332, 0.03018988800048828, 0.030218080520629884, 0.030113792419433592, 0.030343135833740233, 0.030023263931274413, 0.029884864807128906, 0.03014784049987793, 0.030324735641479493, 0.030281984329223632, 0.034619422912597654, 0.03074662399291992, 0.030825279235839845, 0.030555904388427733, 0.030314399719238282, 0.03031235122680664, 0.030097375869750975]",tokens/s,32.591043424894075,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,14032.388096,7835.942912,0.0,7440.695296,7427.899392,s,1,31.7943828125,31.7943828125,0.0,31.7943828125,31.7943828125,31.7943828125,31.7943828125,[31.7943828125],,kWh,0.0007155273096708242,7.892078725048271e-05,0.00027156299502797965,0.0010660110919492867,,MB,1232.048128,8416.854016,0.0,8000.63488,7884.32384,s,10,1.18458154296875,0.11845815429687498,0.0005181299128065095,0.11830814361572266,0.11893774261474609,0.11936403961181641,0.11970507720947265,"[0.11794691467285157, 0.11884300994873047, 0.11811257934570313, 0.11855452728271484, 0.1179632339477539, 0.11837686157226562, 0.11820524597167968, 0.11854940795898437, 0.11979033660888672, 0.11823942565917969]",tokens/s,2161.1006985506738,kWh,3.5326098436747686e-06,3.8957991699163003e-07,2.3319262296625744e-06,6.254115990328973e-06,tokens/kWh,40933043.19840959,MB,1254.621184,8458.797056,0.0,8042.57792,7975.158272,s,10,51.00871875,5.100871875,0.011844859828030999,5.098849853515625,5.114230322265625,5.118435229492188,5.121799155273437,"[5.08038671875, 5.0956025390625, 5.11215966796875, 5.12264013671875, 5.0955703125, 5.1132958984375, 5.09888427734375, 5.0988154296875, 5.103091796875, 5.08827197265625]",tokens/s,12.350829729476395,kWh,0.00014881954907215942,1.6415289741388052e-05,8.737122886613683e-05,0.0002526060676796843,tokens/kWh,249400.1849547288,,s,630,51.005125488281166,0.08096051664806547,0.0009011453758256804,0.08084107208251953,0.08152753982543945,0.08219734191894532,0.08396592407226564,"[0.08004889678955078, 0.07973391723632813, 0.07983353424072266, 0.0806155548095703, 0.08050505828857422, 0.08078720092773438, 0.07982838439941406, 0.07984630584716797, 0.07974092864990234, 0.08012009429931641, 0.07998220825195312, 0.08041891479492187, 0.07992320251464843, 0.08207769775390625, 0.08002559661865234, 0.0801334686279297, 0.08059318542480469, 0.08030451202392579, 0.0802117462158203, 0.08243987274169921, 0.08063030242919922, 0.08403353881835937, 0.08063385772705078, 0.08059490966796876, 0.08050211334228516, 0.0803081283569336, 0.08033139038085937, 0.08036777496337891, 0.08077926635742187, 0.08039833831787109, 0.07983103942871093, 0.08004402923583985, 0.08027942657470703, 0.08069132995605469, 0.08065599822998047, 0.08053388977050781, 0.0803082275390625, 0.08032665252685547, 0.07985151672363282, 0.07983881378173828, 0.08025334167480469, 0.08040447998046875, 0.08015257263183594, 0.0828006362915039, 0.08027954864501953, 0.08037785339355469, 
0.0799109115600586, 0.0802806396484375, 0.08057337951660157, 0.080442626953125, 0.08004841613769531, 0.07978160095214844, 0.08043292999267578, 0.08143711853027344, 0.08266336059570313, 0.08175062561035157, 0.08216140747070312, 0.08129881286621093, 0.08122252655029297, 0.0813465576171875, 0.08169881439208984, 0.08120223999023438, 0.08140032196044922, 0.08118975830078125, 0.08097689819335938, 0.08098303985595703, 0.08020582580566406, 0.08011366271972656, 0.07996211242675781, 0.07992249298095704, 0.08051757049560547, 0.08083884429931641, 0.080404541015625, 0.08034413146972656, 0.08112429046630859, 0.0808980484008789, 0.08125440216064453, 0.08090160369873046, 0.08059324645996094, 0.08050911712646484, 0.0808095703125, 0.08083293151855468, 0.08079277038574219, 0.08090707397460938, 0.0809144287109375, 0.08101888275146485, 0.08074649810791015, 0.08096514892578124, 0.08108489227294922, 0.08265510559082032, 0.08139170837402344, 0.08125440216064453, 0.0809552993774414, 0.08106610870361328, 0.08107212829589844, 0.08144281768798828, 0.08083660888671874, 0.08111270141601562, 0.08083084869384766, 0.08103321838378906, 0.08073011016845703, 0.08105165100097657, 0.08126054382324219, 0.08063085174560547, 0.08065449523925781, 0.08044035339355468, 0.08105753326416015, 0.0806903076171875, 0.08100953674316407, 0.08043692779541016, 0.08067833709716797, 0.079967041015625, 0.08032249450683594, 0.08075071716308593, 0.08089177703857423, 0.08078054046630859, 0.0804668197631836, 0.08044915008544921, 0.08434633636474609, 0.08055490875244141, 0.08090764617919922, 0.08079341125488282, 0.08096441650390625, 0.08107612609863281, 0.08100179290771484, 0.08087334442138672, 0.08291065979003906, 0.08087238311767578, 0.08107955169677734, 0.08168319702148437, 0.08113152313232422, 0.0807383041381836, 0.08071766662597656, 0.08222281646728516, 0.08216620635986328, 0.08061542510986328, 0.08112127685546874, 0.08092025756835937, 0.08056588745117188, 0.08085574340820313, 0.08094310760498047, 0.08212480163574219, 0.08206130981445313, 0.08191382598876953, 0.08083245086669921, 0.08097187042236328, 0.0809814682006836, 0.08033062744140625, 0.08018806457519531, 0.08141209411621093, 0.08112332916259765, 0.08071782684326172, 0.08326143646240235, 0.08258560180664062, 0.08129945373535157, 0.08103862762451172, 0.08113839721679687, 0.08151859283447266, 0.08114380645751954, 0.08117453002929688, 0.08117862701416016, 0.08144838714599609, 0.08123859405517578, 0.08148172760009766, 0.0809349136352539, 0.08097567749023438, 0.08053369903564453, 0.0805516128540039, 0.08087551879882812, 0.08033721923828124, 0.08062889862060547, 0.0804497299194336, 0.08049839782714843, 0.08056845092773438, 0.08132691192626954, 0.08110079956054687, 0.08049404907226562, 0.0802043228149414, 0.08057619476318359, 0.08092626953125, 0.08078550720214844, 0.08090636444091796, 0.08090064239501953, 0.08057036590576172, 0.08291123199462891, 0.08111011505126953, 0.08103932952880859, 0.08149292755126954, 0.08139366149902344, 0.08096649932861329, 0.08095958709716797, 0.08088889312744141, 0.08139228820800781, 0.08089826965332031, 0.0810558090209961, 0.08134041595458984, 0.08101478576660157, 0.08101251220703125, 0.08053533172607422, 0.08062137603759766, 0.0915882568359375, 0.08059161376953125, 0.08042015838623047, 0.08066294097900391, 0.08052377319335938, 0.08016281890869141, 0.08044748687744141, 0.08073126220703125, 0.08040035247802735, 0.08440064239501953, 0.09083535766601562, 0.08089766693115234, 0.08106009674072266, 0.0811357421875, 0.08083455657958985, 0.08096934509277344, 
0.08074483489990235, 0.08132403564453125, 0.08083622741699219, 0.08070909118652343, 0.08089488220214844, 0.08169801330566406, 0.0811743392944336, 0.08082736206054687, 0.08098925018310547, 0.08123388671875, 0.08099734497070313, 0.0809202880859375, 0.08095772552490234, 0.08094924926757813, 0.08061542510986328, 0.08086937713623046, 0.08088780975341797, 0.08078540802001953, 0.08082637023925782, 0.08070953369140625, 0.08069308471679687, 0.08096953582763672, 0.08057401275634765, 0.08450956726074219, 0.08097586822509766, 0.08085913848876954, 0.0804290542602539, 0.08049868774414062, 0.08143030548095703, 0.0804085464477539, 0.08128492736816406, 0.08056368255615234, 0.08176710510253907, 0.0807503662109375, 0.08054220581054687, 0.08071340942382813, 0.08091126251220702, 0.08127251434326171, 0.08380038452148438, 0.08160665893554687, 0.08105779266357421, 0.08145101165771484, 0.0815308837890625, 0.0810618896484375, 0.0811473617553711, 0.08192607879638672, 0.08238345336914063, 0.08071903991699218, 0.08081081390380859, 0.08052249908447266, 0.0805445098876953, 0.08058060455322266, 0.08064595031738281, 0.08101074981689453, 0.08066780853271484, 0.08064665222167969, 0.08113404846191406, 0.08073654174804687, 0.08034480285644531, 0.08060022735595704, 0.08074050903320312, 0.08024281311035156, 0.08073990631103516, 0.08038706970214844, 0.08062566375732422, 0.08103116607666015, 0.08034265899658204, 0.08129074859619141, 0.08023948669433593, 0.0802529296875, 0.08023859405517578, 0.08051039886474609, 0.08079212951660156, 0.08089190673828126, 0.08092784118652344, 0.08074742126464844, 0.08110284423828125, 0.08100454711914062, 0.08120320129394532, 0.08103282928466797, 0.08135664367675781, 0.08090678405761718, 0.0802911376953125, 0.08042697906494141, 0.08101961517333985, 0.08063999938964844, 0.08032978820800782, 0.0802948455810547, 0.08022179412841797, 0.0836710433959961, 0.0809005126953125, 0.0814571533203125, 0.08068096160888671, 0.08060313415527344, 0.08034674835205079, 0.07997382354736328, 0.08023545837402343, 0.08017715454101562, 0.08024172973632812, 0.08123939514160156, 0.08133904266357422, 0.08138098907470703, 0.08116591644287109, 0.08124425506591797, 0.08126329803466797, 0.08127078247070313, 0.08150947570800782, 0.08163581085205078, 0.08141458892822266, 0.08143257904052735, 0.0809512939453125, 0.08140799713134765, 0.08148915100097656, 0.08261068725585938, 0.08113123321533203, 0.08202909088134766, 0.08114304351806641, 0.08084146881103516, 0.08121139526367188, 0.08123299407958984, 0.0810912322998047, 0.08125443267822266, 0.08090032196044922, 0.08061542510986328, 0.0805580825805664, 0.08080793762207031, 0.0808427505493164, 0.0807995834350586, 0.08105299377441406, 0.08079216003417969, 0.08053289794921875, 0.08067120361328126, 0.08055846405029297, 0.08075263977050781, 0.08085094451904297, 0.08082841491699219, 0.0811878433227539, 0.08141497802734375, 0.08142457580566406, 0.08128717041015625, 0.08237789154052734, 0.08122454071044923, 0.08117862701416016, 0.08277094268798828, 0.08162723541259766, 0.08117670440673828, 0.08117081451416015, 0.08144528198242187, 0.08143462371826173, 0.08103846740722656, 0.0808600311279297, 0.08057357025146485, 0.08130239868164063, 0.0809144287109375, 0.08043094635009766, 0.08051875305175782, 0.08091907501220703, 0.08052329254150391, 0.08084067535400391, 0.08097180938720704, 0.08166925048828125, 0.08079814147949219, 0.08140204620361328, 0.08101888275146485, 0.08317478179931641, 0.08085363006591798, 0.08183586883544922, 0.08112115478515625, 0.08056861114501954, 0.08135065460205078, 
0.08077721405029296, 0.08056832122802735, 0.08079564666748047, 0.08159228515625, 0.0813199691772461, 0.08103321838378906, 0.08123168182373047, 0.08070982360839844, 0.08101683044433594, 0.08100563049316406, 0.08061023712158204, 0.0810322265625, 0.08115094757080078, 0.08021593475341797, 0.08009740447998047, 0.08063795471191407, 0.08045299530029297, 0.08105391693115234, 0.08084931182861328, 0.08064720153808594, 0.08066950225830079, 0.0803430404663086, 0.08022566223144531, 0.08052531433105468, 0.08069404602050781, 0.08019967651367188, 0.08020301055908204, 0.08060176086425781, 0.08104934692382812, 0.08076528167724609, 0.08087142181396484, 0.08062566375732422, 0.08054579162597657, 0.08055712127685546, 0.08075154876708984, 0.08087075042724609, 0.08075325012207031, 0.08074237060546875, 0.08084489440917969, 0.0826235809326172, 0.08175913238525391, 0.08191795349121093, 0.08123916625976563, 0.08141820526123048, 0.08096246337890625, 0.08109260559082031, 0.08105165100097657, 0.08069734191894531, 0.08076083374023438, 0.08137865447998047, 0.08043170928955078, 0.08011504364013672, 0.08052310180664063, 0.08127334594726562, 0.08130802917480469, 0.08099430084228515, 0.08097996520996094, 0.08072767639160157, 0.08068134307861329, 0.08069660949707032, 0.08046870422363281, 0.08032169342041015, 0.08022911834716796, 0.08085453033447265, 0.08131439971923828, 0.08150761413574219, 0.0812033920288086, 0.08137372589111327, 0.08083455657958985, 0.08109017944335938, 0.08152716827392578, 0.08161228942871093, 0.08114636993408203, 0.08093875122070313, 0.08092697906494141, 0.08156732940673828, 0.08150640106201172, 0.08116665649414062, 0.08132198333740234, 0.08115724945068359, 0.08122000122070312, 0.08005248260498046, 0.0801487045288086, 0.07994080352783203, 0.08049657440185547, 0.08038079833984375, 0.08004605102539063, 0.07992323303222656, 0.08062710571289063, 0.08065673828125, 0.08066483306884766, 0.08076668548583985, 0.08093110656738281, 0.08085081481933594, 0.08140940856933594, 0.08228457641601562, 0.080650146484375, 0.08051181030273437, 0.08005836486816406, 0.07957478332519531, 0.07974118041992187, 0.08030352020263672, 0.08082006072998046, 0.08195353698730469, 0.08097100830078124, 0.08158428955078124, 0.08113008117675781, 0.08093081665039062, 0.080932861328125, 0.0810414047241211, 0.08338822174072266, 0.08137075042724609, 0.08134217834472657, 0.08150828552246094, 0.08105792236328126, 0.08148397064208984, 0.08108707427978516, 0.08048226928710937, 0.08066416168212891, 0.08056310272216796, 0.08062322998046875, 0.08073458862304687, 0.08014643096923828, 0.08061542510986328, 0.08065023803710937, 0.08083232116699218, 0.08081161499023437, 0.08032316589355469, 0.08068035125732421, 0.08053616333007813, 0.0807852783203125, 0.08068313598632812, 0.08058265686035156, 0.0810250244140625, 0.08065638732910156, 0.0803737564086914, 0.08051020812988281, 0.08195954895019532, 0.08096112060546876, 0.08099894714355468, 0.08116143798828125, 0.08078825378417968, 0.08065760040283203, 0.08083683013916015, 0.08108092498779297, 0.08096153259277344, 0.08125186920166015, 0.08105788421630859, 0.08095782470703125, 0.08235826873779296, 0.08088780975341797, 0.08094499206542968, 0.08076509094238281, 0.08044454193115234, 0.08045148468017578, 0.08071676635742188, 0.08059699249267578, 0.08047821044921875, 0.0805946273803711, 0.08053548431396484, 0.08214073944091797, 0.08063632202148438, 0.08095763397216797, 0.08097200012207031, 0.08092467498779297, 0.08097792053222656, 0.08060313415527344, 0.08065229034423828, 0.08093695831298828, 0.08077311706542968, 
0.08065424346923829, 0.08088127899169922, 0.08126306915283203, 0.08127078247070313, 0.08161603546142578, 0.08125116729736329, 0.08143993377685547, 0.08660809326171875, 0.08251433563232421, 0.0809959716796875, 0.08138604736328126, 0.08077455902099609, 0.08055414581298828, 0.08166143798828125, 0.081355712890625, 0.08076070404052735, 0.08063807678222656, 0.08092655944824219, 0.08052751922607422, 0.08066028594970703, 0.0798082275390625, 0.08052374267578125, 0.08027545928955078, 0.08044892883300782, 0.08014851379394532, 0.08025145721435546, 0.08020582580566406, 0.080648193359375, 0.08056832122802735, 0.08072176361083984, 0.08075443267822266, 0.0803117446899414, 0.08000800323486328, 0.07980867004394532, 0.0807442855834961, 0.0808951644897461, 0.08038658905029297, 0.08074285125732422, 0.08074960327148438, 0.08101577758789062, 0.08071887969970704, 0.08105878448486328, 0.08091648101806641, 0.08107606506347656, 0.08022370910644532, 0.08024467468261719, 0.08276863861083984, 0.08038768005371094, 0.07992361450195312, 0.07995954895019532, 0.08014899444580079, 0.08000921630859376, 0.08063180541992188, 0.08131734466552734, 0.08291712188720703, 0.08053225708007812, 0.08039628601074218, 0.0802930908203125, 0.08041142272949219, 0.08047615814208985, 0.0805580825805664, 0.0825384979248047, 0.08075414276123047, 0.08031215667724609, 0.08017375946044922, 0.08142642974853516, 0.08091033935546875, 0.08060022735595704, 0.08081455993652344, 0.08329663848876953, 0.08177436828613281, 0.08138755035400391, 0.0812171859741211, 0.08086102294921875]",tokens/s,12.351699833475491,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,26175.434752,13916.5696,0.0,13618.905088,13613.80352,s,1,53.75973828125,53.75973828125,0.0,53.75973828125,53.75973828125,53.75973828125,53.75973828125,[53.75973828125],,kWh,0.001350468553087482,0.00014895979873512868,0.00045603730927400166,0.0019554656610966123,,MB,1265.41824,14132.576256,0.0,13709.082624,13670.340608,s,10,1.6482692108154293,0.16482692108154298,0.0004572045780802144,0.16471437072753906,0.16551067962646482,0.16555860061645508,0.16559693740844728,"[0.16513999938964843, 0.1644509735107422, 0.16442025756835937, 0.16437164306640625, 0.1643513641357422, 0.1649752960205078, 0.1644534454345703, 0.16499967956542969, 0.1655000305175781, 0.16560652160644532]",tokens/s,1553.1443426850883,kWh,4.895102422292312e-06,5.398366930039592e-07,3.256368345833325e-06,8.691307461129597e-06,tokens/kWh,29454716.812736947,MB,1298.055168,14300.348416,0.0,13876.854784,13822.915584,s,10,74.646583984375,7.4646583984375,0.011918092030375596,7.462617187499999,7.483004833984374,7.483400122070313,7.483716352539062,"[7.4829169921875, 7.453759765625, 7.48379541015625, 7.451357421875, 7.450875, 7.46136279296875, 7.47392138671875, 7.46387158203125, 7.4543837890625, 
7.47033984375]",tokens/s,8.439769998475368,kWh,0.00021795574103479124,2.4041462252440785e-05,0.00012498717869336543,0.00036698438198059743,tokens/kWh,171669.4308896525,,s,630,74.64246426391607,0.11848010200621589,0.0010406838777948808,0.11821535873413086,0.11935227737426758,0.12048230094909668,0.1222396103668213,"[0.11894992065429688, 0.11880242919921875, 0.11831603240966797, 0.11909519958496094, 0.1185538558959961, 0.11876633453369141, 0.11935059356689454, 0.1203944320678711, 0.1191731185913086, 0.11887165069580079, 0.1181126708984375, 0.11866239929199218, 0.12888255310058594, 0.11930303955078125, 0.11897392272949218, 0.12146482849121094, 0.11815376281738281, 0.11879833221435547, 0.11826995086669922, 0.11890688323974609, 0.12061634826660156, 0.11922492980957031, 0.11770470428466796, 0.11799251556396484, 0.11781830596923829, 0.11815315246582031, 0.11820243072509766, 0.11806934356689452, 0.1188289566040039, 0.11818335723876953, 0.11908255767822265, 0.12140611267089843, 0.11878435516357422, 0.11813273620605469, 0.11822281646728515, 0.11796521759033203, 0.11802323150634765, 0.11819657897949219, 0.11815548706054688, 0.11852620697021485, 0.11836390686035156, 0.1182043228149414, 0.11821453094482422, 0.11844630432128907, 0.1179516830444336, 0.11843257904052734, 0.11865497589111328, 0.118168701171875, 0.11888265228271484, 0.11791020965576172, 0.11773321533203125, 0.11867164611816407, 0.11845549011230469, 0.12104348754882813, 0.1179090576171875, 0.11811590576171875, 0.117836669921875, 0.11796803283691407, 0.1183875503540039, 0.11808156585693359, 0.11777645111083984, 0.11795263671875, 0.11828406524658203, 0.11824368286132812, 0.12039523315429687, 0.11791340637207032, 0.12306275177001953, 0.11822077178955079, 0.11788038635253906, 0.11828883361816406, 0.11802559661865235, 0.11841190338134766, 0.11789081573486328, 0.11803193664550782, 0.11777827453613281, 0.1185882568359375, 0.11770169830322266, 0.11783216094970703, 0.12079561614990235, 0.11800505828857422, 0.11770716857910156, 0.11852188873291016, 0.12133548736572265, 0.11748748779296875, 0.11761529541015625, 0.11741734313964844, 0.11832179260253907, 0.11788486480712891, 0.11790505981445312, 0.1189883804321289, 0.11932755279541016, 0.11791974639892579, 0.11770777893066406, 0.11769068908691406, 0.11773321533203125, 0.12221116638183593, 0.1180917739868164, 0.11809308624267578, 0.11957833862304687, 0.11808870697021484, 0.11791769409179688, 0.1176962890625, 0.11752432250976562, 0.11800796508789063, 0.11785804748535156, 0.11810269165039063, 0.11752012634277344, 0.1177127685546875, 0.11767212677001954, 0.11777865600585938, 0.11755289459228516, 0.11778256225585937, 0.11764940643310547, 0.11797708892822266, 0.12105846405029297, 0.11808956909179688, 0.11738214111328125, 0.11778047943115234, 0.11755315399169922, 0.1178419189453125, 0.11804083251953125, 0.1182099838256836, 0.118169921875, 0.1182064666748047, 0.11788470458984375, 0.1176764144897461, 0.118340576171875, 0.11758796691894531, 0.11781529235839844, 0.11803142547607422, 0.11919993591308593, 0.11835222625732422, 0.11827452850341796, 0.11878559875488282, 0.11853043365478516, 0.1184616928100586, 0.11839360046386718, 0.1190374755859375, 0.11892784118652344, 0.11906764984130859, 0.11876454162597656, 0.11826790618896485, 0.11791529846191406, 0.11865491485595703, 0.11857548522949218, 0.11877785491943359, 0.11979154968261718, 0.12103660583496094, 0.11838902282714844, 0.12049958038330078, 0.11908159637451173, 0.11853353881835937, 0.11881334686279296, 0.11842678070068359, 0.1190052490234375, 0.11865734100341797, 
0.11898716735839844, 0.11816105651855469, 0.11807791900634766, 0.11782701110839844, 0.11766214752197265, 0.12099954986572266, 0.1187713623046875, 0.11853199768066407, 0.11935417938232422, 0.1181286392211914, 0.11823251342773437, 0.11806915283203125, 0.1181657943725586, 0.1181464614868164, 0.117834716796875, 0.11858284759521484, 0.1205110092163086, 0.1200014419555664, 0.11830751800537109, 0.11849574279785156, 0.11811583709716797, 0.11819817352294922, 0.12981919860839844, 0.11823292541503906, 0.11925638580322266, 0.11916767883300781, 0.11776000213623047, 0.11835334777832031, 0.11759446716308594, 0.11802976226806641, 0.11824617767333985, 0.1177702407836914, 0.11800307464599609, 0.11819971466064454, 0.11807401275634766, 0.11752857971191406, 0.11776976013183593, 0.11783837127685547, 0.11762681579589844, 0.12089523315429687, 0.11950105285644531, 0.11837238311767578, 0.11809174346923829, 0.1178359375, 0.11784111785888672, 0.11794627380371094, 0.11838742065429687, 0.11776409912109374, 0.11835353851318359, 0.11805017852783203, 0.12277247619628906, 0.1181075210571289, 0.11836479949951172, 0.11772844696044922, 0.11785295867919922, 0.11799868774414063, 0.11813983917236329, 0.11871459197998047, 0.11792771148681641, 0.11805286407470703, 0.11734604644775391, 0.11778924560546875, 0.11751340484619141, 0.11777101135253906, 0.11787366485595703, 0.11786310577392578, 0.11823929595947266, 0.11855990600585938, 0.11797577667236328, 0.11766387176513672, 0.1179525146484375, 0.11754496002197265, 0.11792588806152343, 0.11822099304199218, 0.11830048370361328, 0.11774697875976563, 0.12108258819580078, 0.11780095672607421, 0.1179135971069336, 0.11805091094970703, 0.11754723358154297, 0.11806278228759766, 0.117501953125, 0.118642333984375, 0.11800412750244141, 0.11857299041748047, 0.11775917053222656, 0.118074462890625, 0.12225917053222657, 0.1181995849609375, 0.11809446716308594, 0.1194427490234375, 0.11795142364501954, 0.11806937408447266, 0.11783344268798829, 0.11804646301269531, 0.11831302642822265, 0.11852003479003906, 0.11871788787841797, 0.12024447631835937, 0.11823702239990234, 0.11794003295898438, 0.11774626922607422, 0.11775267028808593, 0.11964297485351562, 0.1183826904296875, 0.11787471771240235, 0.11802416229248047, 0.11837849426269531, 0.11915058898925782, 0.11774928283691406, 0.11797164916992188, 0.1179993896484375, 0.11831705474853516, 0.1181286392211914, 0.11820435333251954, 0.1181072998046875, 0.11807593536376954, 0.11807373046875, 0.11799088287353515, 0.11799945831298828, 0.11810441589355469, 0.11950319671630859, 0.11804441833496093, 0.1182149429321289, 0.11800982666015625, 0.1180057601928711, 0.11802114868164063, 0.11809174346923829, 0.11788595581054688, 0.11831910705566406, 0.11816754913330078, 0.11877283477783203, 0.11828931427001953, 0.11793030548095704, 0.11804422760009765, 0.11808985900878906, 0.11803250885009765, 0.11824752044677735, 0.11826563262939453, 0.11868569946289062, 0.11952259063720704, 0.11807193756103515, 0.11795811462402343, 0.11826480102539062, 0.11794806671142578, 0.1183846435546875, 0.11815074920654296, 0.11806556701660156, 0.11834162902832031, 0.11823023986816407, 0.1178590087890625, 0.1175547866821289, 0.11909375762939453, 0.11772108459472656, 0.11826710510253906, 0.11792054748535157, 0.11843583679199218, 0.11842086029052734, 0.11855181121826172, 0.11889926147460937, 0.11772911834716797, 0.11823958587646484, 0.11805907440185547, 0.11770162963867188, 0.11844525146484375, 0.11788057708740235, 0.11788082885742188, 0.11773462677001953, 0.11768879699707031, 0.120461181640625, 
0.1181572494506836, 0.11827865600585938, 0.11821212768554687, 0.12142607879638671, 0.11797856140136719, 0.11826265716552735, 0.11821577453613281, 0.11805174255371094, 0.11777347564697266, 0.11848137664794922, 0.11809120178222657, 0.1198990707397461, 0.1183006362915039, 0.11818966674804687, 0.11793788909912109, 0.11771568298339843, 0.11872982025146485, 0.1195160675048828, 0.11867318725585937, 0.11784623718261719, 0.1185709457397461, 0.11831871795654297, 0.11838886260986328, 0.11833171081542969, 0.1190868148803711, 0.1185426254272461, 0.1181122589111328, 0.11826175689697266, 0.11821469116210938, 0.11928342437744141, 0.11835533142089844, 0.11848918151855468, 0.1183424301147461, 0.118748291015625, 0.11988028717041016, 0.12009843444824218, 0.11864335632324219, 0.11939183807373047, 0.1182027816772461, 0.11789011383056641, 0.11801692962646484, 0.11788313293457031, 0.11847481536865234, 0.11821756744384766, 0.11827833557128906, 0.11818495941162109, 0.11763619232177734, 0.11763744354248047, 0.11783824157714844, 0.11853196716308594, 0.11808982086181641, 0.11965267181396484, 0.11849568176269532, 0.1177047348022461, 0.11764940643310547, 0.11801634979248046, 0.11782518768310547, 0.11796275329589843, 0.11832927703857422, 0.11779714965820312, 0.11856626892089844, 0.11808735656738281, 0.11815805053710937, 0.11763302612304688, 0.11864883422851563, 0.11787251281738281, 0.11812876892089844, 0.11847248077392578, 0.11877193450927734, 0.11825151824951172, 0.12026383972167969, 0.11806934356689452, 0.11844214630126954, 0.11854454040527344, 0.11878031921386718, 0.11890914916992187, 0.11863021087646484, 0.1188232650756836, 0.11859728240966796, 0.11915878295898437, 0.1180549087524414, 0.1180355224609375, 0.12246966552734374, 0.11853788757324218, 0.11910307312011718, 0.11937776184082032, 0.11834969329833984, 0.11877190399169922, 0.11892153930664062, 0.1184276123046875, 0.11880448150634766, 0.11843606567382813, 0.11844812774658203, 0.11894374084472656, 0.11992793273925781, 0.1183787841796875, 0.11846502685546875, 0.11872879791259766, 0.11828134155273437, 0.11821965026855469, 0.12153036499023437, 0.11874095916748047, 0.11837033843994141, 0.11827200317382812, 0.11799961853027344, 0.11794226837158203, 0.11814409637451172, 0.1201011505126953, 0.11892352294921875, 0.11827033233642578, 0.1183109130859375, 0.11909043121337891, 0.11841551971435547, 0.11843164825439453, 0.11859967803955078, 0.11792534637451171, 0.12183353424072266, 0.11859196472167968, 0.11843788909912109, 0.11785788726806641, 0.11794678497314454, 0.11806511688232423, 0.1179843521118164, 0.11789590454101563, 0.11806537628173829, 0.11783372497558593, 0.11838054656982422, 0.12225122833251953, 0.11798735809326172, 0.1181488037109375, 0.11830726623535157, 0.11854630279541016, 0.118301025390625, 0.1183556137084961, 0.11891667175292969, 0.1183271713256836, 0.1179832992553711, 0.11867801666259765, 0.11827609252929687, 0.11816706848144531, 0.11807491302490235, 0.11782035064697266, 0.11779686737060546, 0.11777843475341797, 0.11862016296386718, 0.11824928283691406, 0.11778665924072265, 0.11807555389404296, 0.11821049499511718, 0.11809593963623047, 0.11816255950927734, 0.1187910385131836, 0.11792998504638671, 0.12145049285888672, 0.11803145599365235, 0.11866000366210938, 0.11774361419677734, 0.11814297485351563, 0.11810972595214844, 0.11811682891845703, 0.11862220764160156, 0.11833135986328125, 0.1187790756225586, 0.11796521759033203, 0.11839142608642578, 0.12147602844238281, 0.11836914825439453, 0.11971699523925781, 0.11846739196777344, 0.11865682983398437, 
0.11845865631103515, 0.1181593246459961, 0.11774172973632813, 0.11791295623779297, 0.11878652954101562, 0.11806716918945312, 0.11825769805908203, 0.11836006164550782, 0.11833753967285156, 0.11745391845703125, 0.11775478363037109, 0.11791078186035156, 0.11956243133544922, 0.11876000213623047, 0.11784178924560547, 0.1173402557373047, 0.11844918060302734, 0.11759539031982422, 0.11771673583984375, 0.11953116607666016, 0.11828435516357422, 0.11765760040283203, 0.11851805114746093, 0.1185315170288086, 0.11774352264404297, 0.11859184265136719, 0.11811052703857422, 0.11807743835449219, 0.11811430358886718, 0.12107981109619141, 0.1190749740600586, 0.1181361312866211, 0.1180752944946289, 0.11819667053222656, 0.11791702270507813, 0.11815203094482422, 0.11777433776855468, 0.11844963073730469, 0.11806998443603516, 0.11828614044189453, 0.11855574035644531, 0.1183446044921875, 0.11859056091308594, 0.11847772979736328, 0.1185116195678711, 0.11806515502929688, 0.11815676879882812, 0.1194134750366211, 0.1179268798828125, 0.11808035278320313, 0.11812393951416016, 0.11820204925537109, 0.11828246307373047, 0.11790771484375, 0.11794252777099609, 0.11800192260742187, 0.11784918212890624, 0.11813980865478516, 0.11788076782226563, 0.11817353820800781, 0.11802342224121094, 0.11829545593261719, 0.11796803283691407, 0.11813565063476562, 0.11944780731201173, 0.11795635223388672, 0.1202503662109375, 0.1185255355834961, 0.11874124908447266, 0.11855081939697265, 0.11846086120605469, 0.11833344268798827, 0.11852390289306641, 0.11938195037841796, 0.11845600128173828, 0.11819468688964843, 0.12093571472167969, 0.11863100433349609, 0.11824537658691406, 0.11897148895263672, 0.11933491516113282, 0.12059535980224609, 0.11839405059814453, 0.11916886138916015, 0.11817878723144531, 0.11854438018798828, 0.11832032012939453, 0.11779360198974609, 0.12055145263671875, 0.11827152252197265, 0.11773158264160156, 0.11803257751464843, 0.118614013671875, 0.11858697509765626, 0.11817817687988282, 0.12139318084716796, 0.11914832305908203, 0.11807759857177734, 0.11843545532226563, 0.11822284698486328, 0.11842364501953125, 0.11860733032226563, 0.11838349151611328, 0.11786022186279296, 0.1182779541015625, 0.11777037048339843, 0.11801734161376953, 0.11808787536621093, 0.11789791870117187, 0.11882086181640625, 0.1187099838256836, 0.11859075164794922, 0.12084326171875, 0.12012483215332032, 0.11766758728027343, 0.1179615707397461, 0.11822284698486328, 0.11808563232421875, 0.11846015930175781, 0.1181551055908203, 0.11830867004394531, 0.11788758087158203, 0.11800371551513672, 0.11805286407470703, 0.11807321929931641, 0.1179455337524414, 0.11935206604003906, 0.11810425567626953, 0.11852390289306641, 0.1180313949584961, 0.11853884887695312, 0.11821094512939453, 0.11820236968994141]",tokens/s,8.440235812318395,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,13813.858304,7486.701568,0.0,7084.179456,7080.583168,s,1,31.31094921875,31.31094921875,0.0,31.31094921875,31.31094921875,31.31094921875,31.31094921875,[31.31094921875],,kWh,0.0007008937193125121,7.730539442620638e-05,0.00023631130015998314,0.0010145104138987015,,MB,5132.20608,7591.559168,0.0,7168.065536,7128.086528,s,10,1.0703444137573244,0.10703444137573244,0.0007815358506602269,0.1068664779663086,0.10729854965209962,0.10830807304382324,0.10911569175720215,"[0.10684210968017578, 0.10707421112060547, 0.10931759643554688, 0.10689459228515626, 0.10694681549072266, 0.10689084625244141, 0.1064840316772461, 0.10670336151123047, 0.10666864013671876, 0.10652220916748047]",tokens/s,2391.7535020465107,kWh,3.1799630399909213e-06,3.5060950209038893e-07,2.1192800770651066e-06,5.649852619146417e-06,tokens/kWh,45310916.453370534,MB,5132.20608,7696.416768,0.0,7272.923136,7223.91552,s,10,60.7752216796875,6.07752216796875,0.01148666381905837,6.075971923828125,6.092815283203125,6.093394702148438,6.093858237304688,"[6.0926865234375, 6.0777548828125, 6.0725390625, 6.09397412109375, 6.090630859375, 6.078267578125, 6.07418896484375, 6.07404931640625, 6.06397705078125, 6.0571533203125]",tokens/s,10.366066673032979,kWh,0.00017780353387917294,1.961251291824538e-05,8.687404111753607e-05,0.0002842900879149544,tokens/kWh,221604.63089675677,,s,630,60.77161200714114,0.09646287620181129,0.0008284777539721683,0.09628259277343751,0.09721044464111328,0.09803347434997559,0.09979694297790528,"[0.09670655822753907, 0.0970035171508789, 0.09600316619873046, 0.09651216125488281, 0.0958461456298828, 0.09595516967773438, 0.09526761627197265, 0.09662403106689453, 0.09606409454345703, 0.0966451187133789, 0.09668915557861328, 0.09726976013183594, 0.09637094116210937, 0.09622179412841797, 0.09637494659423829, 0.0965183334350586, 0.09620256042480468, 0.09586003112792969, 0.0966932144165039, 0.09651171112060547, 0.09675711822509765, 0.09832621002197266, 0.09673753356933594, 0.09640825653076172, 0.09869503784179688, 0.09655718231201171, 0.0964078369140625, 0.09580073547363281, 0.09636895751953126, 0.09688473510742188, 0.09717964935302735, 0.09575772857666015, 0.09724905395507813, 0.09582284545898437, 0.09626319885253906, 0.097546142578125, 0.09725836944580078, 0.09814665222167969, 0.09726732635498046, 0.09775830078125, 0.09671775817871094, 0.09604434967041016, 0.09841222381591797, 0.09655935668945312, 0.10067635345458985, 0.09627420806884765, 0.09726179504394532, 0.09662470245361328, 0.09618425750732422, 0.09684992218017578, 0.09681283569335937, 0.0962276153564453, 0.09843833923339844, 0.09676390075683594, 0.09614739227294922, 0.09581673431396484, 0.09616361236572266, 0.09617120361328126, 0.09631161499023437, 0.09610835266113281, 0.09662252807617187, 0.0961502685546875, 0.09644809722900391, 0.0968306884765625, 0.0966233901977539, 0.09582790374755859, 0.09597344207763672, 0.09607373046875, 0.09647926330566406, 0.09660002899169921, 0.0963031005859375, 0.09569071960449219, 0.0968638687133789, 0.09658719635009766, 0.09678128051757813, 0.09624095916748047, 0.09635910034179687, 0.0962448959350586, 0.09665827178955078, 0.0968458251953125, 0.09755238342285157, 0.09874153900146485, 0.09805078125, 0.09730662536621094, 0.09654476928710938, 0.09595830535888672, 0.0963427505493164, 0.09636188507080078, 0.09661910247802734, 0.09644825744628906, 0.0966556167602539, 0.09601023864746094, 0.09628189086914063, 0.09693869018554688, 0.0967577896118164, 0.09617132568359375, 
0.09623452758789063, 0.09604208374023437, 0.09645891571044922, 0.09621731567382813, 0.09628463745117187, 0.09648332977294923, 0.09604323577880859, 0.09683177947998046, 0.09715071868896484, 0.09597325134277344, 0.0959079360961914, 0.09623078155517578, 0.09617062377929687, 0.09583443450927734, 0.09637251281738281, 0.09587513732910156, 0.09625558471679688, 0.09646294403076172, 0.097046142578125, 0.096633056640625, 0.09584051513671875, 0.09610393524169922, 0.0960292510986328, 0.09617222595214844, 0.09599539184570313, 0.09709552001953126, 0.0961428451538086, 0.0955847396850586, 0.09725762939453125, 0.09693328094482422, 0.09642556762695312, 0.09600656127929688, 0.09585203552246094, 0.09608761596679688, 0.09594947052001954, 0.09665769958496094, 0.09642707061767578, 0.09581049346923828, 0.09631324768066406, 0.09614742279052735, 0.0954201889038086, 0.09573334503173828, 0.09907500457763672, 0.09597523498535156, 0.09877654266357422, 0.09660435485839844, 0.09650646209716797, 0.09707904052734374, 0.09597337341308594, 0.09679462432861329, 0.09669798278808593, 0.09626457977294922, 0.0963420181274414, 0.09585273742675782, 0.09629987335205079, 0.09644857788085938, 0.09677327728271484, 0.09593590545654297, 0.09769404602050781, 0.09657341003417969, 0.09683747100830079, 0.0962524185180664, 0.09608979034423829, 0.09589555358886719, 0.0959170913696289, 0.09596578979492187, 0.09607619476318359, 0.09594790649414063, 0.0965804443359375, 0.09690726470947265, 0.09771212768554688, 0.09672678375244141, 0.09626265716552734, 0.0960318374633789, 0.09596697235107422, 0.09570626831054688, 0.096133056640625, 0.09594633483886719, 0.09609366607666016, 0.09543100738525391, 0.09684390258789062, 0.09593791961669922, 0.09622121429443359, 0.09569481658935547, 0.09619107055664063, 0.09574960327148438, 0.09616172790527344, 0.09620162963867188, 0.09801232147216797, 0.09776009368896485, 0.09586278533935547, 0.09651100921630859, 0.09605628967285157, 0.09589513397216796, 0.09577244567871093, 0.09517494201660157, 0.09602454376220704, 0.09592031860351563, 0.09613097381591797, 0.0960000991821289, 0.09671475219726562, 0.10504598236083984, 0.09669551849365235, 0.0962895965576172, 0.09660620880126954, 0.09633990478515625, 0.09713654327392578, 0.09668009948730469, 0.09699430084228515, 0.09637967681884765, 0.09655728149414063, 0.09628787231445313, 0.09643673706054688, 0.09628710174560547, 0.09613926696777343, 0.09703218841552734, 0.09669363403320312, 0.09636265563964844, 0.09657801818847657, 0.09646412658691406, 0.09641053009033203, 0.09640118408203124, 0.0964948501586914, 0.09636646270751953, 0.0965355224609375, 0.09603887939453125, 0.09615270233154297, 0.09721692657470703, 0.09633020782470703, 0.0968807373046875, 0.09635971069335937, 0.09600592041015625, 0.09875132751464843, 0.09599590301513672, 0.09628444671630859, 0.09614486694335937, 0.09642880249023437, 0.0961095962524414, 0.09630818939208985, 0.09622525024414062, 0.09633757019042968, 0.09614784240722657, 0.09682134246826171, 0.09714678192138672, 0.09633586883544921, 0.10010944366455078, 0.09835337829589844, 0.09643484497070312, 0.09709792327880859, 0.09647494506835938, 0.09871564483642578, 0.09627648162841797, 0.09711980438232422, 0.09654332733154297, 0.09720972442626953, 0.09640998077392578, 0.09612687683105468, 0.09672630310058594, 0.0962874526977539, 0.09829373168945313, 0.09602156829833984, 0.09670345306396484, 0.09648102569580078, 0.09623168182373047, 0.09608601379394531, 0.09609248352050781, 0.09647830200195312, 0.09612550354003906, 0.09647516632080078, 0.09706086730957031, 
0.09663488006591797, 0.09613948822021484, 0.0961923828125, 0.09631529235839843, 0.09609324645996094, 0.09623757171630859, 0.1015871353149414, 0.09653939056396485, 0.09701139068603516, 0.09611843109130859, 0.09936348724365235, 0.09624371337890625, 0.09627648162841797, 0.09692774200439454, 0.09652041625976562, 0.0964441909790039, 0.09730652618408203, 0.09679472351074218, 0.09682742309570312, 0.10071241760253906, 0.0968551025390625, 0.09636483001708984, 0.09632755279541015, 0.09737910461425782, 0.09600819396972657, 0.09649766540527344, 0.09595667266845703, 0.09669475555419922, 0.09626306915283203, 0.09641056060791016, 0.09618172454833984, 0.0962771224975586, 0.09609126281738281, 0.09614415740966797, 0.09623117065429687, 0.09621478271484375, 0.09606966400146484, 0.09651033782958984, 0.09666569519042968, 0.09653862762451172, 0.09647206115722656, 0.0967874526977539, 0.09638706970214844, 0.09647936248779297, 0.0963889923095703, 0.09598770904541015, 0.0959568328857422, 0.09810959625244141, 0.09654886627197265, 0.0963193588256836, 0.09621507263183594, 0.09566422271728516, 0.09611004638671874, 0.09613980865478515, 0.09636067199707031, 0.09599919891357422, 0.09579373168945313, 0.09595085144042968, 0.09645696258544922, 0.09610749053955078, 0.0978091812133789, 0.09622057342529297, 0.09620336151123046, 0.097261474609375, 0.09635552215576172, 0.095994140625, 0.09612351989746094, 0.09596109008789062, 0.09653584289550782, 0.09646358489990234, 0.09601638031005859, 0.095702880859375, 0.09582720184326173, 0.09589852905273437, 0.09592364501953125, 0.09608863830566407, 0.09595494079589843, 0.09952652740478515, 0.09819344329833984, 0.09640303802490234, 0.09664972686767578, 0.09709110260009765, 0.09639356994628906, 0.10065113830566406, 0.09664102172851563, 0.09662054443359375, 0.09627238464355468, 0.09594464111328126, 0.09641986846923828, 0.09583411407470703, 0.09662671661376954, 0.09794927978515625, 0.09636700439453125, 0.09580300903320313, 0.09610649871826171, 0.09616790771484375, 0.09650748443603516, 0.09583904266357422, 0.09605084991455078, 0.09675606536865235, 0.09668550109863282, 0.09657791900634766, 0.09749238586425782, 0.0961497573852539, 0.09641584014892578, 0.09630169677734375, 0.0964155502319336, 0.09651971435546874, 0.09659808349609375, 0.09621887969970704, 0.09625872039794922, 0.09600959777832031, 0.09624972534179688, 0.09724652862548828, 0.09701042938232422, 0.09625798034667969, 0.09641983795166016, 0.09635356903076171, 0.09629318237304688, 0.0962371826171875, 0.09990739440917969, 0.09768470764160156, 0.09647824096679687, 0.09628540802001953, 0.09597747039794922, 0.09608096313476562, 0.09633888244628906, 0.09628777313232421, 0.09581053161621093, 0.09616083526611328, 0.09654982757568359, 0.09611993408203125, 0.09613504028320312, 0.09616281890869141, 0.09613507080078125, 0.09607408142089843, 0.09576140594482421, 0.09627273559570312, 0.095912353515625, 0.09647618865966796, 0.09669840240478515, 0.09619961547851562, 0.09670620727539063, 0.09643452453613281, 0.09610070037841797, 0.09628329467773437, 0.09637891387939453, 0.09679151916503906, 0.09597350311279297, 0.09640092468261718, 0.09625839996337891, 0.09700147247314453, 0.09644646453857422, 0.09644544219970703, 0.09695040130615235, 0.09611315155029297, 0.09610409545898438, 0.09595894622802735, 0.09663983917236328, 0.09598358154296875, 0.09604710388183593, 0.09648896026611328, 0.09603533172607422, 0.09650498962402344, 0.09619747161865234, 0.09729631805419922, 0.09722898864746093, 0.09612684631347657, 0.09643583679199219, 0.09628614044189453, 
0.0959938201904297, 0.09626854705810547, 0.09640150451660157, 0.09587094116210937, 0.09609410858154296, 0.09701663970947266, 0.09646640014648437, 0.09624172973632812, 0.09766105651855468, 0.09844684600830078, 0.09631177520751953, 0.09609171295166016, 0.09602124786376953, 0.09625110626220704, 0.09581388854980469, 0.09606204986572266, 0.09626403045654297, 0.09614761352539063, 0.09651721954345703, 0.09676892852783203, 0.09599529266357422, 0.09842694091796875, 0.09641375732421875, 0.09842495727539062, 0.09587977600097657, 0.09560553741455079, 0.09641468811035156, 0.09628070068359375, 0.09664466857910156, 0.0962872314453125, 0.09694707489013672, 0.0961478042602539, 0.09614118194580078, 0.09650045013427734, 0.0959283218383789, 0.09585286712646485, 0.09573081970214843, 0.09618595123291016, 0.09594976043701171, 0.09621689605712891, 0.09603257751464844, 0.09580780792236328, 0.09660431671142578, 0.09602655792236328, 0.09620015716552735, 0.09615615844726562, 0.0965059814453125, 0.09627203369140624, 0.09662694549560547, 0.09632720184326173, 0.09605372619628906, 0.095853759765625, 0.09609622192382812, 0.09825981140136719, 0.09700563049316406, 0.09725740814208984, 0.09607315063476562, 0.09627091217041016, 0.09631539154052735, 0.09586073303222656, 0.09591193389892579, 0.09633805084228515, 0.09587455749511718, 0.09611302185058594, 0.09602403259277344, 0.0956322250366211, 0.09672057342529297, 0.09740399932861328, 0.09699507141113281, 0.09572233581542969, 0.09525997161865235, 0.09610514831542968, 0.09606348419189453, 0.09564157104492188, 0.09653858947753906, 0.09609222412109375, 0.09570272064208984, 0.09584262084960937, 0.09567177581787109, 0.09606387329101562, 0.0959731216430664, 0.09660038757324219, 0.09588540649414062, 0.09629081726074219, 0.09594464111328126, 0.0957976303100586, 0.09563692474365235, 0.09583232116699218, 0.09552486419677735, 0.09601193237304688, 0.09615574645996093, 0.09654297637939453, 0.09575833892822265, 0.09714060974121094, 0.09674559783935546, 0.09665945434570312, 0.09596109008789062, 0.0993812484741211, 0.0963892822265625, 0.09618396759033203, 0.09616947174072266, 0.09705696105957032, 0.0969507827758789, 0.09675071716308593, 0.09569577789306641, 0.09606159973144532, 0.09570489501953125, 0.09599747467041016, 0.09633197021484376, 0.09586102294921875, 0.09599581146240234, 0.09909808349609375, 0.09886899566650391, 0.09654566192626954, 0.09594076538085937, 0.09624355316162109, 0.09642400360107421, 0.09627232360839844, 0.0954612808227539, 0.09590489959716797, 0.09575663757324218, 0.09579174041748047, 0.09639302062988281, 0.09567443084716797, 0.09564076995849609, 0.09576914978027344, 0.097387939453125, 0.09577708435058593, 0.09653654479980468, 0.09607484436035156, 0.09536470031738281, 0.09631756591796875, 0.09618339538574219, 0.09601481628417968, 0.09614995574951171, 0.09681501007080077, 0.09587753295898438, 0.09597676849365235, 0.09609664154052734, 0.09566403198242188, 0.09594416046142579, 0.09590057373046874, 0.09642774200439454, 0.09573990631103516, 0.09596259307861328, 0.09568428802490235, 0.095771484375, 0.09608748626708985, 0.09636873626708985, 0.09561341094970703, 0.09585404968261718, 0.09560118103027344, 0.0964136962890625, 0.09631539154052735, 0.0967045135498047, 0.09781062316894532, 0.09673503875732421, 0.09543475341796875, 0.0955863037109375, 0.095830078125, 0.09576748657226562, 0.09540835571289062, 0.09577507019042969, 0.09573545837402343, 0.096606689453125, 0.09601197052001953, 0.09562995147705078, 0.09551654052734375, 0.09545331573486328, 0.09579634857177734, 
0.09618521881103516, 0.09608099365234375, 0.09585142517089844, 0.09668931579589844, 0.09650070190429688, 0.0964238052368164, 0.09610240173339844, 0.09674547576904297, 0.096304931640625, 0.09586099243164062, 0.09571539306640625, 0.09580902099609374, 0.09814848327636719, 0.09609347534179688, 0.09638809967041016, 0.09628166198730469, 0.09685609436035156, 0.0962548828125, 0.09626537322998047, 0.09747097778320313, 0.09625206756591796, 0.09589552307128907, 0.09606985473632812, 0.09595289611816406]",tokens/s,10.366682389895637,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,4344.356864,1721.63072,0.0,1319.108608,1304.104448,s,1,11.1713115234375,11.1713115234375,0.0,11.1713115234375,11.1713115234375,11.1713115234375,11.1713115234375,[11.1713115234375],,kWh,0.00012045677886663572,1.327978223037925e-05,3.78375302699796e-05,0.00017157409136699458,,MB,4394.102784,1801.322496,0.0,1377.828864,1350.328832,s,10,0.8276184005737304,0.08276184005737304,0.0006009382308734037,0.08261775970458984,0.08332083358764648,0.0838339054107666,0.08424436286926269,"[0.0823691177368164, 0.08320681762695313, 0.08258128356933593, 0.08262918090820312, 0.08277654266357422, 0.08271990203857423, 0.08228749084472656, 0.08434697723388672, 0.0820947494506836, 0.08260633850097657]",tokens/s,3093.2130052030375,kWh,2.4037740882854723e-06,2.6509455156251955e-07,1.1095181918032678e-06,3.7783868316512594e-06,tokens/kWh,67753782.60783345,MB,4398.358528,1803.419648,0.0,1377.828864,1340.622848,s,10,52.37803466796875,5.2378034667968745,0.02160671202634865,5.23849560546875,5.25526298828125,5.2675030761718755,5.277295146484375,"[5.21613427734375, 5.2482451171875, 5.23187744140625, 5.24511376953125, 5.2797431640625, 5.22692919921875, 5.25254296875, 5.25080224609375, 5.19690087890625, 5.22974560546875]",tokens/s,12.027942705251407,kWh,0.0001542548188037943,1.7014790136846092e-05,5.3451331012396076e-05,0.0002247209399530365,tokens/kWh,280347.7059733112,,s,630,52.37316440582274,0.08313200699336944,0.0009204352843367174,0.08294585418701172,0.08394587554931641,0.08454393997192383,0.08675271148681642,"[0.08273715209960937, 0.08246825408935547, 0.08237014770507813, 0.08279961395263671, 0.08239315032958984, 0.0826316146850586, 0.08256409454345703, 0.08255510711669922, 0.08229228973388672, 0.08220694732666016, 0.08235212707519532, 0.08217804718017578, 0.0824070053100586, 0.083787841796875, 0.08389823913574218, 0.08291280364990235, 0.08243705749511719, 0.08295855712890625, 0.08298454284667969, 0.0833325424194336, 0.08219235229492187, 0.08215440368652344, 0.08201612854003906, 0.08430614471435546, 0.08361369323730469, 0.08242082977294922, 0.08275424194335937, 0.08230499267578124, 0.08318959808349609, 0.08262470245361328, 0.08236962890625, 0.08327859497070313, 0.08236819458007813, 0.08271920013427735, 0.08239718627929687, 0.08248294067382812, 0.08238719940185547, 0.08249903869628906, 0.08293791961669922, 
0.08363779449462891, 0.08347539520263672, 0.0826982421875, 0.08304640197753907, 0.08325472259521484, 0.08268627166748047, 0.08264524841308593, 0.08284896087646484, 0.0824044189453125, 0.08258236694335938, 0.08326143646240235, 0.08347641754150391, 0.0830165786743164, 0.08302796936035156, 0.08263279724121093, 0.08288153839111329, 0.08332351684570312, 0.08284111785888672, 0.08303398132324219, 0.08238301086425781, 0.08288883209228516, 0.0827022705078125, 0.08251846313476563, 0.08315119934082031, 0.0832151336669922, 0.08254054260253907, 0.08297187042236329, 0.08383977508544922, 0.08292278289794922, 0.08276041412353516, 0.08282931518554687, 0.08278339385986327, 0.0836021728515625, 0.08616973114013672, 0.08394332885742188, 0.08406742095947266, 0.08427401733398437, 0.08349504089355468, 0.0836956787109375, 0.08341407775878906, 0.08318179321289063, 0.08301225280761719, 0.08265727996826172, 0.08246886444091797, 0.08267343902587891, 0.08349104309082031, 0.08393113708496094, 0.083355712890625, 0.08364614105224609, 0.08282553863525391, 0.08282514953613282, 0.08342483520507812, 0.083140380859375, 0.08307190704345703, 0.08282086181640624, 0.08535008239746093, 0.08253887939453125, 0.08615315246582031, 0.0837754898071289, 0.0834767074584961, 0.08339955139160156, 0.08280976104736328, 0.08279654693603515, 0.08303580474853516, 0.08320767974853516, 0.08340156555175782, 0.08315494537353516, 0.08286204528808594, 0.08285788726806641, 0.082901123046875, 0.08284547424316406, 0.08438969421386719, 0.08359977722167969, 0.08286796569824219, 0.08386176300048828, 0.08334918212890625, 0.08303209686279298, 0.08313680267333984, 0.08308531188964843, 0.08279849243164063, 0.08273935699462891, 0.08270022583007812, 0.08291712188720703, 0.08310604858398438, 0.08313241577148438, 0.08268364715576172, 0.08278246307373047, 0.08232959747314453, 0.08298230743408203, 0.08283200073242188, 0.082567138671875, 0.08265513610839843, 0.08243606567382812, 0.08520921325683593, 0.08412509155273437, 0.08311459350585937, 0.08276992034912109, 0.08340595245361328, 0.08330646514892578, 0.0836382064819336, 0.0834467544555664, 0.08315494537353516, 0.08313446044921875, 0.08529666900634765, 0.08311856079101562, 0.08334877014160157, 0.08409900665283203, 0.08410192108154296, 0.08371711730957031, 0.08344697570800781, 0.08356339263916016, 0.08302483367919922, 0.08321961975097657, 0.08322710418701172, 0.08293721771240234, 0.082468994140625, 0.0827495346069336, 0.08266012573242187, 0.08359302520751953, 0.0837408676147461, 0.08321430206298829, 0.08317481231689453, 0.08343411254882813, 0.0827042236328125, 0.08308956909179688, 0.08284073638916016, 0.08284060668945313, 0.08249324798583985, 0.08290493011474609, 0.0830506591796875, 0.08293785858154297, 0.08275350189208984, 0.08262364959716798, 0.08246975708007813, 0.08259542083740234, 0.08270070648193359, 0.08267084503173829, 0.08263552093505859, 0.08281702423095703, 0.08266751861572266, 0.08223347473144531, 0.08233766174316406, 0.08274534606933594, 0.083240478515625, 0.08232905578613281, 0.082106689453125, 0.08251667022705078, 0.08256511688232422, 0.08277811431884766, 0.08249974060058594, 0.08265731048583984, 0.08660969543457031, 0.08331001281738282, 0.08365875244140625, 0.08415833282470703, 0.08367401885986328, 0.08322354888916016, 0.08295731353759765, 0.08350924682617188, 0.08272895812988282, 0.08270585632324219, 0.08285753631591797, 0.083198974609375, 0.08268185424804687, 0.082693603515625, 0.08286672210693359, 0.08332857513427734, 0.08282572937011719, 0.08293965148925782, 0.08306861114501952, 0.08261273956298829, 
0.08284921264648437, 0.08270012664794922, 0.0831126708984375, 0.08465318298339844, 0.08277091217041016, 0.08246466827392578, 0.08261199951171876, 0.08321826934814452, 0.08278038024902344, 0.08271440124511718, 0.08252777862548828, 0.08248521423339844, 0.08236531066894531, 0.08265523529052735, 0.08314470672607421, 0.08301551818847656, 0.08314281463623047, 0.08288256072998047, 0.0830374755859375, 0.08395238494873047, 0.08374269104003906, 0.08371011352539062, 0.08385724639892578, 0.08384063720703125, 0.08409030151367188, 0.08368224334716796, 0.08363132476806641, 0.08342813110351563, 0.08360137939453124, 0.08351980590820313, 0.0830704345703125, 0.08364876556396485, 0.0834784927368164, 0.08314268493652344, 0.08323481750488282, 0.08285932922363282, 0.08299539184570312, 0.0832701416015625, 0.08282112121582032, 0.08258748626708984, 0.08261167907714843, 0.08637920379638672, 0.08391769409179688, 0.08372659301757812, 0.08336271667480469, 0.08340739440917969, 0.08320412445068359, 0.08299139404296875, 0.08356390380859376, 0.08521382141113282, 0.08662032318115234, 0.08369657897949219, 0.08364739227294922, 0.08356476593017578, 0.08377273559570313, 0.08351996612548829, 0.08327372741699218, 0.08343682861328125, 0.08321507263183593, 0.0831098861694336, 0.08332492828369141, 0.08323235321044922, 0.09271952056884766, 0.08346198272705078, 0.08371737670898438, 0.08370063781738281, 0.08610777282714843, 0.08385574340820312, 0.0836485137939453, 0.0843446044921875, 0.083331298828125, 0.08501042938232421, 0.08486914825439452, 0.08402057647705079, 0.08321443176269532, 0.08399747467041016, 0.08388992309570313, 0.08390819549560546, 0.08573731231689453, 0.08577017974853515, 0.08398944091796876, 0.08394515228271485, 0.08306246185302735, 0.08310816192626953, 0.08272447967529296, 0.08285430145263672, 0.08269830322265626, 0.0823419189453125, 0.08255680084228516, 0.08320438385009765, 0.0829755859375, 0.08342604827880859, 0.0829234848022461, 0.08366928100585938, 0.08295779418945312, 0.0828235855102539, 0.08273919677734375, 0.08283106994628907, 0.0841602554321289, 0.08453174591064454, 0.08326982116699219, 0.08399187469482422, 0.08354659271240235, 0.08316336059570313, 0.08265657806396484, 0.08221695709228516, 0.08223859405517578, 0.08234687805175782, 0.08223334503173828, 0.08281491088867188, 0.082565185546875, 0.08244429016113282, 0.08290918731689453, 0.08376531219482422, 0.08323030090332031, 0.08249510192871094, 0.08324988555908203, 0.08261631774902344, 0.08277811431884766, 0.08261532592773438, 0.08258041381835937, 0.0857903060913086, 0.08288006591796875, 0.08268070220947266, 0.08261436462402344, 0.08303196716308593, 0.08338227081298828, 0.08261631774902344, 0.08301340484619141, 0.08271485137939454, 0.08304434967041016, 0.08250383758544921, 0.08252400207519531, 0.08284925079345704, 0.08246495819091797, 0.08265462493896485, 0.082608642578125, 0.08311424255371094, 0.08379388427734374, 0.08322061157226562, 0.08321612548828125, 0.08267401885986328, 0.08259903717041016, 0.08279305267333985, 0.08254409790039062, 0.08250627136230469, 0.0825162582397461, 0.08268720245361329, 0.08272092437744141, 0.08333990478515625, 0.08316694641113281, 0.08319414520263672, 0.08327986907958984, 0.08285593414306641, 0.08258131408691406, 0.08287248229980469, 0.08256924438476562, 0.0827507553100586, 0.0827174072265625, 0.08699494171142579, 0.08343142700195312, 0.08345516967773438, 0.08332166290283204, 0.08336921691894532, 0.08356326293945313, 0.08290470123291016, 0.08258914947509766, 0.08264591979980469, 0.08489324951171875, 0.08322684478759766, 
0.08269980621337891, 0.08281798553466797, 0.08262230682373047, 0.08275164794921876, 0.0829665298461914, 0.08314265441894532, 0.08280883026123047, 0.08258895874023438, 0.08290582275390625, 0.0826317138671875, 0.08264173126220703, 0.08253250885009765, 0.08266294097900391, 0.08284134674072266, 0.08328265380859375, 0.08295766448974609, 0.08288639831542968, 0.08413465881347656, 0.08333436584472656, 0.08269641876220703, 0.08299094390869141, 0.08300633239746094, 0.0826060791015625, 0.08287641906738281, 0.08358707427978515, 0.0830687026977539, 0.08274896240234375, 0.08321504211425781, 0.08300863647460938, 0.08362483215332031, 0.08345394897460938, 0.08301340484619141, 0.08321660614013672, 0.08314419555664063, 0.08316780853271484, 0.0833166732788086, 0.08344576263427735, 0.08327311706542968, 0.08353033447265625, 0.08344989013671875, 0.08455760192871094, 0.08442073822021484, 0.08399263763427735, 0.08343462371826171, 0.08348761749267578, 0.08366649627685546, 0.08326374053955078, 0.0852762908935547, 0.08435097503662109, 0.0841869125366211, 0.08414083099365234, 0.08399052429199219, 0.08401631927490234, 0.08456684875488281, 0.08395372772216797, 0.08340678405761719, 0.08338022613525391, 0.08383650970458985, 0.08390278625488282, 0.08330809783935547, 0.08307164764404297, 0.08557571411132812, 0.08483353424072265, 0.08423248291015625, 0.08406880187988282, 0.08312860870361329, 0.08359273529052734, 0.08455391693115234, 0.0833818588256836, 0.0875440673828125, 0.08400077056884765, 0.08329593658447265, 0.08303411102294922, 0.08295267486572265, 0.08301516723632812, 0.08313702392578125, 0.08375852966308593, 0.08305516815185547, 0.08338432312011719, 0.0829438705444336, 0.0828531494140625, 0.08269091033935547, 0.08279449462890626, 0.0825528335571289, 0.08336732482910156, 0.08337059020996093, 0.08331788635253906, 0.08284864044189454, 0.08353782653808593, 0.08297257232666015, 0.08304659271240235, 0.08296857452392578, 0.08300077056884765, 0.08287670135498047, 0.08272425842285157, 0.08279129791259765, 0.08278540802001953, 0.08283634948730469, 0.08280608367919921, 0.0829241943359375, 0.08349033355712891, 0.08365312194824219, 0.08322847747802735, 0.08314447784423828, 0.08289321899414062, 0.08305270385742188, 0.08268988800048828, 0.087904541015625, 0.08354991912841797, 0.08327340698242187, 0.0828846435546875, 0.08303366088867188, 0.0839399642944336, 0.08344790649414062, 0.08339440155029297, 0.08303427124023438, 0.08288050842285156, 0.08414950561523438, 0.0823440933227539, 0.08224950408935547, 0.08229357147216797, 0.08227225494384766, 0.08257711791992188, 0.0823536605834961, 0.08209900665283203, 0.0815615005493164, 0.0817786865234375, 0.08219033813476563, 0.08198722839355468, 0.08143907165527343, 0.0813157730102539, 0.0812503662109375, 0.08208589172363281, 0.0818166732788086, 0.081619873046875, 0.08230067443847656, 0.08175341033935547, 0.08170162963867188, 0.08170515441894531, 0.08189132690429687, 0.08222720336914062, 0.0814940185546875, 0.08225360107421875, 0.08202467346191407, 0.0815964126586914, 0.08167987060546875, 0.0817996826171875, 0.08157183837890625, 0.0829942398071289, 0.08246800231933593, 0.08311158752441407, 0.0839902114868164, 0.08280652618408203, 0.08261702728271485, 0.08246578979492188, 0.08266409301757813, 0.08386185455322266, 0.08280429077148438, 0.08264966583251954, 0.08305651092529297, 0.08280908966064453, 0.0824354248046875, 0.08226649475097657, 0.08219241333007812, 0.08249158477783203, 0.08489759826660156, 0.08304434967041016, 0.08312761688232422, 0.08327814483642579, 0.08307772827148438, 
0.08268163299560546, 0.0828345947265625, 0.08359613037109374, 0.08269209289550782, 0.08260406494140625, 0.08252822113037109, 0.08265113830566406, 0.08294195556640625, 0.08280678558349609, 0.08287232208251953, 0.08315692901611328, 0.0826484146118164, 0.08247779083251953, 0.0827713623046875, 0.08321241760253906, 0.08286605072021484, 0.08282582092285157, 0.08255091094970703, 0.08832669067382813, 0.08296144104003907, 0.08260502624511719, 0.0825487060546875, 0.08281449890136719, 0.08239974212646485, 0.0824559326171875, 0.08281558227539063, 0.08280377960205078, 0.08306582641601562, 0.08291104125976563, 0.08254278564453126, 0.08259123229980468, 0.0828353271484375, 0.08248384094238281, 0.08351103973388672, 0.08301779174804688, 0.0829416961669922, 0.08256556701660156, 0.08316339111328125, 0.08288230133056641, 0.08298086547851563, 0.08256102752685547, 0.08245228576660156, 0.08248646545410156, 0.08231833648681641, 0.0824865951538086, 0.08232825469970703, 0.08230707550048828, 0.08256233978271485, 0.08267644500732421, 0.08268585968017578, 0.08259750366210937, 0.08248982238769531, 0.08286524963378906, 0.08316201782226562, 0.08330966186523438, 0.08333609771728516, 0.0868067855834961, 0.08318905639648437, 0.08294783782958984, 0.08280659484863281, 0.08255142211914063, 0.0828229751586914, 0.08278701019287109, 0.08258713531494141, 0.08264640045166016, 0.08350399780273438, 0.08719737243652344, 0.08283551788330078, 0.08311129760742188, 0.08274854278564453, 0.08249113464355469, 0.08275558471679688, 0.08247705841064454, 0.08275730895996093, 0.08259564971923829, 0.08260601806640624, 0.08238950347900391, 0.08230022430419921, 0.08228530883789062, 0.08569446563720703]",tokens/s,12.029061202380925,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,6504.046592,3721.330688,0.0,3326.083072,3249.416192,s,1,17.414017578125,17.414017578125,0.0,17.414017578125,17.414017578125,17.414017578125,17.414017578125,[17.414017578125],,kWh,0.0002995280871333383,3.30327763859229e-05,0.00011265064567600458,0.00044521150919526575,,MB,1889.583104,4042.194944,0.0,3625.975808,3532.033024,s,10,0.867203971862793,0.0867203971862793,0.0005968515790660602,0.08662177658081055,0.08762337341308593,0.08768650360107422,0.08773700775146484,"[0.08588054656982422, 0.08587254333496094, 0.08660931396484375, 0.08633586883544922, 0.08662614440917969, 0.08698297882080078, 0.0877496337890625, 0.08760934448242187, 0.08692018890380859, 0.08661740875244141]",tokens/s,2952.0159997664737,kWh,2.579792823355307e-06,2.844798573809867e-07,1.7135613123684615e-06,4.5778339931047555e-06,tokens/kWh,55921643.37666971,MB,1902.194688,4105.109504,0.0,3688.890368,3607.643648,s,10,50.7168623046875,5.0716862304687504,0.03591276366724254,5.0900815429687505,5.106282861328125,5.106867993164062,5.107336098632812,"[5.0016416015625, 5.012900390625, 5.0552880859375, 5.063921875, 5.090361328125, 5.10615283203125, 5.107453125, 5.0963046875, 
5.09303662109375, 5.0898017578125]",tokens/s,12.421904103909288,kWh,0.00014732052235080748,1.624966519550249e-05,6.893524520663266e-05,0.00023250543275294257,tokens/kWh,270961.4104670966,,s,630,50.71322232055666,0.08049717828659785,0.0009795821879450537,0.08053220748901369,0.08134216079711913,0.0819085422515869,0.08407345359802247,"[0.07928537750244141, 0.07928717041015625, 0.07934070587158203, 0.07931795501708984, 0.07935785675048829, 0.07912857818603515, 0.07912006378173828, 0.079107421875, 0.0795322265625, 0.07950825500488282, 0.07913881683349609, 0.07914208221435547, 0.0790536346435547, 0.07914905548095703, 0.07874559783935547, 0.07897071838378907, 0.07879081726074219, 0.07854694366455078, 0.07908509063720703, 0.07946217346191406, 0.08268870544433594, 0.07963190460205079, 0.07993312072753907, 0.07856617736816406, 0.07898931121826172, 0.07890329742431641, 0.08491165161132813, 0.07867235565185547, 0.0790282211303711, 0.07913632202148438, 0.07879724884033203, 0.0789599380493164, 0.07950816345214844, 0.08011280059814453, 0.07996482849121093, 0.07969331359863281, 0.07948719787597656, 0.07973353576660157, 0.07992288208007813, 0.07955859375, 0.0793515853881836, 0.07952617645263672, 0.080031005859375, 0.07949193572998046, 0.07977561950683594, 0.07977276611328125, 0.07890211486816406, 0.07849581146240234, 0.07886643218994141, 0.07878028869628906, 0.07904876708984375, 0.07922080230712891, 0.07914208221435547, 0.07864803314208985, 0.078991455078125, 0.07916726684570312, 0.07994582366943359, 0.07913484954833984, 0.07920358276367187, 0.0790676498413086, 0.07955891418457031, 0.07928012847900391, 0.07864729309082032, 0.07894393920898438, 0.07901564788818359, 0.0790063018798828, 0.07875379180908203, 0.07932109069824218, 0.0794972152709961, 0.07955235290527343, 0.07943183898925782, 0.07967510223388671, 0.07953164672851562, 0.07971087646484375, 0.07950534057617188, 0.0800416030883789, 0.07953453063964844, 0.07923097229003906, 0.07919974517822266, 0.07959193420410156, 0.07922191619873047, 0.08002582550048828, 0.07936812591552735, 0.07917577362060547, 0.08002754974365234, 0.07906988525390625, 0.07927193450927734, 0.07952588653564453, 0.07938028717041015, 0.07920006561279297, 0.07921475219726562, 0.07889676666259765, 0.07914701080322266, 0.08052591705322265, 0.07971635437011719, 0.07905023956298828, 0.07917823791503906, 0.07873744201660156, 0.07914460754394531, 0.07932550048828126, 0.08224153900146484, 0.08424425506591797, 0.08102729797363281, 0.07972978973388672, 0.07937836456298829, 0.07954879760742188, 0.079731201171875, 0.07948499298095703, 0.07930879974365235, 0.07984537506103516, 0.08050994873046875, 0.079395263671875, 0.07920697784423827, 0.07935164642333985, 0.07908163452148438, 0.07884595489501953, 0.07923641967773437, 0.07964128112792969, 0.07904243469238281, 0.07912188720703126, 0.07923779296875, 0.079388671875, 0.07892787170410157, 0.08155136108398438, 0.07939481353759766, 0.07929446411132812, 0.0792765121459961, 0.0792548828125, 0.07960169219970703, 0.07935145568847657, 0.07996444702148438, 0.0798966064453125, 0.07987062072753906, 0.07999811553955079, 0.0805383071899414, 0.08046403503417969, 0.08124598693847657, 0.08017100524902344, 0.08020400238037109, 0.08060265350341797, 0.081681884765625, 0.0803927001953125, 0.08056057739257813, 0.08104147338867188, 0.08264498901367187, 0.0811434555053711, 0.0802081298828125, 0.07989871978759766, 0.08018943786621094, 0.0814039077758789, 0.08365647888183594, 0.08006678771972656, 0.08008633422851562, 0.08045638275146484, 0.07982809448242187, 
0.07979894256591796, 0.08017638397216797, 0.07980745697021484, 0.08083455657958985, 0.07980441284179687, 0.07999056243896484, 0.08030150604248047, 0.08034178924560546, 0.07985151672363282, 0.07966639709472656, 0.07957353973388671, 0.07949542236328125, 0.08009097290039062, 0.08028790283203124, 0.08037171173095703, 0.08089600372314452, 0.08034918212890625, 0.0804513931274414, 0.08038419342041016, 0.08036147308349609, 0.08001945495605468, 0.08036351776123046, 0.08008860778808594, 0.0797984619140625, 0.07931728363037109, 0.08054927825927734, 0.0798922576904297, 0.07983596801757813, 0.08002150726318359, 0.08010246276855469, 0.07895321655273438, 0.07962028503417969, 0.0802529296875, 0.07960118103027344, 0.0798023681640625, 0.0794642562866211, 0.07977289581298828, 0.07983529663085938, 0.0799181137084961, 0.0799125747680664, 0.07901609802246094, 0.07884934234619141, 0.07935785675048829, 0.0810146255493164, 0.08003475189208985, 0.0806317138671875, 0.07984339141845703, 0.08015875244140624, 0.08019910430908203, 0.08021049499511719, 0.08026345825195312, 0.08080150604248047, 0.08207974243164062, 0.08087757110595703, 0.08089395141601563, 0.08051097869873047, 0.08095123291015625, 0.0797225570678711, 0.07954134368896484, 0.07949199676513671, 0.07947779083251953, 0.08234595489501953, 0.07989740753173828, 0.07977193450927735, 0.08019753265380859, 0.08035123443603516, 0.07980850982666016, 0.07984742736816407, 0.08110079956054687, 0.08247705841064454, 0.08085708618164063, 0.08064614105224609, 0.07965286254882813, 0.0797286376953125, 0.0804290542602539, 0.08042240142822266, 0.08057875061035157, 0.08041458892822266, 0.07969776153564453, 0.07986646270751953, 0.0824486083984375, 0.08069657897949219, 0.08032105255126953, 0.08349696350097656, 0.08037171173095703, 0.08081817626953125, 0.0806789093017578, 0.08060646057128906, 0.0804769287109375, 0.08170496368408203, 0.08034300994873046, 0.08041852569580078, 0.08009145355224609, 0.08030003356933593, 0.07996819305419922, 0.08028697967529297, 0.07983369445800781, 0.07990057373046874, 0.07987948608398437, 0.08020252990722657, 0.08054541015625, 0.08024102020263672, 0.07984233856201171, 0.08009827423095703, 0.08042700958251953, 0.08012499237060547, 0.08013491058349609, 0.07957933044433593, 0.07957465362548828, 0.07957315063476562, 0.0807405776977539, 0.08373004913330079, 0.08193456268310546, 0.08078864288330079, 0.08061763000488281, 0.08081494140625, 0.08086707305908203, 0.08333132934570313, 0.08178688049316406, 0.08096940612792969, 0.08162131500244141, 0.0820101089477539, 0.08126815795898437, 0.08080441284179687, 0.08044876861572266, 0.08078412628173828, 0.08017011260986329, 0.08082112121582032, 0.08404582214355469, 0.08078521728515625, 0.08044534301757812, 0.08012009429931641, 0.08050688171386719, 0.08074649810791015, 0.08074444580078124, 0.08058396911621094, 0.08029872131347657, 0.07989568328857422, 0.08036441802978515, 0.0804290542602539, 0.08025907135009766, 0.08048230743408204, 0.08112537384033203, 0.08050409698486329, 0.08064892578125, 0.08058172607421875, 0.08129747009277344, 0.0811385269165039, 0.08089993286132813, 0.08197135925292968, 0.08109779357910156, 0.0810341796875, 0.08078336334228516, 0.08108441925048829, 0.0808652801513672, 0.08077439880371094, 0.08068787384033203, 0.08058841705322266, 0.08033932495117188, 0.0812290267944336, 0.08026112365722657, 0.08049459075927734, 0.08035737609863282, 0.08055366516113281, 0.0818271713256836, 0.08074291229248047, 0.0811564178466797, 0.08085657501220703, 0.0815970230102539, 0.08063740539550782, 0.08116422271728516, 
0.0807266845703125, 0.08061542510986328, 0.08197238159179687, 0.08075350189208984, 0.08065200042724609, 0.08077954864501953, 0.0809752960205078, 0.0806016616821289, 0.0808729248046875, 0.08099251556396485, 0.08126287841796875, 0.083884033203125, 0.08204637145996094, 0.08100105285644531, 0.08154521942138672, 0.08090569305419922, 0.0811402587890625, 0.08123113250732422, 0.08135958099365234, 0.08372000122070312, 0.08185628509521484, 0.08071616363525391, 0.08041449737548828, 0.08050918579101562, 0.08133602905273438, 0.08070787048339843, 0.08066995239257813, 0.08122035217285156, 0.08053298950195313, 0.08028211212158202, 0.08107008361816406, 0.08058265686035156, 0.08071167755126953, 0.08080079650878906, 0.0807740478515625, 0.08082998657226563, 0.08096412658691406, 0.08114380645751954, 0.08094924926757813, 0.08068096160888671, 0.08073795318603516, 0.08080790710449219, 0.0809324493408203, 0.0812552032470703, 0.0811171875, 0.08087286376953125, 0.0809703369140625, 0.08081798553466797, 0.0809125747680664, 0.0815841293334961, 0.08112441253662109, 0.08119596862792969, 0.08062886047363281, 0.08061837005615234, 0.08076493072509766, 0.0811151351928711, 0.08068447875976563, 0.08097644805908204, 0.08067791748046875, 0.08088470458984375, 0.0809512939453125, 0.08472774505615234, 0.08102304077148438, 0.08097337341308594, 0.08045613098144531, 0.08085298919677734, 0.08078313446044921, 0.0803985595703125, 0.08412979125976562, 0.08123596954345703, 0.08078646087646485, 0.08106082916259766, 0.08093695831298828, 0.08150748443603516, 0.08142451477050781, 0.08115030670166015, 0.08138790130615234, 0.08106752014160157, 0.08083235168457031, 0.0810564193725586, 0.08097792053222656, 0.08099558258056641, 0.08082713317871094, 0.08071488189697265, 0.08074658966064453, 0.08141222381591796, 0.0810318374633789, 0.08233567810058594, 0.0813091812133789, 0.08104812622070312, 0.08088575744628906, 0.08063999938964844, 0.08094866943359375, 0.08103600311279296, 0.08061116790771485, 0.08044953918457032, 0.08029705810546875, 0.08050371551513671, 0.08088678741455078, 0.08076134490966796, 0.0811484146118164, 0.08082431793212891, 0.08057344055175782, 0.08037888336181641, 0.08091852569580078, 0.08060313415527344, 0.08092169952392578, 0.08175094604492188, 0.08093436431884765, 0.08087763214111328, 0.08179145812988281, 0.08120127868652344, 0.08142221069335938, 0.08094924926757813, 0.08124604797363282, 0.08092057800292969, 0.08082137298583984, 0.08080384063720703, 0.08049549102783203, 0.08026521301269532, 0.0804659194946289, 0.08054169464111328, 0.081157470703125, 0.08098175811767579, 0.08180201721191406, 0.08038633728027343, 0.08026914978027344, 0.08052909088134766, 0.08065248107910156, 0.08090217590332031, 0.08045577239990234, 0.08060425567626953, 0.0804320297241211, 0.08253440093994141, 0.0814911651611328, 0.08122652435302734, 0.08081613159179687, 0.08046134185791015, 0.08093743896484375, 0.0840847396850586, 0.08147138977050782, 0.08639836883544921, 0.0810370864868164, 0.08067984008789063, 0.08070963287353515, 0.08078125, 0.08108348846435547, 0.08155043029785156, 0.08109862518310547, 0.0806329574584961, 0.08111929321289063, 0.08078828430175782, 0.08035327911376954, 0.08027750396728515, 0.08003990173339844, 0.08006454467773437, 0.08088553619384765, 0.08043746948242188, 0.08033017730712891, 0.08109318542480469, 0.08154691314697265, 0.08022022247314453, 0.08035356903076171, 0.08062156677246093, 0.0808980484008789, 0.08050240325927735, 0.08048223876953126, 0.08090054321289063, 0.08031027221679687, 0.080216064453125, 0.08038604736328125, 
0.08053964996337891, 0.08109465789794922, 0.08082550048828124, 0.08053142547607423, 0.08059503936767579, 0.08033564758300782, 0.08068013000488282, 0.08104499053955078, 0.08056473541259766, 0.0807383041381836, 0.08082790374755859, 0.08099440002441406, 0.08380377960205078, 0.08108521270751953, 0.08089395141601563, 0.08054783630371094, 0.08071340942382813, 0.0807573471069336, 0.08132374572753906, 0.08558080291748046, 0.08030303955078125, 0.08010348510742188, 0.08026557159423828, 0.08029286193847657, 0.08075740814208984, 0.08122140502929688, 0.08055830383300781, 0.08039186859130859, 0.08015833282470704, 0.08061612701416015, 0.08043843078613282, 0.08054192352294921, 0.0798032989501953, 0.07994338989257813, 0.08010883331298828, 0.08052194976806641, 0.08078950500488281, 0.08155923461914062, 0.08111135864257812, 0.08091238403320312, 0.08122492980957031, 0.08094364929199219, 0.08097551727294922, 0.08092527770996094, 0.08117033386230468, 0.08107222747802735, 0.08088495635986329, 0.08060189056396484, 0.08115718078613281, 0.08097212982177734, 0.08174566650390624, 0.0802619857788086, 0.08047561645507813, 0.08035587310791016, 0.08058879852294921, 0.0807874526977539, 0.08073136138916015, 0.08013699340820313, 0.08032160186767578, 0.08026771545410157, 0.08044185638427734, 0.08051302337646485, 0.08153702545166015, 0.08068688201904296, 0.0805513916015625, 0.08081689453125, 0.08063734436035157, 0.08054230499267578, 0.08141648101806641, 0.08064176177978516, 0.07997404479980469, 0.0802943344116211, 0.08029798126220702, 0.08126258850097656, 0.08240332794189453, 0.08063795471191407, 0.08086892700195313, 0.08066687774658203, 0.08123564910888671, 0.08106854248046876, 0.08127487945556641, 0.08132300567626953, 0.08067378997802735, 0.08340275573730468, 0.08059478759765625, 0.08028380584716797, 0.08162899017333984, 0.08032889556884766, 0.08063385772705078, 0.08074179077148437, 0.08047062683105469, 0.08284159851074219, 0.08057558441162109, 0.08017298889160156, 0.08058159637451172, 0.08022188568115235, 0.0801488037109375, 0.08041580963134766, 0.08093382263183593, 0.08074240112304687, 0.08035734558105469, 0.0799334716796875, 0.08019305419921875, 0.0803326416015625, 0.08052540588378906, 0.08038249969482422, 0.0804302749633789, 0.08069612884521485, 0.080801025390625, 0.08128361511230468, 0.0814097900390625, 0.08126716613769532, 0.08069894409179687, 0.08068550109863282, 0.08098201751708985, 0.08093081665039062, 0.08187673950195312, 0.08059446716308594, 0.07996691131591797, 0.0797511978149414, 0.08012185668945312, 0.08059494018554687, 0.08079708862304688, 0.08082649230957031, 0.08076335906982422, 0.08091798400878907, 0.08102143859863281, 0.08042201232910157, 0.08080892944335938, 0.08134022521972656, 0.08125247955322265, 0.08058396911621094, 0.08020764923095704]",tokens/s,12.422795696510672,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,2003.881984,1252.982784,0.0,857.735168,829.14304,s,1,9.8537939453125,9.8537939453125,0.0,9.8537939453125,9.8537939453125,9.8537939453125,9.8537939453125,[9.8537939453125],,kWh,7.172844104999664e-05,7.904875418538698e-06,2.6393354448012163e-05,0.0001060266709165475,,MB,2047.086592,1542.38976,0.0,1126.170624,1096.740864,s,10,0.9329628829956054,0.09329628829956055,0.000590798934897056,0.093272705078125,0.09374768829345703,0.09418995666503907,0.09454377136230468,"[0.09364940643310547, 0.09361481475830079, 0.0946322250366211, 0.09248512268066406, 0.09264518737792969, 0.09275679779052734, 0.093359619140625, 0.09352851104736327, 0.093185791015625, 0.09310540771484375]",tokens/s,2743.946245514312,kWh,2.7397998220013332e-06,3.0215033128273586e-07,1.2486588390092126e-06,4.290608992293282e-06,tokens/kWh,59665189.827323526,MB,2051.227648,1565.458432,0.0,1149.239296,1096.743424,s,10,56.882767089843746,5.688276708984374,0.012514615985806062,5.689633056640625,5.6981533203125005,5.704061767578125,5.708788525390625,"[5.69684033203125, 5.65869384765625, 5.6828154296875, 5.6897890625, 5.68760546875, 5.692412109375, 5.6944736328125, 5.70997021484375, 5.68068994140625, 5.68947705078125]",tokens/s,11.075410572149973,kWh,0.00016401599858299512,1.809152795798569e-05,6.381000431898992e-05,0.00024591753085997074,tokens/kWh,256183.44401755228,,s,630,56.88017211914056,0.09028598749069941,0.0009492433131047906,0.09004508590698243,0.09131274108886718,0.09205830993652343,0.09396634460449219,"[0.0898764190673828, 0.09033638763427734, 0.08989510345458984, 0.09205836486816406, 0.09064653015136719, 0.08993692779541015, 0.08951007843017578, 0.08959423828125, 0.09084470367431641, 0.09250291442871093, 0.0904044189453125, 0.08929529571533203, 0.08987001800537109, 0.0895880355834961, 0.08929293060302734, 0.08963468933105469, 0.09154354858398438, 0.09045606231689453, 0.08943202972412109, 0.09005686187744141, 0.09050460815429688, 0.09324153900146484, 0.08959820556640626, 0.09007516479492188, 0.08967577362060547, 0.08933990478515624, 0.09658086395263672, 0.089697021484375, 0.09261670684814453, 0.0900669403076172, 0.08992153930664062, 0.09029222106933593, 0.09006658935546875, 0.08983792114257813, 0.09046630096435547, 0.09008332824707031, 
0.08990509033203126, 0.08978816223144531, 0.08988009643554687, 0.08987932586669922, 0.08989241790771485, 0.09026195526123047, 0.09126707458496094, 0.08991664123535156, 0.09031526184082031, 0.089923583984375, 0.09085740661621093, 0.09397283172607422, 0.0902981414794922, 0.08992610931396484, 0.08970569610595704, 0.08931382751464843, 0.09002153778076172, 0.09094560241699219, 0.0902347869873047, 0.09036796569824218, 0.09355510711669922, 0.08978636932373046, 0.0896424331665039, 0.08932157135009766, 0.08969264221191406, 0.08993084716796874, 0.0911379852294922, 0.09002751922607422, 0.08973363494873046, 0.09233408355712891, 0.09000057220458985, 0.09087484741210937, 0.08972611236572266, 0.08972563171386719, 0.08959999847412109, 0.089378173828125, 0.08978495788574219, 0.09022463989257813, 0.09036323547363281, 0.08968873596191407, 0.09052496337890625, 0.08964713287353515, 0.0895986557006836, 0.08978793334960937, 0.09003990173339843, 0.0898466567993164, 0.09025126647949219, 0.09017747497558594, 0.08949721527099609, 0.0896455078125, 0.0895052490234375, 0.08979084777832032, 0.08987868499755859, 0.09053612518310547, 0.08937398529052734, 0.08944416046142578, 0.08942179107666015, 0.0903298568725586, 0.08972083282470703, 0.08908550262451172, 0.08994473266601563, 0.08997660827636719, 0.08924905395507812, 0.08960688018798828, 0.08948086547851562, 0.08973689270019532, 0.08916969299316406, 0.08958041381835938, 0.0892286376953125, 0.08989148712158203, 0.09061357116699219, 0.08981100463867188, 0.08936255645751953, 0.08912630462646484, 0.08907123565673829, 0.08984799957275391, 0.08934255981445313, 0.08994016265869141, 0.0898713607788086, 0.08930406188964844, 0.09056249237060547, 0.0900455322265625, 0.08998397064208985, 0.09031199645996094, 0.08964959716796875, 0.08989663696289063, 0.09016102600097656, 0.08951622772216797, 0.089080322265625, 0.08951602935791016, 0.090133056640625, 0.08997885131835938, 0.08954255676269532, 0.08997869110107422, 0.09272048187255859, 0.08950876617431641, 0.09040080261230468, 0.0897927017211914, 0.09127823638916016, 0.09000847625732422, 0.08958566284179688, 0.09554313659667969, 0.09145536041259765, 0.09044611358642578, 0.08999644470214843, 0.08914620971679688, 0.08915904235839844, 0.09012697601318359, 0.08984575653076173, 0.08987648010253907, 0.09007718658447265, 0.09375949096679688, 0.09035980987548828, 0.08986156463623046, 0.0895555191040039, 0.09022259521484376, 0.08939929962158204, 0.08940748596191406, 0.09012553405761718, 0.08960079956054688, 0.08978377532958984, 0.08940720367431641, 0.09037830352783204, 0.08917596435546875, 0.08949219512939453, 0.08950489807128906, 0.08969302368164063, 0.09016950225830078, 0.08935833740234375, 0.09258393859863281, 0.09028217315673828, 0.09008927917480469, 0.08918630218505859, 0.08967782592773438, 0.08946688079833984, 0.09200230407714843, 0.09016716766357422, 0.08977545928955079, 0.08919276428222657, 0.09243484497070313, 0.0898082275390625, 0.09055715179443359, 0.08976179504394531, 0.09093881225585937, 0.08988515472412109, 0.08989295959472657, 0.0898682861328125, 0.0900341796875, 0.08952413177490234, 0.08940342712402344, 0.09079199981689454, 0.0895215072631836, 0.08985625457763671, 0.090063232421875, 0.09324403381347657, 0.0899636459350586, 0.09017024230957031, 0.08963481903076172, 0.08975888061523438, 0.09047052764892578, 0.08912969970703125, 0.08984371185302735, 0.0903024673461914, 0.08962064361572265, 0.08969932556152344, 0.0903031005859375, 0.08996681976318359, 0.08969420623779296, 0.09131212615966797, 0.09131827545166016, 0.08946073913574219, 
0.08950921630859375, 0.08936720275878907, 0.09004463958740234, 0.09023792266845704, 0.08969913482666016, 0.09025116729736328, 0.0907305908203125, 0.08978358459472656, 0.0898403549194336, 0.08992768096923828, 0.08972451019287109, 0.08926659393310547, 0.08960610961914063, 0.09000348663330078, 0.08972284698486328, 0.08954268646240235, 0.090219970703125, 0.09027846527099609, 0.09060870361328124, 0.08956976318359375, 0.09004080200195312, 0.0896737289428711, 0.08970409393310547, 0.0898133773803711, 0.09012790679931641, 0.09068294525146485, 0.08978726196289062, 0.08974076843261719, 0.09161167907714844, 0.0899110107421875, 0.09308393859863281, 0.09069878387451172, 0.09389791870117188, 0.09144822692871094, 0.08991014099121093, 0.09143692779541016, 0.09217219543457031, 0.0901624984741211, 0.08999539184570313, 0.0918597412109375, 0.09082649230957031, 0.09001395416259765, 0.09048268890380859, 0.09051545715332031, 0.09003008270263672, 0.09090793609619141, 0.09075180816650391, 0.0896899185180664, 0.09012451171875, 0.08921907043457031, 0.08953855895996093, 0.08960982513427734, 0.08979293060302734, 0.09026764678955078, 0.09038953399658203, 0.09039356994628907, 0.09004259490966797, 0.0906053466796875, 0.09002361297607422, 0.0896987533569336, 0.08934182739257812, 0.0895467529296875, 0.0895831069946289, 0.089797119140625, 0.09043148803710938, 0.08957952117919922, 0.08955903625488282, 0.09452063751220703, 0.09052435302734375, 0.08985395050048828, 0.08945017242431641, 0.08917664337158203, 0.08924134063720703, 0.08982733154296875, 0.09067427062988281, 0.09205238342285156, 0.09111551666259765, 0.09404825592041016, 0.09064857482910156, 0.09162239837646484, 0.09202982330322265, 0.09147596740722656, 0.09033033752441406, 0.09022918701171875, 0.08947711944580078, 0.08986457824707031, 0.09089004516601562, 0.08971810913085937, 0.08999008178710938, 0.09071625518798829, 0.08997468566894531, 0.09159232330322266, 0.09049945831298828, 0.09089024353027343, 0.0906792984008789, 0.09010575866699219, 0.08936048126220703, 0.08925091552734375, 0.08903545379638672, 0.08962889862060547, 0.09032704162597656, 0.09057689666748046, 0.09009686279296875, 0.08917072296142578, 0.0895283203125, 0.0900505599975586, 0.08947698974609375, 0.09072447967529297, 0.09021849822998047, 0.09008454132080078, 0.08987942504882812, 0.09043913269042969, 0.09060717010498047, 0.08997727966308594, 0.09019235229492187, 0.09062339019775391, 0.08999382019042969, 0.09008946990966797, 0.09184457397460938, 0.09015708923339844, 0.08964915466308594, 0.09019197082519531, 0.09004022216796875, 0.0904949722290039, 0.09014681243896484, 0.09010176086425781, 0.09057855987548828, 0.08985011291503907, 0.09042649841308593, 0.08985906982421875, 0.09016729736328125, 0.08960614776611328, 0.09006678771972657, 0.09040406036376954, 0.09048159790039062, 0.08983757019042969, 0.08959318542480468, 0.09104239654541016, 0.0902042236328125, 0.09010131072998047, 0.09019026947021484, 0.09271826934814453, 0.09004086303710937, 0.0898782730102539, 0.09036016082763672, 0.09081196594238282, 0.09186742401123046, 0.09370454406738281, 0.09172112274169922, 0.0902081298828125, 0.089676513671875, 0.08969840240478516, 0.09027779388427734, 0.08985600280761719, 0.09035151672363281, 0.09003852844238282, 0.08980239868164062, 0.09076918029785157, 0.09013699340820312, 0.09022643280029297, 0.08975794982910157, 0.09015251159667968, 0.08978876495361328, 0.0901776351928711, 0.08966553497314453, 0.09159446716308593, 0.09019420623779296, 0.08985603332519532, 0.09103266906738282, 0.090251708984375, 
0.09039231872558594, 0.09047042846679687, 0.09102745819091797, 0.09006607818603515, 0.08943907165527344, 0.09037763214111329, 0.09512201690673829, 0.08981913757324218, 0.09097830200195313, 0.09040211486816406, 0.09002374267578125, 0.08967667388916016, 0.09015090942382813, 0.08995769500732421, 0.08976659393310547, 0.09087324523925781, 0.09309040069580078, 0.09069529724121093, 0.0906731185913086, 0.09124086761474609, 0.09056460571289063, 0.08975564575195312, 0.08961135864257813, 0.09012723541259765, 0.09017142486572266, 0.0898744354248047, 0.0897551040649414, 0.08961859130859375, 0.08986662292480468, 0.09077536010742188, 0.09043721771240235, 0.09183200073242187, 0.0903927001953125, 0.09001017761230469, 0.0905401611328125, 0.09178121948242188, 0.09020537567138671, 0.08982342529296874, 0.08966390228271484, 0.09046243286132813, 0.08977817535400391, 0.0899051513671875, 0.09043526458740235, 0.08996227264404297, 0.08978476715087891, 0.09021858978271484, 0.09055340576171875, 0.09013116455078125, 0.08944457244873047, 0.08978022766113282, 0.09041423797607422, 0.09025007629394531, 0.09064035034179688, 0.09064246368408203, 0.09046221160888672, 0.09005827331542969, 0.09002015686035156, 0.08992726135253906, 0.09026345825195313, 0.09000962829589844, 0.09077347564697266, 0.09107881927490234, 0.09036598205566407, 0.0901903076171875, 0.0896426239013672, 0.08960857391357421, 0.09131008148193359, 0.0909222412109375, 0.08941379547119141, 0.08991190338134766, 0.09008537292480469, 0.09004415893554688, 0.08948172760009766, 0.08945638275146485, 0.09035775756835937, 0.09233817291259766, 0.09019497680664063, 0.08953282928466796, 0.090325439453125, 0.09077977752685547, 0.0929280014038086, 0.09008537292480469, 0.09204326629638672, 0.09092915344238281, 0.09089411163330079, 0.09079625701904297, 0.09144287872314454, 0.09248735809326172, 0.09024575805664062, 0.09078076934814452, 0.09104067230224609, 0.090363037109375, 0.08969712066650391, 0.09040422058105468, 0.09232653045654297, 0.09015296173095703, 0.09093325042724609, 0.09101312255859376, 0.09036370849609375, 0.08984390258789063, 0.09139571380615234, 0.09020403289794922, 0.09084467315673828, 0.0907315216064453, 0.09115411376953125, 0.08991705322265625, 0.08980345916748046, 0.08991334533691406, 0.09068953704833985, 0.08969625854492187, 0.08993376159667969, 0.08964019012451171, 0.08944518280029297, 0.09033318328857422, 0.09081839752197265, 0.09016284942626954, 0.09395046234130859, 0.090359619140625, 0.08968390655517579, 0.09032662200927734, 0.08984028625488281, 0.09178726196289062, 0.09092915344238281, 0.09072434997558594, 0.09052569580078125, 0.09335977935791015, 0.09136335754394531, 0.09035279846191406, 0.09016099548339844, 0.09099673461914062, 0.08969830322265625, 0.0897474594116211, 0.0895134048461914, 0.09052323150634765, 0.08982598114013672, 0.08934633636474609, 0.08980070495605469, 0.0902116470336914, 0.09026451110839843, 0.09004729461669922, 0.09303247833251953, 0.08939971160888673, 0.0893834228515625, 0.0894741439819336, 0.08978934478759766, 0.08966349029541015, 0.09041458892822266, 0.09042991638183594, 0.08964918518066406, 0.08929821014404297, 0.08975138854980469, 0.08986099243164063, 0.09048992156982422, 0.09039689636230469, 0.08993251037597656, 0.09089965057373046, 0.09018450927734376, 0.09017504119873047, 0.0906830062866211, 0.09151302337646484, 0.09012697601318359, 0.09014640045166016, 0.08977654266357422, 0.0901500473022461, 0.08945750427246094, 0.08941497802734374, 0.09056121826171876, 0.09076454162597657, 0.08965196990966796, 0.09040486145019531, 
0.0900157470703125, 0.08918630218505859, 0.0907610855102539, 0.08994217681884766, 0.09079555511474609, 0.08937721252441407, 0.08926544189453126, 0.08970518493652344, 0.08991126251220703, 0.0907919692993164, 0.0895667495727539, 0.08992108917236329, 0.08959187316894532, 0.08932982635498046, 0.08973932647705078, 0.09205824279785156, 0.08962230682373047, 0.09012655639648437, 0.0903741455078125, 0.09497996520996094, 0.09177792358398437, 0.08991539001464843, 0.08998818969726563, 0.08975071716308594, 0.08999702453613281, 0.0918097915649414, 0.09102336120605468, 0.09078326416015625, 0.09097837066650391, 0.09079644775390625, 0.08959385681152343, 0.09043558502197266, 0.09034880065917969, 0.08994278717041015, 0.09041715240478515, 0.09082662200927734, 0.08996672058105469, 0.08956838226318359, 0.08937773132324218, 0.08936646270751954, 0.09068544006347656, 0.09000345611572266, 0.08993753814697265, 0.08985574340820313, 0.08979315185546875, 0.09002543640136719, 0.09040541076660157, 0.08992294311523437, 0.09066569519042969, 0.08981228637695313, 0.0906690902709961, 0.09050086212158204, 0.09079788970947265, 0.09157939147949219, 0.09171260833740234, 0.09160591888427734, 0.08969356536865235, 0.0897091827392578, 0.08940953826904297, 0.09013862609863281, 0.09101289367675781, 0.09090480041503907, 0.09043500518798828, 0.09053241729736328, 0.09335161590576171, 0.0904357452392578, 0.09004252624511719, 0.09021849822998047, 0.08968396759033204, 0.089525634765625, 0.09001538848876953, 0.08943011474609375, 0.08989170837402344, 0.09031884765625, 0.09107059478759766, 0.0900598373413086, 0.08976057434082031, 0.08980684661865235, 0.08945458984375, 0.09014201354980468, 0.08953311920166016, 0.08987673950195313, 0.09013632202148437]",tokens/s,11.075915851316491,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,5245.898752,3461.28384,0.0,3066.036224,2865.160192,s,1,13.449072265625,13.449072265625,0.0,13.449072265625,13.449072265625,13.449072265625,13.449072265625,[13.449072265625],,kWh,0.0001751543946833332,1.931352334526294e-05,6.485394077201945e-05,0.0002593218588006156,,MB,5298.737152,3798.925312,0.0,3382.706176,3158.448128,s,10,0.9427298507690429,0.09427298507690429,0.0004287602052404247,0.09424558258056641,0.0946105682373047,0.09491861114501954,0.09516504547119141,"[0.09522665405273438, 0.09449411010742187, 0.09451715087890625, 0.093910400390625, 0.0945421142578125, 0.09384623718261718, 0.09432064056396484, 0.09417052459716797, 0.09367011260986328, 0.09403190612792969]",tokens/s,2715.518128456047,kWh,2.76874553930794e-06,3.0534151131535305e-07,1.556185731320564e-06,4.630272781943857e-06,tokens/kWh,55288319.2969308,MB,5302.882304,3798.925312,0.0,3382.706176,3158.450688,s,10,56.98800439453125,5.698800439453126,0.015593749483295881,5.695649658203125,5.715228124999999,5.721299609375,5.726156796875,"[5.666302734375, 5.69577392578125, 5.70157568359375, 5.68772216796875, 5.695525390625, 5.72737109375, 
5.71081103515625, 5.6939208984375, 5.69512255859375, 5.71387890625]",tokens/s,11.05495808623993,kWh,0.00016488129306569104,1.8186982917514692e-05,7.059048309708114e-05,0.0002536587590802869,tokens/kWh,248365.1667635082,,s,630,56.98545406341552,0.09045310168796115,0.0011146891581628154,0.09024007797241211,0.09134676818847656,0.09213751525878906,0.09454165992736817,"[0.08968605041503906, 0.08971260833740234, 0.08952556610107422, 0.08906617736816407, 0.08922316741943359, 0.08949964904785156, 0.08911430358886718, 0.09006463623046874, 0.09064096069335938, 0.08963890838623047, 0.08953651428222656, 0.09333964538574219, 0.09175859069824219, 0.08936038208007813, 0.08958566284179688, 0.0897804183959961, 0.08917817687988282, 0.08923545837402344, 0.08925567626953125, 0.0894527359008789, 0.08956813049316406, 0.08997369384765624, 0.0896626205444336, 0.0895005111694336, 0.09261017608642579, 0.08917375946044923, 0.09015974426269531, 0.09012822723388672, 0.0900240936279297, 0.08954265594482422, 0.08973513793945312, 0.08937065887451172, 0.08998422241210938, 0.09084188842773437, 0.08923689270019532, 0.08978316497802734, 0.08978112030029296, 0.08964752197265625, 0.09041260528564453, 0.09004940795898438, 0.08969833374023438, 0.09047036743164062, 0.08974736022949219, 0.08973731231689454, 0.08982342529296874, 0.09008927917480469, 0.09005875396728516, 0.08941964721679688, 0.08937305450439453, 0.090212158203125, 0.08958560180664063, 0.08950784301757812, 0.0895692138671875, 0.0903043212890625, 0.08969859313964844, 0.08989286041259766, 0.08940294647216797, 0.09046086120605469, 0.09075494384765626, 0.09033859252929688, 0.09165267181396484, 0.09034732818603515, 0.09006297302246094, 0.0917606430053711, 0.09150259399414062, 0.08984780883789062, 0.08934528350830079, 0.09013116455078125, 0.0895589141845703, 0.08977327728271485, 0.0900618896484375, 0.09017945861816407, 0.0914776611328125, 0.08999152374267579, 0.08980025482177735, 0.09080601501464844, 0.09085385894775391, 0.09026764678955078, 0.09011427307128907, 0.08976793670654297, 0.09000291442871093, 0.09255171203613281, 0.0908448028564453, 0.09116505432128906, 0.0898653106689453, 0.08953334045410156, 0.08950784301757812, 0.0895283203125, 0.09028975677490235, 0.09052191925048828, 0.08989641571044922, 0.08996927642822265, 0.09094758605957032, 0.0903024673461914, 0.08976902770996094, 0.0904151382446289, 0.08941603088378906, 0.09003065490722656, 0.08934809875488281, 0.08947718048095703, 0.09004825592041016, 0.08998521423339843, 0.08981913757324218, 0.09037551879882813, 0.09034732818603515, 0.08961945343017579, 0.08959715270996094, 0.08981910705566407, 0.09006147003173828, 0.09084681701660156, 0.09063670349121093, 0.09121382141113281, 0.09021990203857422, 0.09131072235107422, 0.09187091064453125, 0.09068576049804687, 0.0896552963256836, 0.09023385620117187, 0.09018057250976562, 0.09010179138183594, 0.09034870147705078, 0.09456111907958985, 0.09081241607666016, 0.0912916488647461, 0.09234636688232421, 0.09089024353027343, 0.0900060806274414, 0.08999571228027343, 0.08985382080078125, 0.09007308959960937, 0.08956928253173828, 0.09059097290039063, 0.09013273620605469, 0.09430016326904297, 0.0902715835571289, 0.09114844512939453, 0.0913583984375, 0.09006368255615234, 0.0898325424194336, 0.09005967712402344, 0.09075917053222657, 0.09047411346435547, 0.09083484649658204, 0.09061328125, 0.08987741088867188, 0.08961027526855468, 0.09022179412841796, 0.09050777435302734, 0.09155596923828126, 0.0913174057006836, 0.09060399627685548, 0.09087026977539063, 0.09222886657714843, 
0.09193551635742188, 0.08987033843994141, 0.0908779525756836, 0.08987455749511719, 0.0898435821533203, 0.09076652526855469, 0.09061990356445312, 0.09025318145751954, 0.09061881256103516, 0.09070796966552734, 0.09055846405029297, 0.09007695770263671, 0.09031078338623047, 0.08999267578125, 0.09006511688232421, 0.09028031921386719, 0.09021392059326172, 0.0903741455078125, 0.08972544097900391, 0.08988057708740234, 0.09074073791503906, 0.0896674575805664, 0.09005305480957031, 0.0893655014038086, 0.08983622741699218, 0.08909190368652344, 0.08969209289550781, 0.0903662109375, 0.09327935791015625, 0.08970738983154297, 0.08980480194091797, 0.0894744644165039, 0.09334998321533203, 0.09082502746582032, 0.0904705581665039, 0.09199961853027344, 0.09024310302734374, 0.09082061004638672, 0.10126249694824219, 0.09058493041992187, 0.09043436431884766, 0.08972911834716797, 0.09009696197509766, 0.08971327972412109, 0.0893186264038086, 0.08988153839111328, 0.08981404876708984, 0.08971568298339844, 0.09490620422363281, 0.09056620788574218, 0.08984432220458985, 0.09010995483398437, 0.08984371185302735, 0.09000141143798829, 0.08936653137207032, 0.08981641387939453, 0.09067088317871094, 0.09157107543945313, 0.09022029113769531, 0.08970060729980468, 0.08928665924072265, 0.08964915466308594, 0.08943545532226563, 0.08986201477050781, 0.08951289367675781, 0.09022041320800782, 0.08995945739746093, 0.08963990020751954, 0.08993920135498047, 0.09027597045898438, 0.09034815979003906, 0.08998092651367187, 0.08941270446777344, 0.08972380828857422, 0.08946435546875, 0.08916835021972656, 0.09043968200683594, 0.09152921295166015, 0.08972697448730468, 0.0897798080444336, 0.08991942596435547, 0.09008175659179687, 0.08910438537597656, 0.08950099182128907, 0.08917043304443359, 0.09000160217285157, 0.09057443237304688, 0.08898397064208985, 0.08888044738769531, 0.08918428802490234, 0.09224691009521484, 0.08962847900390625, 0.0935874252319336, 0.09057075500488282, 0.09003008270263672, 0.08971068572998046, 0.08980636596679688, 0.08956352233886719, 0.091340576171875, 0.08974867248535157, 0.08952301025390624, 0.08948735809326172, 0.09013855743408203, 0.09082428741455079, 0.09040870666503906, 0.09026387023925782, 0.09010832214355469, 0.08950374603271484, 0.09034957122802735, 0.08947711944580078, 0.08999664306640626, 0.09027641296386718, 0.0911258544921875, 0.09056690979003906, 0.089746337890625, 0.09001455688476563, 0.08958975982666016, 0.08982112121582031, 0.09112950134277344, 0.09219261169433594, 0.09070012664794921, 0.09026563262939453, 0.08980883026123047, 0.09010173034667969, 0.0915235824584961, 0.09104156494140625, 0.09029341125488281, 0.08990396881103516, 0.09011808013916016, 0.08980646514892578, 0.09058963012695312, 0.09127043151855468, 0.09009225463867188, 0.0898677749633789, 0.08968179321289063, 0.09069222259521484, 0.0906424331665039, 0.09250816345214843, 0.09106022644042969, 0.09028928375244141, 0.09139699554443359, 0.09588057708740234, 0.09126783752441406, 0.09034265899658203, 0.08998489379882812, 0.09031478118896484, 0.09181651306152344, 0.0910716781616211, 0.09185993957519531, 0.09025865936279297, 0.0897380142211914, 0.09030592346191406, 0.08947161865234375, 0.08986150360107421, 0.09024143981933594, 0.09010368347167969, 0.09028854370117187, 0.09022991943359375, 0.08924034881591797, 0.08884630584716798, 0.0892541732788086, 0.08895622253417969, 0.09024716949462891, 0.09016115570068359, 0.09003948974609376, 0.09034835052490234, 0.09103075408935547, 0.0917154541015625, 0.09049180603027343, 0.08923308563232422, 
0.09007135772705079, 0.09092915344238281, 0.0899399642944336, 0.09044786834716798, 0.09039670562744141, 0.09196137237548828, 0.09060150146484375, 0.09027574157714843, 0.09055232238769531, 0.09018982696533204, 0.09007465362548828, 0.09115814208984375, 0.09168982696533202, 0.0920025634765625, 0.09076505279541015, 0.0927674560546875, 0.09076348876953125, 0.09042348480224609, 0.09124697875976563, 0.09162957000732422, 0.09101110076904297, 0.09062601470947265, 0.09083699035644531, 0.0912384033203125, 0.09142179107666015, 0.09244560241699219, 0.09168057250976562, 0.09125619506835937, 0.09110201263427735, 0.0944940185546875, 0.09075782775878906, 0.09167667388916016, 0.09044377899169923, 0.0907874526977539, 0.09046041870117187, 0.09059728240966797, 0.09049520111083985, 0.09064393615722656, 0.0909927978515625, 0.09082099151611328, 0.0905129623413086, 0.0908272933959961, 0.09064873504638672, 0.09068310546875, 0.09120361328125, 0.09045782470703125, 0.09060995483398437, 0.09124409484863281, 0.09037049865722656, 0.09112322998046875, 0.09089469146728515, 0.09073177337646485, 0.09106317138671875, 0.09104793548583984, 0.09075711822509766, 0.09037404632568359, 0.08997628784179687, 0.09021459197998047, 0.09015078735351563, 0.09073222351074219, 0.09042134094238281, 0.09001811218261718, 0.09018319702148438, 0.09040108489990234, 0.09018025970458984, 0.09000281524658203, 0.09022147369384766, 0.09029385375976562, 0.09118259429931641, 0.09095024108886719, 0.09065062713623047, 0.09016457366943359, 0.09062179565429687, 0.091351806640625, 0.09075225830078125, 0.09233817291259766, 0.09012079620361328, 0.09081597137451172, 0.09030636596679688, 0.09145629119873047, 0.09058512115478516, 0.09080633544921875, 0.09123846435546876, 0.09120697784423829, 0.09497062683105469, 0.09103968048095704, 0.09424877166748047, 0.09023439788818359, 0.0907209243774414, 0.09041680145263672, 0.09036547088623047, 0.090823486328125, 0.08988617706298828, 0.09031938934326172, 0.08999321746826172, 0.09092649841308593, 0.09134674835205078, 0.09069033813476562, 0.08996665954589844, 0.09070585632324218, 0.09101900482177734, 0.0912938232421875, 0.09091852569580078, 0.09110572814941406, 0.09057491302490234, 0.09113753509521484, 0.09042380523681641, 0.09127760314941406, 0.09023040008544922, 0.08959394836425781, 0.08942189025878906, 0.09005449676513672, 0.08955654144287109, 0.0898620834350586, 0.0902039337158203, 0.09021727752685547, 0.08956313323974609, 0.08970982360839844, 0.08956422424316406, 0.08929539489746094, 0.08968572998046875, 0.0899959716796875, 0.09014067077636718, 0.09439437103271485, 0.09054962921142579, 0.08911526489257812, 0.08970457458496094, 0.08998067474365234, 0.08958812713623047, 0.08929251098632812, 0.08888102722167969, 0.08913318634033203, 0.08970649719238281, 0.09007718658447265, 0.08935964965820313, 0.08927510070800782, 0.08949350738525391, 0.09262659454345704, 0.08973283386230468, 0.09009011077880859, 0.08979046630859375, 0.08927964782714844, 0.08982745361328125, 0.09207017517089844, 0.09151737976074219, 0.09085104370117188, 0.09079420471191406, 0.0910623016357422, 0.09066684722900391, 0.09070406341552735, 0.0910561294555664, 0.09044898986816406, 0.09054246520996094, 0.09061634826660156, 0.09185266876220703, 0.09059033966064453, 0.09101824188232421, 0.09102950286865234, 0.0909024658203125, 0.09029049682617188, 0.08984703826904297, 0.09086160278320313, 0.09087606048583985, 0.09050511932373047, 0.09025766754150391, 0.09389004516601562, 0.09057286071777344, 0.09088265228271485, 0.09004236602783203, 0.08998297882080078, 
0.08988262176513671, 0.09018163299560547, 0.09036905670166015, 0.09010594940185547, 0.08958380889892578, 0.08982189178466797, 0.09024915313720704, 0.09023903656005859, 0.09010755157470703, 0.09039715576171875, 0.09042655944824218, 0.0903597412109375, 0.0905291519165039, 0.0908642578125, 0.09082653045654297, 0.09008354949951172, 0.09030860900878906, 0.09258735656738282, 0.09081718444824219, 0.09034751892089844, 0.08987648010253907, 0.09021849822998047, 0.09050236511230468, 0.09045238494873047, 0.09238361358642579, 0.0899788818359375, 0.08943180847167968, 0.08983372497558594, 0.0905871353149414, 0.09008953857421875, 0.08999040222167969, 0.09061017608642578, 0.09012271881103516, 0.08994950103759766, 0.09043516540527344, 0.09052857971191407, 0.09009152221679688, 0.09001983642578125, 0.08976505279541015, 0.09081529235839844, 0.09115010833740235, 0.09067648315429687, 0.09013142395019531, 0.09074393463134765, 0.09027059173583984, 0.09088419342041015, 0.09019792175292969, 0.09005606079101562, 0.09010768127441406, 0.0898056640625, 0.09019391632080079, 0.09046825408935547, 0.0900506591796875, 0.0913469467163086, 0.08984780883789062, 0.09010176086425781, 0.09030598449707031, 0.09022054290771485, 0.0898115234375, 0.09005900573730469, 0.08998886108398438, 0.09042329406738281, 0.09055955505371094, 0.09308274841308593, 0.09062703704833984, 0.09028284454345703, 0.09005820465087891, 0.09012073516845703, 0.090355712890625, 0.09018367767333985, 0.08972605133056641, 0.09025833892822266, 0.08997273254394532, 0.09039600372314453, 0.09112528228759766, 0.09114380645751953, 0.09529183959960938, 0.09382867431640625, 0.09088086700439453, 0.08991305541992188, 0.08990544128417968, 0.08972697448730468, 0.08993587493896485, 0.09005792236328125, 0.09099308776855469, 0.0908139877319336, 0.09036067199707032, 0.09033113861083984, 0.09039974212646484, 0.09159353637695312, 0.09015030670166016, 0.09039513397216797, 0.09005903625488282, 0.09043968200683594, 0.09042329406738281, 0.09027174377441406, 0.0902739486694336, 0.09050492858886719, 0.09015625762939453, 0.09024111938476563, 0.09013945770263672, 0.08995571136474609, 0.08975222778320313, 0.0896839370727539, 0.08993382263183594, 0.0900505599975586, 0.09006412506103516, 0.09018450927734376, 0.09002492523193359, 0.0898338851928711, 0.09006956481933594, 0.09012633514404297, 0.09080774688720702, 0.09001407623291016, 0.09011148834228516, 0.09007791900634765, 0.0902221450805664, 0.09061116790771484, 0.09024928283691407, 0.09015113830566407, 0.09010198211669922, 0.08986873626708984, 0.09019187164306641, 0.09128345489501953, 0.09073458862304687, 0.09016320037841796, 0.09132646179199219, 0.09377184295654296, 0.09038841247558593, 0.0904089584350586, 0.09077760314941406, 0.090818115234375, 0.09026195526123047, 0.09037596893310547, 0.10164399719238282, 0.09004815673828125, 0.09018873596191407]",tokens/s,11.055452840630394,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,847.179776,565.116928,0.0,169.869312,150.669312,s,1,8.0183232421875,8.0183232421875,0.0,8.0183232421875,8.0183232421875,8.0183232421875,8.0183232421875,[8.0183232421875],,kWh,2.2487715045819335e-05,2.4729185654361377e-06,7.296950282009718e-06,3.225758389326519e-05,,MB,1175.339008,625.934336,0.0,209.7152,193.680384,s,10,0.17156870079040526,0.017156870079040525,7.150824827523619e-05,0.01715390396118164,0.017239720726013184,0.017247108173370364,0.017253018131256105,"[0.01707529640197754, 0.01713747215270996, 0.01722015953063965, 0.01717033576965332, 0.01709449577331543, 0.017076351165771483, 0.017069311141967775, 0.017232704162597656, 0.01725449562072754, 0.017238079071044923]",tokens/s,14921.136478893035,kWh,5.192801562426613e-07,5.726572567478513e-08,2.222930801672911e-07,7.988389620847376e-07,tokens/kWh,320465090.150228,MB,1209.30304,628.031488,0.0,211.812352,193.682944,s,10,10.300430114746094,1.0300430114746093,0.006095907972148767,1.0296256713867187,1.0404574340820314,1.0405339538574219,1.0405951696777345,"[1.0406104736328126, 1.0305194091796874, 1.031248779296875, 1.0312947998046875, 1.02873193359375, 1.02442626953125, 1.0207991333007813, 1.026128173828125, 1.026230712890625, 1.0404404296875]",tokens/s,61.16249447662308,kWh,3.014242029250486e-05,3.324213239412338e-06,1.139491065803112e-05,4.486154418994832e-05,tokens/kWh,1404320.8083353443,,s,630,10.294758337020868,0.016340886249239482,0.00038202871475142127,0.0162696475982666,0.016512060356140137,0.016652121543884277,0.01768288057327271,"[0.01618534469604492, 0.016166080474853517, 0.016149503707885742, 0.016114719390869142, 0.01619228744506836, 0.016207872390747072, 0.01624892807006836, 0.018083103179931642, 0.016312128067016603, 0.01613702392578125, 0.017709087371826172, 0.017514463424682616, 0.016442720413208007, 0.016272031784057617, 0.016226207733154297, 0.0162325439453125, 0.016644096374511717, 0.016863231658935548, 0.01980143928527832, 0.016806528091430663, 0.01628489685058594, 0.016421695709228516, 0.016300031661987305, 0.017071968078613282, 0.02018320083618164, 0.01657276725769043, 0.016338207244873046, 0.01640291213989258, 0.016250240325927735, 0.01624038314819336, 0.01614723205566406, 0.016293600082397462, 0.016119199752807616, 0.016180095672607423, 0.016130367279052735, 0.016138111114501953, 0.016115520477294924, 0.016146720886230467, 0.016512319564819335, 0.016150943756103514, 0.01622198486328125, 0.016152063369750978, 0.016155359268188476, 0.01617305564880371, 0.01645155143737793, 0.016265247344970705, 0.016174591064453125, 0.01626576042175293, 0.016316383361816407, 0.01628335952758789, 0.016334880828857423, 0.01644553565979004, 0.016427167892456053, 0.016374975204467773, 0.016865983963012695, 0.01635865592956543, 0.016429216384887695, 0.016366111755371095, 0.016320192337036132, 0.016338911056518555, 0.016310943603515624, 0.016329824447631838, 0.0163786563873291, 0.016369855880737305, 0.016556896209716798, 0.016360319137573243, 0.016352415084838867, 0.016384063720703126, 0.01628982353210449, 0.01639459228515625, 0.01628438377380371, 0.01651283264160156, 0.01642451286315918, 0.01626358413696289, 0.016348384857177736, 0.016224960327148437, 0.016277151107788088, 0.016281600952148437, 0.016294559478759765, 0.01630745506286621, 0.016321056365966796, 0.016300031661987305, 0.016297983169555663, 0.016281600952148437, 0.016360639572143554, 0.016257856369018556, 0.016383264541625978, 0.01644822311401367, 0.01618534469604492, 0.01628553581237793, 0.0161824951171875, 
0.016234527587890624, 0.016337472915649413, 0.016326368331909178, 0.01670003128051758, 0.016451583862304688, 0.016297983169555663, 0.01624025535583496, 0.016447231292724608, 0.016296831130981446, 0.01626041603088379, 0.016308671951293947, 0.016345151901245115, 0.01626054382324219, 0.01649260711669922, 0.016347583770751954, 0.01622220802307129, 0.016229759216308592, 0.016321151733398438, 0.01640457534790039, 0.01632451248168945, 0.016283775329589845, 0.016342432022094726, 0.016592639923095703, 0.01630246353149414, 0.01678985595703125, 0.016316415786743164, 0.016611328125, 0.016271263122558593, 0.016186880111694335, 0.016463743209838868, 0.016263904571533202, 0.01625497627258301, 0.01649692726135254, 0.016396223068237306, 0.016293664932250977, 0.01622185516357422, 0.016336767196655274, 0.016427167892456053, 0.016573087692260742, 0.016338848114013673, 0.016439136505126954, 0.016547712326049804, 0.016216352462768556, 0.016318464279174806, 0.016238431930541992, 0.016175264358520507, 0.01622742462158203, 0.016391071319580078, 0.016252927780151367, 0.016465152740478516, 0.016281375885009764, 0.016202720642089843, 0.016199743270874024, 0.016248767852783202, 0.016275775909423827, 0.01639743995666504, 0.0163023681640625, 0.016212255477905273, 0.016302080154418946, 0.016334272384643553, 0.016247167587280273, 0.016364831924438477, 0.016400991439819337, 0.016254720687866212, 0.016195871353149413, 0.016240928649902345, 0.016285152435302735, 0.016331296920776367, 0.016349407196044923, 0.016222240447998047, 0.01631820869445801, 0.016408000946044922, 0.016411552429199217, 0.016581663131713865, 0.016503648757934572, 0.01638185691833496, 0.016334720611572266, 0.016254047393798828, 0.016377920150756835, 0.01625587272644043, 0.016383968353271484, 0.016351232528686522, 0.01652943992614746, 0.016960832595825197, 0.01637887954711914, 0.016471168518066407, 0.016738847732543947, 0.016405824661254884, 0.016618175506591795, 0.01637705612182617, 0.016276256561279297, 0.016441343307495117, 0.016547775268554686, 0.0163035831451416, 0.016311904907226563, 0.01628848075866699, 0.016369728088378905, 0.016294143676757813, 0.01622889518737793, 0.01638444709777832, 0.01615443229675293, 0.01644918441772461, 0.016158304214477538, 0.016326911926269533, 0.01628553581237793, 0.0162576961517334, 0.01618230438232422, 0.01628668785095215, 0.016408575057983397, 0.01651203155517578, 0.016327648162841796, 0.01641267204284668, 0.016319488525390623, 0.0162478084564209, 0.016284767150878905, 0.01656921577453613, 0.016326976776123048, 0.01628495979309082, 0.01626976013183594, 0.01643071937561035, 0.01627107238769531, 0.016261119842529297, 0.016369951248168944, 0.01626540756225586, 0.016197824478149415, 0.016213375091552733, 0.016300895690917968, 0.016260896682739258, 0.016415807723999025, 0.016270271301269533, 0.01637817573547363, 0.01616044807434082, 0.016236543655395508, 0.016257280349731444, 0.01657360076904297, 0.01695804786682129, 0.017618719100952147, 0.016459808349609376, 0.016363136291503905, 0.016319232940673827, 0.016245920181274413, 0.01641049575805664, 0.016759008407592774, 0.016460351943969727, 0.016451839447021485, 0.01661516761779785, 0.01637171173095703, 0.01626316833496094, 0.01627676773071289, 0.01622268867492676, 0.01619327926635742, 0.016243551254272463, 0.016339839935302733, 0.01631452751159668, 0.01623632049560547, 0.016347423553466797, 0.01624041557312012, 0.016257024765014647, 0.01633910369873047, 0.016319007873535157, 0.016533376693725586, 0.01663983917236328, 0.016226303100585936, 0.016250080108642578, 
0.016251680374145507, 0.016216287612915038, 0.016285472869873047, 0.016359424591064452, 0.01637347221374512, 0.016285247802734375, 0.016345535278320313, 0.01629648017883301, 0.016327648162841796, 0.01624163246154785, 0.016225696563720703, 0.016302495956420898, 0.01626316833496094, 0.01621401596069336, 0.016658687591552736, 0.016342880249023438, 0.016347040176391603, 0.01639423942565918, 0.016302303314208986, 0.016248607635498048, 0.016268287658691406, 0.016326847076416014, 0.016255807876586915, 0.016236223220825196, 0.016261472702026367, 0.016246751785278322, 0.016236543655395508, 0.01627136039733887, 0.016257024765014647, 0.016190591812133788, 0.016354400634765624, 0.016437023162841798, 0.01659903907775879, 0.0164003849029541, 0.01637727928161621, 0.016308576583862304, 0.016316032409667967, 0.016280160903930665, 0.016293888092041017, 0.016680959701538087, 0.016367807388305664, 0.01631820869445801, 0.016369407653808593, 0.01630847930908203, 0.01624684715270996, 0.01617100715637207, 0.01627097511291504, 0.016310527801513673, 0.01631161689758301, 0.016333984375, 0.016303775787353515, 0.016200927734375, 0.01624880027770996, 0.016229183197021484, 0.016313472747802735, 0.01624073600769043, 0.01662553596496582, 0.01632758331298828, 0.01639833641052246, 0.016281024932861328, 0.016156320571899415, 0.016189504623413085, 0.016170719146728515, 0.016247360229492188, 0.016241952896118163, 0.016188127517700195, 0.0161976318359375, 0.016174463272094725, 0.01619568061828613, 0.016279104232788087, 0.01620867156982422, 0.016187423706054686, 0.01617475128173828, 0.016152959823608398, 0.01621824073791504, 0.01617100715637207, 0.01614556884765625, 0.016225120544433595, 0.016190719604492188, 0.01609721565246582, 0.016198463439941406, 0.01621785545349121, 0.01624684715270996, 0.01619513511657715, 0.016321184158325196, 0.01707721519470215, 0.016247135162353515, 0.01620035171508789, 0.01617417526245117, 0.01625107192993164, 0.016243104934692384, 0.0162491512298584, 0.01619705581665039, 0.01620012855529785, 0.01624281692504883, 0.016232255935668946, 0.0162674560546875, 0.016287967681884764, 0.016301311492919923, 0.016270143508911133, 0.016251840591430665, 0.016470975875854492, 0.016346719741821288, 0.01635968017578125, 0.01616806411743164, 0.016214176177978514, 0.01644822311401367, 0.01630953598022461, 0.016226720809936524, 0.016206144332885742, 0.016356767654418944, 0.016287391662597656, 0.016232927322387694, 0.016054752349853516, 0.016068607330322265, 0.016269535064697267, 0.016299808502197265, 0.016529184341430664, 0.016265439987182616, 0.01657241630554199, 0.016227743148803712, 0.016142719268798827, 0.01614473533630371, 0.016260927200317382, 0.01618934440612793, 0.016200576782226563, 0.016191488265991212, 0.01614028739929199, 0.01616486358642578, 0.016158720016479493, 0.016174463272094725, 0.016179840087890626, 0.01620582389831543, 0.016084224700927734, 0.016105247497558595, 0.01614886474609375, 0.016088863372802735, 0.0161146240234375, 0.016092063903808594, 0.01608803176879883, 0.01627136039733887, 0.016082944869995116, 0.016107616424560548, 0.016078752517700197, 0.01618124771118164, 0.016084991455078124, 0.01609219169616699, 0.016087263107299805, 0.016111711502075195, 0.016060319900512696, 0.01614300727844238, 0.01621206474304199, 0.016220224380493163, 0.016291296005249024, 0.016241119384765624, 0.016189599990844728, 0.01606844711303711, 0.01607587242126465, 0.016102432250976562, 0.016131168365478517, 0.016290687561035157, 0.01624025535583496, 0.01618060874938965, 0.016243616104125978, 0.016089088439941408, 
0.016281471252441407, 0.016117984771728516, 0.016254880905151366, 0.01622547149658203, 0.016239423751831055, 0.016184736251831054, 0.01637424087524414, 0.0161342716217041, 0.016227392196655272, 0.016202943801879883, 0.016323328018188477, 0.01640755271911621, 0.016685056686401366, 0.01622425651550293, 0.01620297622680664, 0.01652751922607422, 0.016227167129516603, 0.016176607131958006, 0.01622047996520996, 0.016213375091552733, 0.01630681610107422, 0.016234464645385742, 0.016236352920532226, 0.016271039962768553, 0.016149023056030273, 0.016213695526123048, 0.016197376251220703, 0.016185279846191405, 0.01617519950866699, 0.016189184188842774, 0.016177663803100584, 0.01618092727661133, 0.016149152755737306, 0.01615056037902832, 0.016262208938598633, 0.01662031936645508, 0.01751862335205078, 0.016235679626464845, 0.01617318344116211, 0.016212736129760742, 0.016240543365478515, 0.01620800018310547, 0.016256864547729493, 0.01618751907348633, 0.016209535598754883, 0.01617958450317383, 0.016121856689453123, 0.016230016708374023, 0.01618556785583496, 0.016197792053222658, 0.016544895172119142, 0.016241695404052733, 0.01619651222229004, 0.016208192825317384, 0.016212480545043945, 0.01652284812927246, 0.016308095932006834, 0.016402624130249024, 0.016492639541625977, 0.01631702423095703, 0.01627836799621582, 0.01612019157409668, 0.01611030387878418, 0.016080223083496093, 0.01612953567504883, 0.016309087753295898, 0.01618937683105469, 0.016158336639404296, 0.016204383850097655, 0.01619273567199707, 0.016274175643920898, 0.016408607482910155, 0.01649260711669922, 0.01627849578857422, 0.016237823486328126, 0.016412256240844726, 0.016193824768066405, 0.016272415161132814, 0.01652332878112793, 0.016394527435302734, 0.016310047149658204, 0.01629484748840332, 0.016317472457885743, 0.016384672164916993, 0.016266656875610352, 0.01635193634033203, 0.016207775115966796, 0.016224384307861328, 0.01623632049560547, 0.016312416076660157, 0.016416767120361327, 0.01638297653198242, 0.016446720123291014, 0.01637715148925781, 0.01624947166442871, 0.016199487686157227, 0.016142335891723633, 0.01615667152404785, 0.016207775115966796, 0.01624073600769043, 0.016214176177978514, 0.016155935287475585, 0.016268863677978515, 0.016169376373291015, 0.0161779842376709, 0.016158239364624023, 0.016234752655029296, 0.016214303970336914, 0.016172767639160156, 0.016172767639160156, 0.016196128845214843, 0.01624393653869629, 0.016200096130371093, 0.01616499137878418, 0.016134143829345703, 0.016231456756591798, 0.016488607406616212, 0.01621036720275879, 0.016297567367553712, 0.016229408264160156, 0.016197376251220703, 0.016141792297363282, 0.016138784408569334, 0.016136192321777345, 0.016191488265991212, 0.016404096603393554, 0.016169343948364258, 0.01628758430480957, 0.01632476806640625, 0.016211231231689452, 0.016173791885375977, 0.016760128021240234, 0.01633350372314453, 0.01641046333312988, 0.016212255477905273, 0.016289663314819337, 0.01669728088378906, 0.01695737648010254, 0.0167589111328125, 0.01629203224182129, 0.016258880615234374, 0.016392223358154295, 0.016162784576416015, 0.01616646385192871, 0.016199840545654296, 0.01620368003845215, 0.016402816772460936, 0.01631395149230957, 0.01631920051574707, 0.01624678421020508, 0.016248832702636717, 0.016178943634033202, 0.016225536346435546, 0.016251039505004884, 0.01621283149719238, 0.016257087707519532, 0.016305280685424806, 0.016239423751831055, 0.016349184036254884, 0.016510463714599608, 0.016834720611572266, 0.017181184768676756, 0.01657859230041504, 0.01641379165649414, 
0.016361440658569336, 0.01625164794921875, 0.01623859214782715, 0.016281600952148437, 0.016289024353027343, 0.016282367706298827, 0.016230655670166017, 0.016187135696411132, 0.01619254493713379, 0.01618409538269043, 0.016227712631225587, 0.016533855438232423, 0.016312128067016603, 0.016255647659301757, 0.016242528915405275, 0.016251039505004884, 0.016219135284423827, 0.01634627151489258, 0.016239744186401367, 0.016214847564697266, 0.016195327758789062, 0.016296096801757812, 0.01627891159057617, 0.01627814483642578, 0.016146432876586913, 0.01617296028137207, 0.016127967834472658, 0.0161661434173584, 0.01616896057128906, 0.01642585563659668, 0.016256671905517578, 0.016371328353881835, 0.016536319732666015, 0.016377824783325196, 0.016369760513305662, 0.01719081687927246, 0.020703424453735353, 0.020321727752685547, 0.01634284782409668, 0.01621664047241211, 0.01658857536315918, 0.016308095932006834, 0.016333152770996093, 0.016193599700927735, 0.01853228759765625, 0.016973695755004882]",tokens/s,61.19619124369957,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,5185.974272,3489.529856,0.0,3154.116608,3134.091264,s,1,14.7072783203125,14.7072783203125,0.0,14.7072783203125,14.7072783203125,14.7072783203125,14.7072783203125,[14.7072783203125],,kWh,0.0002320459351583774,2.5589196607052807e-05,7.655783902399405e-05,0.00033419297078942425,,MB,1608.265728,3978.166272,0.0,3554.67264,3441.951744,s,10,0.7850069046020508,0.07850069046020507,0.0005437717370781247,0.07828460693359375,0.07924076461791993,0.07940331077575684,0.07953334770202637,"[0.07806924438476562, 0.07801078033447266, 0.07835507202148438, 0.07790383911132813, 0.07805830383300781, 0.07920464324951172, 0.07897200012207031, 0.07821414184570312, 0.07865302276611329, 0.07956585693359375]",tokens/s,3261.117813094599,kWh,2.295875382682233e-06,2.5319507537064435e-07,1.3848643891406671e-06,3.933934847193544e-06,tokens/kWh,65074794.053244054,MB,1627.451392,3980.263424,0.0,3554.67264,3441.954304,s,10,45.91089501953125,4.5910895019531255,0.0215892227340839,4.585295898437501,4.6197435058593745,4.625288745117188,4.629724936523438,"[4.57457958984375, 4.57050634765625, 4.58025439453125, 4.5662119140625, 4.56919775390625, 4.59033740234375, 4.608150390625, 4.60231201171875, 4.630833984375, 4.61851123046875]",tokens/s,13.72223302839092,kWh,0.00013448114285648173,1.4833532377840734e-05,5.6938265602659e-05,0.0002062529408369815,tokens/kWh,305450.1901613807,,s,630,45.90886572265627,0.07287121543278768,0.0010165101681173213,0.07260729598999024,0.07387986907958985,0.07453413429260253,0.07699828987121582,"[0.07273471832275391, 0.07238451385498047, 0.07271202850341797, 0.07243318176269531, 0.07272921752929687, 0.07242269134521484, 0.072200927734375, 0.07236402893066406, 0.07208550262451172, 0.07206092834472656, 0.07321395111083985, 0.07221437072753906, 0.07237353515625, 0.07264755249023437, 0.07248281860351563, 0.07226956939697265, 
0.07263359832763672, 0.07240806579589844, 0.07285350036621094, 0.07256473541259766, 0.07216537475585938, 0.07255782318115235, 0.07222962951660156, 0.07242752075195312, 0.07213465881347657, 0.07211385345458984, 0.07212268829345703, 0.07281664276123047, 0.07240704345703125, 0.07233468627929687, 0.07311011505126953, 0.07257504272460938, 0.07254220581054688, 0.07256256103515625, 0.07238224029541016, 0.07307014465332032, 0.07231932830810547, 0.072059326171875, 0.07228211212158203, 0.07236918640136719, 0.07240748596191406, 0.07254889678955079, 0.07233952331542969, 0.07238636779785156, 0.07463862609863281, 0.07282064056396484, 0.0730654067993164, 0.07244096374511719, 0.07222972869873047, 0.07258934020996094, 0.07237836456298828, 0.07251763153076173, 0.07260160064697266, 0.07387497711181641, 0.0735769271850586, 0.073034912109375, 0.07301407623291016, 0.07316441345214844, 0.07328745269775391, 0.0729217300415039, 0.07280780792236329, 0.0725960922241211, 0.07277362823486327, 0.07218544006347656, 0.07249756622314453, 0.07221453094482422, 0.07221459197998047, 0.07221855926513672, 0.0722903060913086, 0.07204454040527344, 0.07272038269042969, 0.07273471832275391, 0.07388159942626953, 0.07239794921875, 0.07231132507324219, 0.07254589080810547, 0.0723770523071289, 0.07243778991699219, 0.07222681427001953, 0.0723763198852539, 0.0720711669921875, 0.07217676544189452, 0.07206182098388672, 0.0719700164794922, 0.07268374633789063, 0.07280083465576172, 0.07233455657958984, 0.07248976135253907, 0.0722677764892578, 0.07250688171386718, 0.07229081726074219, 0.07243981170654297, 0.07210594940185547, 0.0720814437866211, 0.07222886657714844, 0.07227938842773438, 0.072321533203125, 0.07212662506103516, 0.07291209411621094, 0.07259420776367187, 0.07338579559326172, 0.07257907104492188, 0.07228025817871093, 0.07587225341796874, 0.07315238189697265, 0.07226998138427734, 0.07209974670410156, 0.07218348693847657, 0.07219881439208985, 0.07213231658935547, 0.0721244125366211, 0.07222796630859375, 0.07245081329345703, 0.07276966094970704, 0.07411917114257813, 0.07396966552734376, 0.07326310729980469, 0.07208454132080078, 0.0720877456665039, 0.07219245147705078, 0.07191567993164062, 0.07208975982666016, 0.0720404510498047, 0.07536640167236328, 0.07247666931152344, 0.07254124450683594, 0.07346800231933594, 0.07231565093994141, 0.07204595184326172, 0.0722192611694336, 0.07218182373046875, 0.07218169403076172, 0.07233126068115234, 0.07233740997314453, 0.0723490219116211, 0.07240672302246094, 0.07250633239746093, 0.0725401611328125, 0.07249449920654297, 0.0727394256591797, 0.07321804809570312, 0.0725538558959961, 0.07256272125244141, 0.07292281341552734, 0.07266738891601562, 0.07405225372314453, 0.07278368377685547, 0.0726447982788086, 0.07244786834716797, 0.07252595520019531, 0.07241932678222657, 0.07255859375, 0.07254630279541016, 0.07297433471679687, 0.07370751953125, 0.072844482421875, 0.07272531127929688, 0.07266918182373047, 0.07262617492675781, 0.07280435180664062, 0.07265798187255859, 0.0725656967163086, 0.07257196807861328, 0.0725618896484375, 0.07266480255126953, 0.07252582550048828, 0.07253196716308594, 0.07277145385742187, 0.07279132843017579, 0.07265708923339843, 0.07254854583740235, 0.07260636901855469, 0.07309907531738281, 0.07244348907470703, 0.07270646667480468, 0.07258930969238281, 0.07272614288330079, 0.07247705841064453, 0.072595458984375, 0.07264236450195312, 0.07569538879394531, 0.07357430267333985, 0.07268863677978515, 0.07277315521240234, 0.07274505615234375, 0.07248115539550781, 0.07240937805175782, 
0.07228739166259765, 0.0722927703857422, 0.07253139495849609, 0.07225218963623047, 0.07241741180419922, 0.07232717132568359, 0.07245436859130859, 0.07266223907470704, 0.07236665344238281, 0.07387967681884766, 0.07554239654541016, 0.07275910186767579, 0.07223117065429688, 0.0720343017578125, 0.07204857635498046, 0.07203135681152344, 0.07196351623535156, 0.0720749740600586, 0.07209398651123047, 0.07217129516601563, 0.07293312072753906, 0.07270614624023437, 0.07198553466796875, 0.07249858856201172, 0.07218816375732422, 0.07192816162109375, 0.07203369903564454, 0.07208940887451172, 0.07213539123535156, 0.072168701171875, 0.07221526336669921, 0.07209699249267579, 0.0721355209350586, 0.07231407928466797, 0.07282755279541016, 0.07280780792236329, 0.07221529388427735, 0.07235587310791015, 0.07227999877929688, 0.07205216217041016, 0.07210377502441406, 0.07205967712402343, 0.07200534057617188, 0.07206867218017578, 0.07203011322021484, 0.07218057250976563, 0.07221040344238282, 0.07222659301757813, 0.07278819274902344, 0.07262413024902344, 0.07224729919433594, 0.07223705291748046, 0.07659724426269532, 0.07255449676513671, 0.07237602996826172, 0.07232454681396484, 0.0721944351196289, 0.07256111907958984, 0.07210189056396485, 0.07225958251953125, 0.07231820678710937, 0.07207990264892578, 0.07209529876708984, 0.07342147064208984, 0.07454515075683593, 0.07734886169433594, 0.0723966064453125, 0.07206703948974609, 0.0720321273803711, 0.072171875, 0.07478067016601563, 0.07299072265625, 0.07296758270263672, 0.07246288299560547, 0.072499267578125, 0.07312297821044922, 0.07370838165283203, 0.07221071624755859, 0.07222771453857423, 0.07223280334472656, 0.07261901092529296, 0.07206502532958985, 0.07212236785888672, 0.07200697326660156, 0.07208386993408203, 0.07202960205078125, 0.07224127960205078, 0.07199616241455079, 0.07210214233398438, 0.07258905792236328, 0.07232915496826171, 0.07286927795410156, 0.07219193267822266, 0.07230332946777343, 0.07380588531494141, 0.0721418228149414, 0.07210211181640624, 0.07217955017089844, 0.07212940979003907, 0.07194406127929688, 0.07228803253173828, 0.07192201232910156, 0.07273471832275391, 0.0726320037841797, 0.07248108673095703, 0.0720091552734375, 0.07240966033935547, 0.07220207977294922, 0.07271971130371094, 0.07195712280273438, 0.07208294677734375, 0.0720145263671875, 0.07213638305664062, 0.07233106994628906, 0.07198361968994141, 0.07262207794189453, 0.07260591888427734, 0.07253961944580078, 0.07237664031982421, 0.07262345886230469, 0.07243433380126953, 0.07212665557861328, 0.07328749084472656, 0.07310470581054687, 0.07266963195800781, 0.07228543853759765, 0.07259993743896484, 0.07275788879394532, 0.07325475311279298, 0.0745208282470703, 0.07332249450683594, 0.0732768325805664, 0.07269865417480469, 0.07248467254638671, 0.07221862030029297, 0.07240809631347657, 0.07293609619140624, 0.07208582305908204, 0.07203536224365234, 0.07224755096435546, 0.0721824951171875, 0.0720440673828125, 0.07217359924316406, 0.07213510131835937, 0.07221833801269531, 0.07212060546875, 0.0722677764892578, 0.07229849243164063, 0.07216706848144532, 0.07237052917480469, 0.07268147277832031, 0.07263231658935547, 0.07205052947998047, 0.0720665283203125, 0.07210054779052734, 0.07216912078857422, 0.07248429107666016, 0.07222281646728515, 0.07218179321289063, 0.07210806274414062, 0.07212518310546875, 0.07195442962646484, 0.07208755493164062, 0.0726976318359375, 0.08179325103759766, 0.0728248291015625, 0.07266918182373047, 0.07258112335205077, 0.07223296356201171, 0.07229865264892578, 0.07225532531738281, 
0.07232534027099609, 0.07265980529785156, 0.07228921508789063, 0.07244735717773437, 0.07231494140625, 0.07217830657958985, 0.07222675323486329, 0.07684700775146484, 0.07268099212646484, 0.0723359375, 0.07256166076660156, 0.07353139495849609, 0.0737015380859375, 0.07422431945800781, 0.07366659545898438, 0.07474153900146484, 0.07550396728515625, 0.07412057495117187, 0.07310995483398437, 0.07298057556152343, 0.07669344329833984, 0.07307324981689453, 0.07318742370605469, 0.0731176986694336, 0.07297180938720703, 0.07314838409423828, 0.07255091094970703, 0.08109420776367188, 0.07262000274658204, 0.07238499450683594, 0.07244390106201172, 0.07219609832763672, 0.07207078552246093, 0.07236029052734375, 0.0774697265625, 0.07307469177246094, 0.07267692565917969, 0.07242591857910156, 0.07232300567626954, 0.07425030517578125, 0.0759312973022461, 0.07293987274169922, 0.07292086029052734, 0.07257315063476563, 0.07219171142578125, 0.07296233367919921, 0.07220838165283203, 0.07258643341064454, 0.07686239624023437, 0.07263177490234375, 0.07325328063964844, 0.07226367950439454, 0.0722229461669922, 0.07275087738037109, 0.0724268798828125, 0.0725059814453125, 0.07258930969238281, 0.07249945831298828, 0.07326080322265625, 0.07213187408447265, 0.07222550201416016, 0.0721981430053711, 0.07692256164550781, 0.07261830139160157, 0.07260160064697266, 0.07249075317382812, 0.0729459228515625, 0.07309926605224609, 0.0724664306640625, 0.07252992248535156, 0.07254835510253907, 0.07229849243164063, 0.07225878143310546, 0.07228240203857422, 0.07238095855712891, 0.07240451049804687, 0.07693526458740234, 0.07248521423339843, 0.07259465789794922, 0.0724399642944336, 0.072487548828125, 0.07247872161865235, 0.07240499114990234, 0.07254790496826172, 0.0725283203125, 0.072770751953125, 0.07278205108642578, 0.07284591674804687, 0.0725761947631836, 0.07702403259277343, 0.07291088104248047, 0.07645798492431641, 0.07311763000488282, 0.07306966400146485, 0.07284630584716797, 0.07276544189453125, 0.07276886749267578, 0.07262665557861328, 0.07258665466308593, 0.07253206634521485, 0.07250975799560547, 0.07298268890380859, 0.07269193267822266, 0.07749177551269532, 0.07315449523925781, 0.07299088287353515, 0.0726255340576172, 0.07265583801269532, 0.07272444915771484, 0.07300303649902344, 0.07289043426513672, 0.07280397033691406, 0.07283334350585938, 0.07263436889648438, 0.07266860961914062, 0.0731244125366211, 0.07282262420654297, 0.07727142333984376, 0.07310928344726562, 0.07276863861083985, 0.07329465484619141, 0.07263852691650391, 0.07285664367675782, 0.07285030364990235, 0.07347110748291015, 0.07268592071533203, 0.07249295806884766, 0.07294022369384766, 0.07270988464355468, 0.0724933090209961, 0.07247014617919922, 0.07271174621582031, 0.07277855682373047, 0.07250450897216797, 0.07260822296142579, 0.07259171295166016, 0.07282182312011719, 0.07294048309326172, 0.07264460754394532, 0.07245823669433593, 0.07274905395507812, 0.07310902404785156, 0.07269149017333984, 0.07260435485839843, 0.07266508483886719, 0.07280796813964843, 0.07426927947998047, 0.07313638305664062, 0.07271398162841797, 0.07396320343017578, 0.0750447998046875, 0.07305868530273438, 0.07262822723388672, 0.07269891357421875, 0.07294579315185547, 0.07317740631103516, 0.072921630859375, 0.07299443054199219, 0.07309919738769531, 0.07317091369628906, 0.07291529846191407, 0.07277200317382812, 0.07536815643310547, 0.07336316680908203, 0.07346419525146484, 0.07275302124023438, 0.07272860717773437, 0.07276338958740235, 0.07279785919189453, 0.07664054107666016, 0.07359033966064453, 
0.07341426849365235, 0.07308370971679687, 0.07356217956542968, 0.07340793609619141, 0.07308345794677734, 0.07297968292236329, 0.07339087677001953, 0.07302758026123046, 0.07272857666015625, 0.0728136978149414, 0.07291788482666016, 0.07306854248046875, 0.0728348159790039, 0.07281238555908204, 0.0731320037841797, 0.0730804443359375, 0.0734400634765625, 0.07318073272705078, 0.0730631332397461, 0.07397673797607422, 0.07447225952148437, 0.07417167663574219, 0.07465033721923828, 0.07386217498779298, 0.07384512329101563, 0.07427133178710937, 0.07383245086669922, 0.07403314971923829, 0.07358783721923828, 0.07390707397460937, 0.07451238250732421, 0.0737791976928711, 0.07401471710205078, 0.07417603302001953, 0.07392918395996094, 0.07387667083740235, 0.07399075317382812, 0.07371993255615235, 0.07376895904541016, 0.07391436767578125, 0.07391846466064453, 0.07339622497558594, 0.07378125, 0.0745450210571289, 0.07386895751953125, 0.07367740631103516, 0.07432694244384766, 0.07379043579101563, 0.0738344955444336, 0.07383039855957031, 0.07391779327392578, 0.07393714904785156, 0.07514153289794921, 0.07407001495361328, 0.07511030578613281, 0.07403897857666016, 0.07460086059570313, 0.07406182098388672, 0.0735959014892578, 0.07358688354492188, 0.07331417846679687, 0.072723388671875, 0.07262822723388672, 0.07284326171875, 0.07260160064697266, 0.07266722869873046, 0.07279974365234375, 0.0728819808959961, 0.07279001617431641, 0.07308963012695313, 0.07293145751953126, 0.07450150299072265, 0.0737916488647461, 0.07339862060546876, 0.07294761657714843, 0.07314217376708984, 0.07283116912841797, 0.07258905792236328, 0.07278361511230469, 0.07297074890136719, 0.07281635284423828, 0.07283740997314453, 0.07272211456298829, 0.073906494140625, 0.07292928314208984, 0.07315158081054687, 0.07276624298095703, 0.07312924957275391, 0.07382246398925782, 0.07278857421875, 0.07270713806152344, 0.0727004165649414, 0.072612060546875, 0.07263196563720703, 0.0725182113647461, 0.07278099060058593, 0.07235871887207031, 0.07257315063476563, 0.07233670043945313, 0.07246281433105468, 0.07281254577636719]",tokens/s,13.722839588456486,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,19134.058496,9961.340928,0.0,9558.818816,9558.429696,s,1,35.77979296875,35.77979296875,0.0,35.77979296875,35.77979296875,35.77979296875,35.77979296875,[35.77979296875],,kWh,0.0008314349593958165,9.170635761100147e-05,0.00027922161226601183,0.00120236292927283,,MB,6382.575616,10435.29728,0.0,10011.803648,9855.258624,s,10,1.448365203857422,0.14483652038574218,0.00018854884342875975,0.14476094818115234,0.1450369613647461,0.14517534103393556,0.1452860447692871,"[0.144791015625, 0.144706298828125, 0.145313720703125, 0.1448931884765625, 0.1448450164794922, 0.1447308807373047, 0.14471974182128905, 0.14464341735839845, 0.14500621032714844, 0.14471571350097656]",tokens/s,1767.5100127937128,kWh,4.29660710294104e-06,4.738450651187801e-07,2.864307846999824e-06,7.634760015059645e-06,tokens/kWh,33530850.936380092,MB,6386.671616,10439.491584,0.0,10013.9008,9856.30976,s,10,71.035375,7.1035375,0.046295802192115765,7.09837841796875,7.159966748046875,7.187990454101563,7.2104094189453125,"[7.21601416015625, 7.1537392578125, 7.10193701171875, 7.08136767578125, 7.06984423828125, 7.09841845703125, 7.104013671875, 7.09833837890625, 7.0519111328125, 7.059791015625]",tokens/s,8.868820640420354,kWh,0.00020666585492788773,2.2795653785660546e-05,0.00010628794614140118,0.0003357494548549495,tokens/kWh,187639.91747125032,,s,630,71.0328540420532,0.11275056197151304,0.0012117652550990936,0.11238499069213867,0.11436554946899413,0.11479696464538575,0.1169697681427002,"[0.11351168060302734, 0.11427289581298829, 0.11692646026611328, 0.1151890869140625, 0.11466614532470704, 0.11456690979003906, 0.11455622100830078, 0.11459270477294922, 0.1144873275756836, 0.114359619140625, 0.11438259124755859, 0.11426294708251954, 0.1145519027709961, 0.11436534118652343, 0.11475968170166016, 0.11463410949707031, 0.11442412567138673, 0.11440348815917968, 0.11433385467529297, 0.11417804718017578, 0.1152143325805664, 0.11401187133789062, 0.11389981079101562, 0.11480233764648437, 0.11433312225341796, 0.11423808288574219, 0.1143524169921875, 0.11410022735595703, 0.11416575622558593, 0.11416985321044922, 0.11340140533447265, 0.11424198150634765, 0.11395686340332031, 0.11525062561035156, 0.11393606567382812, 0.11406015777587891, 0.11387830352783203, 0.11453513336181641, 0.11479039764404297, 0.11456102752685547, 0.11698745727539063, 0.11494445037841797, 0.11475897979736328, 0.11455558776855469, 0.11435343933105468, 0.11398172760009766, 0.11446115112304688, 0.11431318664550781, 0.11374732971191406, 0.11433843231201171, 0.11428572845458984, 0.114432861328125, 0.11585478210449218, 0.11460665893554688, 0.1141673583984375, 0.11471036529541015, 0.1144450912475586, 0.11782637023925781, 0.11455385589599609, 0.11435622406005859, 0.11413299560546875, 0.11428659057617188, 0.11434297943115235, 0.11510591888427735, 0.11441036987304687, 0.11376627349853516, 0.11895359802246094, 
0.11389981079101562, 0.11355363464355468, 0.11361078643798828, 0.11446473693847656, 0.11477161407470703, 0.11457571411132812, 0.11394976043701172, 0.11386736297607422, 0.11388365173339844, 0.1138493423461914, 0.11442262268066407, 0.11435561370849609, 0.11356163024902344, 0.11432198333740234, 0.11397484588623047, 0.11397305297851562, 0.11356559753417969, 0.11386134338378906, 0.11417362976074219, 0.11378924560546876, 0.1139111328125, 0.11604777526855468, 0.11435289764404297, 0.11370086669921875, 0.1136394271850586, 0.11431116485595703, 0.1129504623413086, 0.11375635528564453, 0.11356358337402343, 0.11399433898925782, 0.11374781036376953, 0.11319522857666016, 0.112816162109375, 0.11275459289550781, 0.11365526580810546, 0.11291097259521485, 0.11367424011230469, 0.11418009948730469, 0.11399577331542969, 0.11400396728515624, 0.11339981079101563, 0.11396297454833984, 0.11312950134277344, 0.11191609954833984, 0.11155347442626953, 0.11277276611328126, 0.11332387542724609, 0.11229235076904297, 0.11223359680175782, 0.11205414581298828, 0.11161913299560547, 0.11208819580078125, 0.11246889495849609, 0.11265532684326172, 0.11196262359619141, 0.11194322967529297, 0.11145919799804688, 0.11290118408203124, 0.11189075469970704, 0.11306259155273438, 0.11259305572509766, 0.1116610565185547, 0.11171990203857422, 0.11139244842529297, 0.11183395385742187, 0.11155622100830079, 0.11261990356445313, 0.1124352035522461, 0.11214399719238281, 0.11249088287353516, 0.11225936126708984, 0.11283222198486328, 0.11193344116210938, 0.11224883270263672, 0.11389049530029297, 0.11306476593017578, 0.11206451416015625, 0.11264118194580078, 0.11351232147216797, 0.11317552185058594, 0.11269939422607422, 0.11282841491699219, 0.11272172546386719, 0.11229996490478515, 0.11228185272216797, 0.11176345825195312, 0.11485161590576172, 0.1129760971069336, 0.11198812866210937, 0.11198729705810546, 0.11286691284179687, 0.11528956604003907, 0.11436742401123047, 0.11367644500732423, 0.11257574462890625, 0.11249110412597656, 0.11299008178710937, 0.11242742156982422, 0.11187171173095703, 0.11191433715820312, 0.11187677001953125, 0.11387289428710938, 0.11326464080810547, 0.11295977783203125, 0.11240214538574218, 0.11212799835205078, 0.11205632019042969, 0.11238127899169922, 0.11561231994628907, 0.11346070098876954, 0.11333277130126954, 0.11317453002929688, 0.11253782653808594, 0.1123182373046875, 0.1134940185546875, 0.1138153305053711, 0.11227568054199219, 0.11244544219970704, 0.11185523223876953, 0.11481676483154298, 0.111939453125, 0.11166381072998047, 0.11286985778808593, 0.11229011535644531, 0.11177286529541015, 0.11274323272705078, 0.11198464202880859, 0.11192115020751953, 0.11158262634277344, 0.11238050842285156, 0.11210115051269531, 0.11212617492675782, 0.11193753814697266, 0.11243507385253906, 0.11270089721679688, 0.11231436920166016, 0.11238262176513672, 0.11205017852783203, 0.11203174591064453, 0.11186585235595703, 0.11177574157714844, 0.11201292419433594, 0.11170403289794922, 0.11230214691162109, 0.1124191665649414, 0.11273538970947265, 0.11556655883789063, 0.11206095886230469, 0.11199497222900391, 0.11215424346923829, 0.11201395416259766, 0.11183309173583984, 0.11136835479736328, 0.111882080078125, 0.11172659301757812, 0.1120890884399414, 0.11191705322265624, 0.11221596527099609, 0.11420412445068359, 0.11250342559814454, 0.11190444946289063, 0.11236297607421875, 0.11297468566894531, 0.11231635284423828, 0.11226233673095704, 0.11223129272460937, 0.11296358489990234, 0.11250409698486329, 0.11224752044677734, 0.1122529296875, 
0.11219725036621093, 0.11202803039550781, 0.11465727996826172, 0.1122031021118164, 0.11616275024414062, 0.1119748764038086, 0.11162777709960937, 0.11198105621337891, 0.1116710433959961, 0.11256025695800781, 0.11220387268066406, 0.11231644439697265, 0.11359846496582031, 0.11286118316650391, 0.11304937744140625, 0.112648193359375, 0.11254563140869141, 0.11200732421875, 0.1124659194946289, 0.11201945495605468, 0.11232784271240234, 0.11371571350097656, 0.114429443359375, 0.11306479644775391, 0.11639305877685546, 0.11420559692382813, 0.11213414764404298, 0.11172454071044922, 0.11170598602294922, 0.11256022644042969, 0.11247551727294922, 0.11240243530273437, 0.11179689788818359, 0.111857666015625, 0.11188224029541016, 0.11167948913574219, 0.111672607421875, 0.11143631744384766, 0.11162847900390625, 0.11212419128417969, 0.11207756805419922, 0.1113253402709961, 0.11225907135009766, 0.11318764495849609, 0.11297577667236328, 0.11293910217285157, 0.11318476867675781, 0.11290726470947265, 0.1139947509765625, 0.11271987152099609, 0.11098668670654296, 0.11106156921386719, 0.11159552001953126, 0.1108828125, 0.11175059509277344, 0.11199346923828125, 0.11147042846679688, 0.11212928009033203, 0.11180707550048828, 0.11105497741699219, 0.11113423919677734, 0.11170582580566406, 0.11195072174072265, 0.11374591827392579, 0.11201945495605468, 0.1114439697265625, 0.11215257263183594, 0.11200438690185546, 0.11150144195556641, 0.11169647979736329, 0.11177327728271484, 0.11085596466064453, 0.11125004577636718, 0.11140435028076172, 0.11078112030029297, 0.11131494140625, 0.11556454467773437, 0.11209932708740235, 0.11570038604736328, 0.11238809967041016, 0.11162614440917969, 0.11227474975585937, 0.11167005157470702, 0.11188838195800781, 0.11157263946533204, 0.11239868927001953, 0.11188349151611328, 0.11220252990722657, 0.111710205078125, 0.11165491485595704, 0.11167334747314453, 0.11628134155273437, 0.1132176284790039, 0.11257027435302734, 0.11178128051757813, 0.11243692779541016, 0.11194870758056641, 0.11199823760986329, 0.11189097595214843, 0.11168787384033203, 0.11300454711914062, 0.11337059020996093, 0.11351094055175781, 0.1128427505493164, 0.11204812622070312, 0.11287567901611328, 0.11247763061523437, 0.11199324798583984, 0.1119662094116211, 0.11257977294921875, 0.11332243347167968, 0.11312748718261718, 0.11550476837158204, 0.11326729583740235, 0.11217724609375, 0.11197849273681641, 0.11180646514892578, 0.11247821044921875, 0.11260012817382813, 0.11290675354003907, 0.11241516876220703, 0.11509935760498047, 0.11260956573486328, 0.1129697265625, 0.11274854278564453, 0.11257036590576172, 0.11281407928466797, 0.11332099151611329, 0.11267513275146485, 0.11256694030761719, 0.11262326049804687, 0.11369302368164062, 0.11273011016845703, 0.11274649810791015, 0.11235430145263672, 0.11344707489013672, 0.11281903839111328, 0.112195068359375, 0.11259136199951172, 0.11288166046142578, 0.11200054168701172, 0.11213823699951173, 0.11234684753417969, 0.11267346954345703, 0.11231187438964843, 0.11220764923095704, 0.11207683563232422, 0.11279833221435547, 0.11222617340087891, 0.11377267456054688, 0.11787382507324219, 0.11221692657470703, 0.11195187377929687, 0.11240243530273437, 0.11298124694824219, 0.11205094146728516, 0.11236761474609375, 0.11252735900878906, 0.11249056243896484, 0.11259897613525391, 0.11332991790771485, 0.11309471893310546, 0.11763263702392578, 0.11291295623779297, 0.11267276763916016, 0.11289920043945313, 0.11237417602539063, 0.11219136047363282, 0.11207107543945312, 0.11240013122558594, 0.11246412658691406, 
0.11243949127197266, 0.11256832122802735, 0.11222576141357422, 0.112500732421875, 0.11251699066162109, 0.11204061126708985, 0.11207270050048829, 0.11995772552490234, 0.11255990600585937, 0.1123054428100586, 0.11213279724121093, 0.11197238159179687, 0.11335417938232421, 0.11234873962402343, 0.1123031005859375, 0.11251087951660156, 0.11249468994140625, 0.11265229034423828, 0.11214835357666016, 0.11246809387207031, 0.1123504638671875, 0.11328530883789062, 0.11245024108886718, 0.11204799652099609, 0.11234508514404297, 0.11223811340332031, 0.11199481964111328, 0.11191139221191407, 0.11280166625976562, 0.11359030151367187, 0.11252547454833985, 0.11251945495605468, 0.11208060455322266, 0.11328761291503907, 0.11299244689941407, 0.11284070587158203, 0.11263999938964844, 0.11262137603759766, 0.11595094299316407, 0.11257328033447266, 0.11300579071044922, 0.11208169555664063, 0.11285094451904297, 0.11232978820800782, 0.11173369598388672, 0.11220377349853515, 0.11318390655517578, 0.1126903076171875, 0.11240383911132812, 0.11292463684082031, 0.11281651306152343, 0.11255596923828125, 0.11257247924804688, 0.11202137756347656, 0.11266381072998047, 0.11290838623046875, 0.11272032165527343, 0.11311548614501953, 0.11294719696044922, 0.11228982543945312, 0.1137561264038086, 0.11249164581298828, 0.11247811126708984, 0.11225392150878906, 0.11279936218261719, 0.11255436706542969, 0.11574246215820312, 0.11240595245361328, 0.11201741027832031, 0.11212854766845703, 0.1126976318359375, 0.11217622375488281, 0.11222723388671875, 0.11230617523193359, 0.11231833648681641, 0.11234678649902344, 0.11255599975585938, 0.11172096252441406, 0.11296768188476562, 0.11315200042724609, 0.1119662094116211, 0.11320848083496093, 0.11276783752441406, 0.11184531402587891, 0.11214854431152343, 0.11233074951171874, 0.1121239013671875, 0.11288780975341797, 0.11219904327392578, 0.11164435577392579, 0.11179232025146485, 0.1118113250732422, 0.1121229476928711, 0.11156371307373047, 0.11223040008544923, 0.11743177795410156, 0.11190886688232422, 0.11208704376220703, 0.11172454071044922, 0.11184742736816407, 0.11124326324462891, 0.11173033905029296, 0.11206687927246094, 0.11251036834716797, 0.11203001403808593, 0.11158560180664062, 0.11237948608398438, 0.11216278076171875, 0.1121440658569336, 0.11199894714355468, 0.11214070129394531, 0.11202992248535157, 0.11236093139648437, 0.11138118743896484, 0.1110282211303711, 0.11425587463378906, 0.11194678497314453, 0.1113563232421875, 0.11111615753173829, 0.1112615966796875, 0.11141104125976563, 0.11198969268798828, 0.1121443862915039, 0.11127808380126954, 0.11156175994873047, 0.11140742492675781, 0.11135017395019531, 0.1115425262451172, 0.11182899475097656, 0.11259251403808594, 0.11272217559814453, 0.1118201904296875, 0.11155859375, 0.11166713714599609, 0.1122066879272461, 0.111783935546875, 0.11161395263671875, 0.11199078369140625, 0.11424678039550781, 0.11302188873291015, 0.11201529693603515, 0.11154431915283203, 0.11180550384521484, 0.11169478607177734, 0.11140876770019531, 0.11156227111816407, 0.11119292449951172, 0.11180844879150391, 0.11217241668701172, 0.11162284851074218, 0.1113333740234375, 0.11160781097412109, 0.11168940734863281, 0.11174329376220703, 0.11395686340332031, 0.11219875335693359, 0.11152476501464843, 0.11284627532958984, 0.11191731262207032, 0.11225711822509765, 0.11202377319335938, 0.11135113525390625, 0.11257321929931641, 0.11151769256591797, 0.11204576110839844, 0.11626118469238281, 0.11194367980957032, 0.11131289672851563, 0.11213542175292969, 0.11472972869873047, 
0.11156841278076172, 0.11141168212890624, 0.11229376220703124, 0.11134281921386718, 0.11171113586425781, 0.11212556457519532, 0.11115264129638672, 0.11135059356689453, 0.11173484802246093, 0.11107852935791016, 0.11124797058105469, 0.11225081634521485, 0.11483171081542969, 0.11220566558837891, 0.1119233627319336, 0.11164262390136719, 0.11227318572998046, 0.11169718170166015, 0.11131171417236328, 0.1109566421508789, 0.11226870727539062, 0.11226707458496094, 0.11213875579833985, 0.11198287963867187, 0.11135308837890626, 0.11216758728027344, 0.11148092651367188, 0.11134899139404297, 0.11148150634765625, 0.11194377899169922, 0.11206588745117188, 0.11155728149414063, 0.1122136001586914, 0.11323165130615234, 0.11276914978027344, 0.11208755493164063, 0.11184333038330078, 0.11178598022460938, 0.11212361907958984, 0.11226898956298828, 0.11218390655517578, 0.11238735961914062, 0.11213420867919922, 0.11222083282470703, 0.11186083221435547, 0.11193392181396485, 0.11203798675537109, 0.11189683532714843, 0.11209939575195313, 0.11181878662109375, 0.11240624237060547, 0.11191939544677734]",tokens/s,8.869135395109204,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,1073.303552,599.6544,0.0,197.132288,173.338112,s,1,8.2358193359375,8.2358193359375,0.0,8.2358193359375,8.2358193359375,8.2358193359375,8.2358193359375,[8.2358193359375],,kWh,2.458502964168474e-05,2.70451647764865e-06,7.444728177999349e-06,3.473427429733274e-05,,MB,1317.789696,662.56896,0.0,239.075328,211.473408,s,10,0.3764009246826172,0.03764009246826172,0.00013748134536996405,0.037612991333007814,0.03776871185302734,0.03786163558959961,0.03793597457885742,"[0.03761942291259766, 0.03774806213378906, 0.037703712463378905, 0.03795455932617187, 0.03770566558837891, 0.03760655975341797, 0.03757388687133789, 0.03746326446533203, 0.03749622344970703, 0.03752956771850586]",tokens/s,6801.258530803564,kWh,1.1159296731075075e-06,1.2306661729670883e-07,4.210754004732629e-07,1.6600716908774791e-06,tokens/kWh,154210207.55114725,MB,1350.545408,675.151872,0.0,251.65824,211.475968,s,10,23.039292480468752,2.3039292480468747,0.00585087011623551,2.302861206054687,2.30947255859375,2.311827587890625,2.313711611328125,"[2.308675048828125, 2.29877490234375, 2.301589111328125, 2.301660400390625, 2.307451416015625, 2.3141826171875, 2.303876220703125, 2.30894921875, 2.292287353515625, 2.30184619140625]",tokens/s,27.34458970621924,kWh,6.731461989189191e-05,7.424594656786849e-06,2.264677638712575e-05,9.738599093580449e-05,tokens/kWh,646910.2937149219,,s,630,23.03307286071778,0.036560433112250434,0.00045845430339038943,0.03646127891540528,0.03689868125915527,0.037140113067626955,0.03841531150817871,"[0.036362239837646484, 0.03666563034057617, 0.036843521118164066, 0.03682099151611328, 0.03656835174560547, 0.036668384552001956, 0.0365483512878418, 0.036684864044189455, 0.03650409698486328, 0.03800284957885742, 0.03820364761352539, 0.03676160049438477, 
0.03672835159301758, 0.03637868881225586, 0.0363787841796875, 0.0363768310546875, 0.03653039932250977, 0.0366794548034668, 0.03642591857910156, 0.03654819107055664, 0.0365052490234375, 0.03630547332763672, 0.03628972625732422, 0.036405567169189454, 0.036258720397949216, 0.036127967834472655, 0.03631174468994141, 0.03632278442382812, 0.036289024353027347, 0.03637868881225586, 0.036552352905273436, 0.0362672004699707, 0.03658835220336914, 0.03632940673828125, 0.03609987258911133, 0.0365604476928711, 0.036371040344238284, 0.036292896270751954, 0.03649836730957031, 0.03640816116333008, 0.03648313522338867, 0.03625737762451172, 0.03632998275756836, 0.0366135368347168, 0.03629344177246094, 0.0366561279296875, 0.03646902465820313, 0.036356670379638675, 0.04117708969116211, 0.036750751495361327, 0.036776031494140625, 0.03649161529541016, 0.03651379013061523, 0.036357376098632814, 0.036657089233398436, 0.03640607833862305, 0.036691967010498046, 0.03699542236328125, 0.03651657485961914, 0.03649542236328125, 0.03696083068847656, 0.03737168121337891, 0.03659977722167969, 0.0363370246887207, 0.03944918441772461, 0.036422687530517577, 0.03624275207519531, 0.03637180709838867, 0.03622159957885742, 0.03644134521484375, 0.03631455993652344, 0.03649030303955078, 0.03631695938110351, 0.03637263870239258, 0.03652614212036133, 0.036663230895996095, 0.036517822265625, 0.03653353500366211, 0.03691551971435547, 0.03788876724243164, 0.03632297515869141, 0.03629171371459961, 0.036452831268310545, 0.03683932876586914, 0.036631103515625, 0.03633356857299805, 0.036329376220703126, 0.03635414505004883, 0.03650739288330078, 0.036305152893066406, 0.036247646331787106, 0.03663248062133789, 0.036211711883544925, 0.0363524169921875, 0.036297313690185545, 0.03636396789550781, 0.03637635040283203, 0.03633820724487305, 0.03647078323364258, 0.036273216247558596, 0.0368765754699707, 0.03645711898803711, 0.03646044921875, 0.036388961791992185, 0.03641263961791992, 0.0363047981262207, 0.036219680786132816, 0.03623097610473633, 0.03644188690185547, 0.036301025390625, 0.03652841567993164, 0.03732051086425781, 0.03631292724609375, 0.03647651290893555, 0.0362437744140625, 0.03643840026855469, 0.03653023910522461, 0.03651401519775391, 0.03624665451049805, 0.03631686401367187, 0.036196384429931644, 0.03621750259399414, 0.03624809646606445, 0.036189952850341794, 0.036141281127929685, 0.03617388916015625, 0.03615532684326172, 0.03761651229858398, 0.036504993438720705, 0.0365159683227539, 0.0365296630859375, 0.036330432891845704, 0.036507678985595704, 0.036329471588134765, 0.03636409759521484, 0.036434112548828126, 0.03634924697875976, 0.036657054901123046, 0.03637347030639648, 0.0366424331665039, 0.036515743255615234, 0.03637891387939453, 0.03639273452758789, 0.036593887329101564, 0.03635200119018555, 0.03637059020996094, 0.0364194221496582, 0.03625804901123047, 0.036263233184814454, 0.036302879333496095, 0.036425407409667966, 0.036358814239501956, 0.03641299057006836, 0.03627040100097656, 0.03631737518310547, 0.03646464157104492, 0.036212734222412106, 0.03740067291259765, 0.03634908676147461, 0.03659049606323242, 0.03633750534057617, 0.03658870315551758, 0.03842969512939453, 0.03652374267578125, 0.036920673370361326, 0.037437313079833986, 0.03656924819946289, 0.03640403366088867, 0.036472801208496095, 0.036246719360351565, 0.03638684844970703, 0.03650431823730469, 0.03665683364868164, 0.0365939826965332, 0.03631504058837891, 0.03627017593383789, 0.03642131042480469, 0.03643833541870117, 0.03664588928222656, 0.03654143905639649, 
0.03673907089233398, 0.03638272094726563, 0.03645868682861328, 0.036496864318847654, 0.03647113418579102, 0.03661331176757812, 0.03643065643310547, 0.0364249267578125, 0.03629555130004883, 0.03643881607055664, 0.036319393157958985, 0.03670000076293945, 0.03642380905151367, 0.03648659133911133, 0.03652214431762695, 0.036305343627929684, 0.03636368179321289, 0.0367243537902832, 0.03636716842651367, 0.036673534393310545, 0.03646831893920898, 0.03616118240356445, 0.03637529754638672, 0.036278270721435545, 0.03622444915771485, 0.036590206146240235, 0.04034121704101563, 0.036352031707763674, 0.03656723022460937, 0.03656499099731445, 0.03629875183105469, 0.036525726318359375, 0.03646278381347656, 0.03617174530029297, 0.03622316741943359, 0.03604665756225586, 0.03631516647338867, 0.036243873596191405, 0.03674528121948242, 0.03662201690673828, 0.0362820816040039, 0.03659190368652344, 0.037037086486816403, 0.036495807647705075, 0.036644702911376954, 0.036598304748535156, 0.036638080596923826, 0.0369889907836914, 0.03667007827758789, 0.03649568176269531, 0.03641139221191406, 0.03650870513916016, 0.03677772903442383, 0.036719295501708986, 0.03613727951049805, 0.036380672454833986, 0.036567039489746093, 0.036319103240966794, 0.036348033905029296, 0.03641705703735352, 0.036245983123779295, 0.03631020736694336, 0.036345951080322264, 0.03635305786132813, 0.03645951843261719, 0.03635254287719727, 0.03634143829345703, 0.03643353652954102, 0.036807521820068356, 0.03665510559082031, 0.036394878387451174, 0.0364249267578125, 0.0362995834350586, 0.036364513397216795, 0.03644339370727539, 0.03655263900756836, 0.03637088012695312, 0.03635011291503906, 0.03687558364868164, 0.03637932968139648, 0.03736064147949219, 0.03656963348388672, 0.03642835235595703, 0.03647830581665039, 0.036364864349365235, 0.03643724822998047, 0.03648780822753906, 0.03638489532470703, 0.03643392181396484, 0.03694947052001953, 0.03641001510620117, 0.03638016128540039, 0.03698761749267578, 0.03672335815429688, 0.03659468841552734, 0.03653222274780273, 0.036429088592529295, 0.036355838775634766, 0.03676668930053711, 0.038348800659179685, 0.03713008117675781, 0.03700342559814453, 0.036587520599365236, 0.03644351959228516, 0.03675961685180664, 0.036190784454345704, 0.03690496063232422, 0.03667542266845703, 0.03677347183227539, 0.03672848129272461, 0.03668060684204102, 0.036410945892333985, 0.03693817520141601, 0.03645254516601563, 0.03635385513305664, 0.037136383056640625, 0.036534271240234374, 0.03682831954956055, 0.03645731353759766, 0.03641449737548828, 0.03661497497558594, 0.03665526580810547, 0.03665865707397461, 0.03670185470581055, 0.03647331237792969, 0.03646300888061523, 0.03650928115844727, 0.03648553466796875, 0.03650083160400391, 0.036434593200683596, 0.03643904113769531, 0.03661119842529297, 0.03665497589111328, 0.036527870178222656, 0.03663312149047852, 0.03626505661010742, 0.03791228866577148, 0.0376995849609375, 0.03697884750366211, 0.036586784362792966, 0.036751937866210935, 0.03648921585083008, 0.03643580627441406, 0.03677795028686524, 0.0365546875, 0.037128318786621095, 0.03662662506103516, 0.03653948974609375, 0.036427680969238284, 0.03684652709960937, 0.03680255889892578, 0.037101566314697264, 0.03682838439941406, 0.037065761566162106, 0.036461982727050785, 0.03658992004394531, 0.036380672454833986, 0.036472801208496095, 0.03659737777709961, 0.036867935180664065, 0.036471359252929686, 0.036689342498779295, 0.03631363296508789, 0.03669606399536133, 0.03654876708984375, 0.036544288635253906, 0.03695743942260742, 
0.03669225692749024, 0.03838009643554687, 0.03657727813720703, 0.03652435302734375, 0.03675104141235352, 0.03664486312866211, 0.03698601531982422, 0.0371641616821289, 0.03663999938964844, 0.03663945770263672, 0.03653577423095703, 0.036415233612060546, 0.03672678375244141, 0.03657782363891601, 0.0364769287109375, 0.03668975830078125, 0.03649552154541016, 0.03656268692016602, 0.03664511871337891, 0.036765567779541014, 0.037023841857910154, 0.03713974380493164, 0.03636732864379883, 0.036677375793457034, 0.03647001647949219, 0.036319488525390624, 0.03658768081665039, 0.03642367935180664, 0.03630259323120117, 0.03738268661499024, 0.036527999877929686, 0.03623395156860352, 0.03715891265869141, 0.036371742248535156, 0.03637324905395508, 0.03652204895019531, 0.03646057510375977, 0.03640934371948242, 0.03641491317749023, 0.03637088012695312, 0.0364189453125, 0.03645299148559571, 0.03636041641235352, 0.03641763305664063, 0.036382080078125, 0.036417919158935545, 0.03639289474487305, 0.0368804817199707, 0.036435745239257813, 0.0365241584777832, 0.03654409790039063, 0.036407711029052735, 0.036533790588378905, 0.03648553466796875, 0.036241600036621094, 0.03644198226928711, 0.03638425445556641, 0.03637913513183594, 0.0365379524230957, 0.03637443161010742, 0.036313312530517575, 0.037017921447753906, 0.03659158325195312, 0.036472320556640625, 0.03652454376220703, 0.036355583190917966, 0.036604129791259765, 0.036423297882080076, 0.036455070495605466, 0.03648271942138672, 0.03644790267944336, 0.036536800384521485, 0.03665958404541016, 0.036777694702148436, 0.03650089645385742, 0.03695075225830078, 0.036478977203369144, 0.03657046508789062, 0.03640364837646484, 0.0365508804321289, 0.03681280136108398, 0.03650870513916016, 0.0364769287109375, 0.03695001602172852, 0.03868697738647461, 0.03659587097167969, 0.03684614562988281, 0.036918655395507816, 0.03633420944213867, 0.03650672149658203, 0.03654953765869141, 0.0365241584777832, 0.03663814544677734, 0.03646918487548828, 0.03643804931640625, 0.036343841552734374, 0.036372577667236325, 0.036675678253173825, 0.036345951080322264, 0.036294464111328126, 0.03633356857299805, 0.03657523345947265, 0.036636672973632815, 0.036905120849609375, 0.03652127838134766, 0.03636409759521484, 0.03665584182739258, 0.03666908645629883, 0.03641788864135742, 0.036376384735107424, 0.03666553497314453, 0.036363903045654296, 0.03633804702758789, 0.036442176818847656, 0.03642790222167969, 0.036308799743652344, 0.0366143684387207, 0.03679414367675781, 0.036483070373535154, 0.03641862487792969, 0.036330432891845704, 0.03656294250488281, 0.03930521774291992, 0.03648931121826172, 0.03644736099243164, 0.03646691131591797, 0.03636454391479492, 0.03648684692382813, 0.037491329193115236, 0.040104991912841795, 0.03672169494628906, 0.03689798355102539, 0.0363930549621582, 0.0366824951171875, 0.036652896881103514, 0.036582977294921874, 0.03646047973632813, 0.03653279876708984, 0.036395008087158204, 0.03662982559204102, 0.03712185668945313, 0.036448318481445315, 0.03659478378295898, 0.03644947052001953, 0.03668636703491211, 0.03670220947265625, 0.036687679290771484, 0.03664710235595703, 0.03657932662963867, 0.036486465454101565, 0.036618942260742186, 0.03629843139648437, 0.03642832183837891, 0.03653209686279297, 0.036265247344970705, 0.036332321166992185, 0.036661087036132814, 0.036495361328125, 0.03652505493164063, 0.03626464080810547, 0.03647014236450195, 0.03625827026367188, 0.036348384857177736, 0.036355968475341796, 0.036552608489990236, 0.03624745559692383, 0.036290878295898436, 
0.03617792129516602, 0.03627008056640625, 0.03623487854003906, 0.036330047607421874, 0.03635385513305664, 0.03636217498779297, 0.03629580688476562, 0.036413440704345705, 0.036115390777587894, 0.036206592559814454, 0.03623526382446289, 0.03604848098754883, 0.03624617767333985, 0.03637958526611328, 0.03608153533935547, 0.03638508987426758, 0.03615817642211914, 0.036520065307617186, 0.03649923324584961, 0.036241409301757815, 0.03650867080688477, 0.03622732925415039, 0.03610246276855469, 0.03624755096435547, 0.03636636734008789, 0.03613494491577148, 0.036129184722900394, 0.037085342407226565, 0.03625315093994141, 0.036534622192382814, 0.036222976684570314, 0.03628636932373047, 0.03629699325561524, 0.03629075241088867, 0.03779545593261719, 0.03714041519165039, 0.03629248046875, 0.0365918083190918, 0.03638272094726563, 0.036229118347167966, 0.036413089752197265, 0.03634415817260742, 0.036485118865966795, 0.0364356803894043, 0.03642192077636719, 0.036633663177490235, 0.03644102478027344, 0.03628646469116211, 0.03654032135009765, 0.036366622924804685, 0.03623865509033203, 0.03632191848754883, 0.03624127960205078, 0.036382080078125, 0.036315231323242186, 0.036406913757324216, 0.03655558395385742, 0.03635532760620117, 0.03662108612060547, 0.03647798538208008, 0.03679945755004883, 0.03753481674194336, 0.036430751800537106, 0.03601583862304687, 0.03627142333984375, 0.03613004684448242, 0.03611414337158203, 0.036415328979492186, 0.036218017578125, 0.0374354248046875, 0.03628335952758789, 0.03705785751342774, 0.03811967849731445, 0.0370524787902832, 0.0364793586730957, 0.03651164627075195, 0.03639510345458984, 0.03629068756103516, 0.036377567291259766, 0.03616831970214844, 0.036212158203125, 0.03715372848510742, 0.036396961212158206, 0.03630688095092773, 0.0363807373046875, 0.03740422439575195, 0.0364835205078125, 0.0364031982421875, 0.03654611206054687, 0.036405025482177736, 0.03618377685546875, 0.03633657455444336, 0.036378528594970705, 0.036362335205078124, 0.036452350616455076, 0.03615903854370117, 0.03622281646728515, 0.03661475372314453, 0.03631539154052734, 0.03647036743164062, 0.03672694396972656, 0.036634624481201174, 0.036257568359375, 0.03642131042480469, 0.03795203018188477, 0.03632112121582031, 0.03627433776855469, 0.03621673583984375, 0.03665900802612305, 0.03635638427734375, 0.03632537460327148, 0.0363633918762207, 0.03629676818847656, 0.036939998626708985, 0.03651644897460937, 0.03656902313232422]",tokens/s,27.351973564692987,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.42 GiB is free. Process 114202 has 13.32 GiB memory in use. Of the allocated memory 13.21 GiB is allocated by PyTorch, and 1.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,1792.356352,790.495232,0.0,387.97312,373.788672,s,1,9.1069775390625,9.1069775390625,0.0,9.1069775390625,9.1069775390625,9.1069775390625,9.1069775390625,[9.1069775390625],,kWh,5.134861882498475e-05,5.657091890138377e-06,1.690612463599306e-05,7.391183535111618e-05,,MB,1844.371456,891.158528,0.0,467.664896,421.489664,s,10,0.7397996520996093,0.07397996520996093,0.0010280551395685884,0.07349257659912109,0.07558497161865235,0.07593565521240235,0.07621620208740235,"[0.0734228515625, 0.07317411041259765, 0.073263427734375, 0.0735179214477539, 0.07310777282714843, 0.07434508514404296, 0.07346723175048828, 0.07628633880615235, 0.07550704193115235, 0.07370787048339844]",tokens/s,3460.3963285661457,kWh,2.16543798030839e-06,2.3880994990924374e-07,8.490068520443321e-07,3.253254782261966e-06,tokens/kWh,78690424.55445342,MB,1848.655872,941.490176,0.0,517.996544,433.41312,s,10,45.4326416015625,4.54326416015625,0.0071401817434617754,4.542790771484375,4.55135830078125,4.555304150390625,4.558460830078125,"[4.5426181640625, 4.53997998046875, 4.54296337890625, 4.5368056640625, 4.5445048828125, 4.531197265625, 4.55925, 4.54297119140625, 4.54186962890625, 4.5504814453125]",tokens/s,13.866682142874415,kWh,0.00013234953523760985,1.4598485911666738e-05,4.482989080215702e-05,0.00019177791195143363,tokens/kWh,328504.9845362499,,s,630,45.425543342590295,0.07210403705173069,0.0007504465010481083,0.07193017578125,0.07262707443237305,0.07305258026123047,0.07548663970947266,"[0.07269197082519531, 0.07283574676513672, 0.07184284973144531, 0.07167401885986328, 0.07168259429931641, 0.0717619171142578, 0.07178828430175781, 0.07218531036376953, 0.07222541046142578, 0.07196688079833985, 0.0715736312866211, 0.07173423767089844, 0.07143014526367188, 0.07230934143066406, 0.07209142303466796, 0.0719850845336914, 0.07181372833251953, 0.07395935821533203, 0.0722957763671875, 0.07240137481689453, 0.07250771331787109, 0.07282249450683594, 0.07248713684082031, 0.07242352294921875, 0.07206697845458984, 0.07197468566894531, 0.07155558776855468, 0.07176367950439454, 0.07164518737792969, 0.07182364654541015, 0.07199078369140625, 0.07266905975341797, 0.07208930969238281, 0.07222771453857423, 0.07198092651367187, 0.0716523208618164, 0.07396620941162109, 0.07392905426025391, 0.07204278564453125, 0.07179974365234375, 0.07175651550292969, 0.07251148986816407, 0.0719232940673828, 0.07180754852294922, 0.07158972930908203, 0.07180841827392578, 0.07160291290283204, 0.07158930969238281, 0.07160262298583984, 0.07154390716552735, 0.07245616149902344, 0.07213152313232422, 0.0718704605102539, 0.07261090850830078, 0.07189199829101563, 0.07253916931152343, 0.0715863037109375, 
0.07239920043945312, 0.07262329864501953, 0.07177641296386719, 0.07167247772216796, 0.07136460876464844, 0.07157574462890624, 0.07166445159912109, 0.07178569793701171, 0.0715532455444336, 0.07169062042236328, 0.07174553680419922, 0.07273267364501954, 0.07170835113525391, 0.07194866943359375, 0.07191712188720703, 0.07176230621337891, 0.07164415740966797, 0.07175196838378907, 0.07164611053466798, 0.07187564849853516, 0.07308159637451171, 0.07213260650634766, 0.0719359359741211, 0.07180921936035156, 0.0714750747680664, 0.07196214294433594, 0.07228463745117188, 0.07215513610839844, 0.07163442993164063, 0.07193350219726563, 0.0716333770751953, 0.07166000366210938, 0.07208060455322265, 0.07185897827148438, 0.07219548797607422, 0.07233500671386718, 0.07234860992431641, 0.0760442886352539, 0.07224867248535156, 0.07303030395507812, 0.07207475280761719, 0.07195699310302735, 0.07195648193359375, 0.07195852661132812, 0.07191942596435547, 0.07154707336425781, 0.0717110366821289, 0.07273439788818359, 0.07271218872070312, 0.07232022094726563, 0.07235670471191406, 0.07190742492675781, 0.07179964447021485, 0.0717927703857422, 0.07189183807373047, 0.07173017883300781, 0.07171174621582031, 0.07193389129638672, 0.07180422210693359, 0.07151248168945312, 0.07141766357421875, 0.07168899536132813, 0.07213750457763672, 0.07243641662597657, 0.072157470703125, 0.07181478118896484, 0.07245353698730468, 0.0721204833984375, 0.07244473266601563, 0.07152025604248047, 0.07195177459716796, 0.07220285034179688, 0.07171603393554687, 0.07682643127441406, 0.07211036682128906, 0.07194601440429688, 0.07236271667480469, 0.0720931167602539, 0.07211497497558594, 0.07182915496826171, 0.07170604705810547, 0.07201190185546875, 0.07206578826904297, 0.07189814758300782, 0.07190841674804688, 0.07174944305419922, 0.0716063995361328, 0.07154278564453125, 0.07179190063476562, 0.07191126251220703, 0.07212278747558594, 0.07193238067626953, 0.07191069030761718, 0.0723397445678711, 0.07157926177978516, 0.07170934295654297, 0.07148592376708984, 0.0731355209350586, 0.071604736328125, 0.071932861328125, 0.071678466796875, 0.07158204650878906, 0.07171868896484375, 0.07233110046386719, 0.07216575622558594, 0.0720211181640625, 0.07222169494628906, 0.0722103042602539, 0.0717619171142578, 0.07187843322753906, 0.0715820770263672, 0.07259699249267579, 0.07175811004638671, 0.0717410888671875, 0.07162716674804688, 0.07160655975341797, 0.07306211090087891, 0.07337165069580077, 0.07235145568847656, 0.07162726593017578, 0.07214498901367188, 0.07275897979736329, 0.0718449935913086, 0.07178739166259765, 0.07152639770507813, 0.07618329620361328, 0.07196192169189453, 0.07171990203857422, 0.07166130828857421, 0.071563232421875, 0.07171711730957031, 0.07187881469726562, 0.072067138671875, 0.0720132827758789, 0.07235395050048828, 0.07232752227783203, 0.07194016265869141, 0.0718431396484375, 0.07174214172363282, 0.07194624328613282, 0.07177625274658203, 0.0718807373046875, 0.07187042999267577, 0.07171481323242188, 0.07173693084716797, 0.07228617858886718, 0.07165555572509766, 0.07203206634521485, 0.07280390167236328, 0.07191238403320313, 0.07218390655517579, 0.071810302734375, 0.07171520233154297, 0.07170035552978515, 0.07209935760498047, 0.07182015991210937, 0.0719834213256836, 0.07190290832519532, 0.07183987426757812, 0.07218982696533204, 0.07203401947021484, 0.07156764984130859, 0.07219744110107422, 0.07180563354492188, 0.0720169906616211, 0.07186319732666016, 0.07208566284179688, 0.0719502716064453, 0.07176316833496094, 0.07170486450195312, 0.07192822265625, 
0.07173929595947266, 0.07257459259033203, 0.07216995239257812, 0.07392256164550781, 0.07292518615722657, 0.07184989166259766, 0.0723436508178711, 0.07170051574707031, 0.07174889373779297, 0.07169529724121093, 0.07165484619140625, 0.07177772521972656, 0.07193312072753906, 0.07198880004882813, 0.07182166290283203, 0.07194537353515625, 0.07287359619140625, 0.07187967681884766, 0.07186678314208984, 0.0718124771118164, 0.07251999664306641, 0.07159571075439453, 0.0718333740234375, 0.07188857269287109, 0.07138854217529297, 0.0717239990234375, 0.07171993255615235, 0.07171366119384766, 0.07182268524169921, 0.07226390075683593, 0.07167852783203126, 0.07349043273925782, 0.0718006362915039, 0.07218003082275391, 0.07184156799316406, 0.07164118194580078, 0.07162262725830078, 0.07236393737792969, 0.07158735656738281, 0.07542540740966797, 0.07282173156738281, 0.07178768157958984, 0.07169084930419922, 0.07227439880371093, 0.0720557098388672, 0.0716522216796875, 0.07214598083496093, 0.07193901062011719, 0.07204601287841797, 0.07156956481933593, 0.07158563232421875, 0.07163750457763672, 0.07186051177978516, 0.07212163543701172, 0.07170499420166015, 0.07139315032958984, 0.07164848327636719, 0.07232361602783204, 0.07173506927490235, 0.07215952301025391, 0.07153727722167968, 0.0719046401977539, 0.07332937622070312, 0.07261357116699219, 0.0717392349243164, 0.07185324859619141, 0.07164002990722657, 0.07228211212158203, 0.07196025848388672, 0.0725524444580078, 0.07202137756347657, 0.07262509155273437, 0.07197420501708984, 0.07213536071777343, 0.07176214599609375, 0.07180675506591797, 0.07304093170166015, 0.0721561279296875, 0.07174348449707031, 0.07168390655517579, 0.07177190399169922, 0.07188838195800781, 0.07408252716064453, 0.07265353393554687, 0.07243929290771485, 0.07254041290283203, 0.07360499572753906, 0.07190828704833985, 0.07256297302246094, 0.07213648223876953, 0.07269347381591797, 0.07194633483886718, 0.07176806640625, 0.07161212921142578, 0.07287359619140625, 0.071783203125, 0.07179264068603515, 0.072042236328125, 0.07261824035644532, 0.07149568176269532, 0.07199942779541016, 0.0713605728149414, 0.07143424224853516, 0.07151411437988281, 0.07211007690429687, 0.07147846221923829, 0.07144073486328124, 0.07169712066650391, 0.07174944305419922, 0.07182972717285156, 0.0718226547241211, 0.07176003265380859, 0.07219181060791016, 0.07232518768310547, 0.07172953796386719, 0.07160006713867187, 0.07162876892089844, 0.07154051208496094, 0.0720095977783203, 0.07174400329589843, 0.07168508911132812, 0.07155513763427734, 0.07267151641845702, 0.0718629150390625, 0.0715489273071289, 0.07191065979003906, 0.07206169891357422, 0.0720805435180664, 0.07149388885498047, 0.07156591796875, 0.07210428619384765, 0.07172262573242187, 0.0716731185913086, 0.07197779083251953, 0.07186335754394531, 0.07163510131835937, 0.07213664245605468, 0.07143103790283203, 0.07160348510742187, 0.07249369812011719, 0.07321981048583984, 0.07342899322509766, 0.07156684875488281, 0.07204940795898437, 0.07186434936523438, 0.07171891021728516, 0.07158758544921875, 0.07206025695800782, 0.07184003448486329, 0.07189161682128906, 0.07213670349121094, 0.07200109100341796, 0.07184783935546875, 0.07251203155517578, 0.07214105224609375, 0.07198047637939453, 0.07238880157470703, 0.08147379302978516, 0.07240185546875, 0.07186262512207031, 0.07262633514404297, 0.07197071838378906, 0.07180902099609375, 0.07181267547607421, 0.07193289947509765, 0.07168032073974609, 0.07184761810302734, 0.0720479965209961, 0.07258790588378906, 0.07205801391601563, 
0.07200764465332031, 0.07198194885253906, 0.07181311798095703, 0.07221043395996093, 0.07192556762695312, 0.07236943817138672, 0.07575440216064454, 0.07207974243164063, 0.07151577758789063, 0.0716339874267578, 0.07150918579101563, 0.07196646118164063, 0.07169599914550781, 0.07173990631103516, 0.07197196960449219, 0.07186016082763672, 0.07176239776611328, 0.071723388671875, 0.07166767883300781, 0.07186656188964843, 0.07208284759521484, 0.07193436431884766, 0.07175782775878906, 0.07227391815185547, 0.07226383972167968, 0.07235791778564453, 0.07266079711914063, 0.07172303771972656, 0.07235292816162109, 0.07551165008544922, 0.07276233673095703, 0.07261593627929687, 0.07238262176513671, 0.07252073669433594, 0.07251238250732422, 0.07244588470458985, 0.07213993835449219, 0.07268256378173828, 0.07248365020751953, 0.0724480972290039, 0.07211408233642579, 0.07192070770263671, 0.07241209411621094, 0.07239580535888672, 0.07203068542480469, 0.07196031951904297, 0.07193062591552735, 0.07494831848144531, 0.07420531463623047, 0.07192511749267579, 0.07235807800292969, 0.0717968978881836, 0.07257337951660156, 0.07210131072998047, 0.072229248046875, 0.07191567993164062, 0.07155535888671875, 0.07147084808349609, 0.07138304138183593, 0.07145881652832031, 0.0716053466796875, 0.07221340942382813, 0.071884765625, 0.07165487670898438, 0.07266716766357421, 0.07156822204589844, 0.07245404815673828, 0.07209859466552734, 0.07197100830078125, 0.0726529312133789, 0.07205372619628907, 0.07175721740722656, 0.07164985656738282, 0.07176953887939454, 0.07193122863769531, 0.07203068542480469, 0.07179110717773438, 0.07173085021972657, 0.07203376007080078, 0.07166655731201171, 0.07263372802734375, 0.07162124633789063, 0.0720404510498047, 0.07155228424072266, 0.07179952239990234, 0.0715120620727539, 0.07166770935058593, 0.07186431884765625, 0.07232835388183594, 0.07531613159179687, 0.07220134735107422, 0.07226156616210938, 0.07168294525146485, 0.0718194580078125, 0.07264620971679688, 0.07259571075439453, 0.07227597045898437, 0.0718356170654297, 0.07194815826416015, 0.07164534759521485, 0.07183679962158203, 0.07182015991210937, 0.07200966644287109, 0.0718351058959961, 0.07217231750488282, 0.07192572784423829, 0.07210345458984375, 0.07204940795898437, 0.07154611206054687, 0.07234841918945313, 0.07176576232910156, 0.07193567657470704, 0.07167884826660156, 0.07177552032470703, 0.07185008239746093, 0.0718382110595703, 0.07178800201416016, 0.07223929595947266, 0.07213481903076172, 0.07171686553955078, 0.07174348449707031, 0.0718397445678711, 0.07160355377197265, 0.07372252655029297, 0.07152432250976562, 0.07194617462158204, 0.07182959747314453, 0.07208771514892579, 0.07161980438232422, 0.07257254028320312, 0.07164415740966797, 0.07178377532958985, 0.07258121490478515, 0.07219638061523438, 0.07275360107421874, 0.07240892791748046, 0.07244598388671875, 0.07252579498291016, 0.07236720275878906, 0.07199833679199219, 0.07226131439208984, 0.07216681671142579, 0.07186502075195313, 0.07457817840576172, 0.07187782287597656, 0.07179933166503906, 0.07205935668945312, 0.07203171539306641, 0.0717907485961914, 0.07208911895751953, 0.07184774780273437, 0.07189180755615235, 0.07163494110107421, 0.07137891387939453, 0.07168386840820312, 0.07170492553710937, 0.07138294219970703, 0.0719257583618164, 0.07146470642089844, 0.07179692840576171, 0.07197100830078125, 0.07199295806884766, 0.07232310485839843, 0.07214102172851562, 0.07188684844970702, 0.07276246643066406, 0.0747491226196289, 0.07219602966308594, 0.07191846466064453, 0.07159264373779296, 
0.07250739288330078, 0.07182166290283203, 0.0718289566040039, 0.07209184265136719, 0.0722896957397461, 0.071802978515625, 0.07249766540527344, 0.07236204528808594, 0.07209363555908203, 0.07274838256835937, 0.07209334564208984, 0.07184652709960937, 0.07163123321533203, 0.07185788726806641, 0.0747872314453125, 0.0720341796875, 0.07196393585205078, 0.07184867095947266, 0.07209568023681641, 0.07266925048828125, 0.07205632019042969, 0.07234162902832031, 0.07176025390625, 0.07626547241210938, 0.07185820770263672, 0.07190729522705078, 0.07185964965820313, 0.07206707000732422, 0.07238832092285157, 0.07203517150878906, 0.07348223876953125, 0.07288813018798829, 0.07242140960693359, 0.07210924530029297, 0.07241149139404297, 0.07192972564697266, 0.07159884643554687, 0.07201074981689454, 0.07162982177734376, 0.07167123413085938, 0.07151385498046875, 0.07174412536621094, 0.0718333740234375, 0.07156678771972656, 0.0716953582763672, 0.07181436920166015, 0.07293955230712891, 0.07322112274169922, 0.07208505249023438, 0.07215119934082032, 0.07146905517578125, 0.07202957153320312, 0.0718424301147461, 0.071729248046875, 0.07211414337158203, 0.07197430419921876, 0.07257142639160157, 0.07223091125488282, 0.07326512145996093, 0.07213222503662109, 0.0722745590209961, 0.07241811370849609]",tokens/s,13.868848970031387,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,24209.506304,13792.837632,0.0,13390.31552,13325.885952,s,1,48.88270703125,48.88270703125,0.0,48.88270703125,48.88270703125,48.88270703125,48.88270703125,[48.88270703125],,kWh,0.0012185130991249934,0.00013440095311640748,0.00039511531609198736,0.0017480293683333882,,MB,2931.421184,14019.330048,0.0,13595.836416,13508.150784,s,10,2.237373947143555,0.2237373947143555,0.0017081055185526458,0.22380310821533203,0.226144775390625,0.2263524856567383,0.2265186538696289,"[0.22257597351074218, 0.222059326171875, 0.22067222595214844, 0.22484307861328126, 0.2239567108154297, 0.22364950561523436, 0.22290432739257812, 0.22656019592285156, 0.22609861755371094, 0.22405398559570314]",tokens/s,1144.1985383213835,kWh,6.662519067803066e-06,7.347521320936895e-07,4.27467513690958e-06,1.1671946336806336e-05,tokens/kWh,21932931.544821206,MB,2935.549952,14052.88448,0.0,13627.293696,13438.291968,s,10,128.03450585937503,12.803450585937501,0.09420712424063508,12.7675888671875,12.932628125,12.958886328125,12.979892890625,"[12.788255859375, 12.7617744140625, 12.752634765625, 12.763880859375, 12.698796875, 12.687603515625, 12.771296875, 12.92679296875, 12.98514453125, 12.8983251953125]",tokens/s,4.920548533158335,kWh,0.00037996558810344394,4.1912233346878103e-05,0.00016819932395328508,0.0005900771454036073,tokens/kWh,106765.70087612627,,s,630,128.0318349456787,0.20322513483441063,0.0023712118243395166,0.20271764373779297,0.20615424804687502,0.20752144622802735,0.2101832640075684,"[0.20202239990234375, 0.20180224609375, 0.20163174438476564, 
0.20201593017578126, 0.2006878662109375, 0.20111395263671875, 0.20528970336914062, 0.20248780822753906, 0.20193399047851562, 0.2031492462158203, 0.20641378784179687, 0.2034249267578125, 0.20324534606933595, 0.20292933654785156, 0.20211727905273438, 0.2076760711669922, 0.20234854125976562, 0.20136550903320313, 0.20087974548339843, 0.20089894104003905, 0.20160678100585938, 0.20216246032714844, 0.2025036163330078, 0.2027670135498047, 0.20309811401367187, 0.20326002502441406, 0.20312380981445313, 0.20344522094726564, 0.2039785614013672, 0.20263232421875, 0.20914469909667968, 0.20405657958984375, 0.20327040100097657, 0.20351510620117189, 0.203755615234375, 0.20491322326660155, 0.20738752746582031, 0.20391445922851562, 0.20300985717773437, 0.20169654846191407, 0.20223046875, 0.20206346130371095, 0.20199874877929688, 0.20250215148925782, 0.20107878112792968, 0.20148838806152344, 0.20171597290039062, 0.2035177001953125, 0.2022908172607422, 0.2021932830810547, 0.20172329711914064, 0.20122508239746092, 0.20182389831542968, 0.20347091674804688, 0.20163737487792968, 0.20510470581054688, 0.201800537109375, 0.20160520935058593, 0.20240304565429687, 0.21513090515136718, 0.2017505340576172, 0.2010828857421875, 0.20149862670898439, 0.20772396850585936, 0.20430018615722656, 0.20225910949707032, 0.2028738555908203, 0.20498886108398437, 0.2037105255126953, 0.20346697998046875, 0.20386854553222655, 0.2030274200439453, 0.20334623718261718, 0.202844482421875, 0.2026129913330078, 0.20195289611816405, 0.2017978210449219, 0.20200473022460938, 0.20518821716308594, 0.20316627502441406, 0.20195292663574219, 0.20147471618652343, 0.20264857482910156, 0.20191769409179688, 0.20303575134277344, 0.2024129333496094, 0.20197314453125, 0.20175875854492187, 0.20122454833984374, 0.20130201721191407, 0.20243865966796876, 0.20173619079589844, 0.2006138916015625, 0.20194886779785157, 0.2019003448486328, 0.20196089172363282, 0.20110946655273437, 0.20072332763671874, 0.2013111114501953, 0.20097724914550782, 0.2012446746826172, 0.2009291229248047, 0.19998941040039062, 0.20531814575195312, 0.20269906616210936, 0.20375315856933593, 0.20381082153320312, 0.21209103393554687, 0.2028152618408203, 0.20323948669433595, 0.20125080871582032, 0.20092915344238282, 0.2002198028564453, 0.20939459228515625, 0.20359158325195312, 0.20181417846679686, 0.2008627471923828, 0.19983868408203126, 0.2061824951171875, 0.201166748046875, 0.20069705200195312, 0.20070608520507813, 0.19983004760742187, 0.20425897216796876, 0.20064521789550782, 0.20068360900878907, 0.20221221923828125, 0.20783917236328125, 0.2017731170654297, 0.20144688415527343, 0.20048944091796875, 0.20050070190429686, 0.205053955078125, 0.20230000305175783, 0.20258134460449218, 0.2015663604736328, 0.2035205078125, 0.20533042907714844, 0.20230723571777343, 0.20236956787109375, 0.20641290283203126, 0.2007026824951172, 0.20567599487304689, 0.20187600708007813, 0.20261244201660156, 0.2019924774169922, 0.20099276733398438, 0.2057165069580078, 0.20113871765136718, 0.20043411254882812, 0.2003927001953125, 0.20034970092773438, 0.2027706298828125, 0.2057869415283203, 0.20232191467285157, 0.2055425567626953, 0.2038301086425781, 0.20303599548339843, 0.20717840576171875, 0.20241203308105468, 0.20123190307617186, 0.20463615417480469, 0.2006287078857422, 0.20602825927734375, 0.20093801879882814, 0.2006793212890625, 0.20092442321777343, 0.20142933654785156, 0.20227874755859376, 0.20340937805175782, 0.2033957824707031, 0.2005155792236328, 0.20008528137207032, 0.2025740509033203, 0.20104173278808593, 
0.20028460693359376, 0.20338262939453125, 0.20185008239746094, 0.20237362670898437, 0.20217875671386717, 0.2021060791015625, 0.20075331115722655, 0.2012469482421875, 0.20102592468261718, 0.201406494140625, 0.20190133666992188, 0.20147299194335938, 0.20094461059570312, 0.2011679992675781, 0.20307760620117188, 0.20329592895507811, 0.203529052734375, 0.20326400756835938, 0.20451123046875, 0.2027940216064453, 0.2032191619873047, 0.20336306762695314, 0.2031226806640625, 0.2033948211669922, 0.20263699340820313, 0.20866925048828125, 0.20665721130371092, 0.204214599609375, 0.20588861083984375, 0.20567132568359375, 0.20209869384765625, 0.2019737548828125, 0.20340505981445312, 0.2054842529296875, 0.2032210235595703, 0.20480514526367188, 0.20226556396484374, 0.202057861328125, 0.20190789794921876, 0.20214341735839844, 0.2026112060546875, 0.20232156372070312, 0.20276054382324218, 0.20298713684082031, 0.20220335388183594, 0.20232351684570313, 0.20239418029785156, 0.20119964599609375, 0.20117503356933594, 0.2041817626953125, 0.2029522247314453, 0.20285772705078126, 0.20167984008789064, 0.20125875854492187, 0.2016320037841797, 0.20267584228515625, 0.202316162109375, 0.2021817626953125, 0.20134786987304687, 0.20128163146972655, 0.2002515869140625, 0.20039999389648439, 0.2010917053222656, 0.20014271545410156, 0.20071466064453125, 0.20057279968261718, 0.20218450927734374, 0.20135087585449218, 0.2014007110595703, 0.2018877410888672, 0.20046438598632813, 0.2017725372314453, 0.20216268920898436, 0.2010963897705078, 0.20342198181152343, 0.20194467163085938, 0.20174557495117187, 0.20263731384277345, 0.2039500732421875, 0.2016563262939453, 0.2019000244140625, 0.20221653747558593, 0.20125584411621095, 0.20754637145996094, 0.20355830383300783, 0.20273622131347657, 0.2019471435546875, 0.20114793395996095, 0.20308741760253907, 0.2021037139892578, 0.2021212158203125, 0.20190223693847656, 0.20129776000976562, 0.20097024536132813, 0.19999337768554687, 0.201144287109375, 0.20080026245117188, 0.20222898864746094, 0.2022387237548828, 0.20124263000488282, 0.20156211853027345, 0.2009374694824219, 0.200732666015625, 0.2012704620361328, 0.20140847778320312, 0.2020708770751953, 0.2003927001953125, 0.20282981872558595, 0.2006814422607422, 0.20064259338378906, 0.20077702331542968, 0.201449951171875, 0.20067123413085938, 0.19999562072753907, 0.20088204956054687, 0.20110774230957032, 0.2009491882324219, 0.2001243896484375, 0.2004721221923828, 0.2003424072265625, 0.20055232238769533, 0.19968582153320313, 0.199029541015625, 0.20428675842285157, 0.20107557678222657, 0.20115455627441406, 0.2018540496826172, 0.20223478698730468, 0.20110336303710938, 0.20137327575683595, 0.20151542663574218, 0.200521728515625, 0.19956736755371093, 0.20952447509765626, 0.20031103515625, 0.20149862670898439, 0.20171107482910156, 0.20084585571289063, 0.20088627624511718, 0.20081394958496093, 0.20067800903320313, 0.20009059143066407, 0.19950265502929687, 0.20113818359375, 0.20136140441894532, 0.2004104919433594, 0.20387718200683594, 0.2020568389892578, 0.20197651672363282, 0.2023071746826172, 0.20146453857421875, 0.1999331817626953, 0.20032762145996094, 0.20121951293945312, 0.20284169006347658, 0.2009835205078125, 0.19970431518554688, 0.20065715026855468, 0.20053602600097656, 0.20150650024414063, 0.20047906494140624, 0.19940716552734375, 0.20102572631835938, 0.20079632568359376, 0.20256483459472657, 0.20033421325683592, 0.19929493713378907, 0.19880918884277343, 0.20062806701660157, 0.2005853729248047, 0.200581787109375, 0.2013468475341797, 
0.20125286865234376, 0.20180992126464845, 0.2025676727294922, 0.2055720977783203, 0.20112384033203126, 0.2006075897216797, 0.20060755920410156, 0.20111007690429689, 0.20894496154785155, 0.19960418701171875, 0.20397427368164062, 0.20071871948242187, 0.20430642700195312, 0.20158642578125, 0.20157260131835938, 0.20174038696289062, 0.20071119689941405, 0.20173504638671874, 0.201606689453125, 0.2006776885986328, 0.20015478515625, 0.20145228576660157, 0.20187532043457032, 0.2054019775390625, 0.2016215057373047, 0.20093338012695314, 0.2003228759765625, 0.20083116149902344, 0.20090467834472656, 0.20075526428222656, 0.20280096435546874, 0.2025568389892578, 0.201038330078125, 0.199950439453125, 0.19969842529296875, 0.1998603515625, 0.2080431365966797, 0.2016570281982422, 0.20259580993652343, 0.2016036834716797, 0.2020106201171875, 0.20386306762695314, 0.20306019592285157, 0.20191583251953124, 0.20246176147460937, 0.2025059814453125, 0.20360832214355468, 0.20245709228515624, 0.20603395080566406, 0.20219593811035155, 0.20634214782714844, 0.20507443237304687, 0.20452351379394532, 0.20109513854980468, 0.20062380981445313, 0.2007553253173828, 0.2017015380859375, 0.20049722290039063, 0.21251890563964843, 0.20196333312988282, 0.20127078247070312, 0.20322149658203126, 0.20217047119140624, 0.20237525939941406, 0.20119078063964843, 0.20169381713867188, 0.2025041961669922, 0.20030581665039063, 0.20224246215820313, 0.20123078918457032, 0.2008962860107422, 0.2013341064453125, 0.20307667541503907, 0.20315936279296876, 0.20257382202148438, 0.2014883575439453, 0.2022706298828125, 0.2011564483642578, 0.2025592041015625, 0.2023912353515625, 0.20209770202636718, 0.20479983520507813, 0.20207002258300782, 0.20171160888671874, 0.20164633178710936, 0.20246707153320312, 0.20116844177246093, 0.20304920959472655, 0.20386834716796876, 0.2042401885986328, 0.20386886596679688, 0.20901417541503906, 0.20492962646484375, 0.204769287109375, 0.20480012512207033, 0.20397747802734376, 0.20527658081054687, 0.2044217529296875, 0.20472422790527345, 0.2062146911621094, 0.20837026977539064, 0.20484428405761718, 0.20491123962402344, 0.20446003723144532, 0.20406886291503906, 0.204398681640625, 0.21483941650390626, 0.20534857177734375, 0.205316162109375, 0.20480332946777344, 0.20534757995605468, 0.20407501220703125, 0.20485142517089844, 0.2049697570800781, 0.20385093688964845, 0.2041393280029297, 0.2041589813232422, 0.20392710876464842, 0.20406112670898438, 0.2037959747314453, 0.20898060607910157, 0.205025146484375, 0.20528128051757813, 0.20495126342773437, 0.2054169921875, 0.20407066345214844, 0.203905029296875, 0.20413772583007814, 0.2043725128173828, 0.20332902526855468, 0.20398358154296875, 0.20425494384765625, 0.2036801300048828, 0.20603074645996095, 0.20392755126953124, 0.20387840270996094, 0.20428314208984374, 0.20427238464355468, 0.20522601318359374, 0.2042388153076172, 0.2037671356201172, 0.20311875915527344, 0.20346931457519532, 0.20562025451660157, 0.20456752014160157, 0.20792880249023438, 0.20427244567871095, 0.2049819793701172, 0.2066677703857422, 0.20775117492675782, 0.20592434692382813, 0.20429005432128905, 0.20404611206054687, 0.2128520050048828, 0.20437193298339842, 0.2089318084716797, 0.2067415008544922, 0.205549560546875, 0.20600119018554688, 0.20572633361816406, 0.20532054138183595, 0.20629638671875, 0.20644102478027343, 0.20615171813964844, 0.20578927612304687, 0.2066657257080078, 0.2060738525390625, 0.2104523468017578, 0.20749098205566407, 0.20669667053222657, 0.20708096313476562, 0.20860159301757814, 
0.20911923217773437, 0.2077696075439453, 0.2074292755126953, 0.2059141082763672, 0.20595542907714845, 0.20600962829589844, 0.20661024475097656, 0.20608607482910157, 0.20635133361816407, 0.20621107482910156, 0.20691148376464844, 0.20815667724609374, 0.206063232421875, 0.20637869262695313, 0.20603768920898438, 0.20689305114746093, 0.20573747253417968, 0.2078961639404297, 0.2064024658203125, 0.20501925659179687, 0.20939558410644532, 0.20554547119140626, 0.20632115173339843, 0.2062607421875, 0.2085253143310547, 0.2048305206298828, 0.20441465759277344, 0.2043270721435547, 0.204161376953125, 0.20388864135742188, 0.20731698608398438, 0.21272166442871093, 0.20358583068847655, 0.2048058624267578, 0.20500889587402343, 0.2059735107421875, 0.2045050811767578, 0.2040606689453125, 0.20411573791503906, 0.20328997802734375, 0.20485002136230468, 0.20522940063476564, 0.20405314636230468, 0.20333917236328125, 0.20655778503417968, 0.20546368408203125, 0.20452134704589844, 0.2045358123779297, 0.20441949462890624, 0.20743295288085936, 0.20617701721191406, 0.2041835479736328, 0.20602674865722656, 0.2041343994140625, 0.20441279602050783, 0.20463629150390625, 0.20484915161132813, 0.20481228637695312, 0.204769287109375, 0.20510220336914062, 0.20471725463867188, 0.20469523620605468, 0.20450265502929688, 0.20518467712402344, 0.20507516479492188, 0.20345193481445312, 0.20297366333007813, 0.20244479370117188, 0.20718205261230468, 0.20533021545410157, 0.20443545532226562, 0.2050314178466797, 0.20555162048339845, 0.20496365356445312, 0.20575251770019531, 0.20425852966308594, 0.20361407470703125, 0.20274064636230468, 0.203910400390625, 0.20896640014648438, 0.20392051696777344, 0.20242112731933593, 0.20526275634765626, 0.20476431274414061, 0.2047780456542969, 0.20776748657226562, 0.20421795654296876, 0.2042106170654297, 0.20508621215820313, 0.20599900817871095, 0.20506816101074218, 0.2058814697265625, 0.20542083740234374, 0.2057678985595703, 0.20501744079589843, 0.2045314636230469, 0.20385218811035155, 0.2042040252685547, 0.2046333465576172, 0.204606201171875, 0.2046829833984375, 0.20387277221679687, 0.20423062133789063, 0.20435888671875, 0.2037909393310547, 0.20389273071289063, 0.2030960693359375, 0.20398284912109374, 0.20428729248046876, 0.20429881286621093, 0.20442156982421875]",tokens/s,4.920651182319586,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,26452.058112,13903.003648,0.0,13507.756032,13505.835008,s,1,53.90316796875,53.90316796875,0.0,53.90316796875,53.90316796875,53.90316796875,53.90316796875,[53.90316796875],,kWh,0.0013569878731208365,0.0001496739902800994,0.0005118981872960088,0.002018560050696945,,MB,1244.868608,14796.3904,0.0,14380.171264,14187.445248,s,10,1.9127048645019529,0.19127048645019534,0.0022393600134042175,0.19167635345458983,0.19329945373535157,0.19392224121093748,0.19442047119140624,"[0.18762191772460937, 0.18912060546875, 0.1879237823486328, 
0.19106585693359374, 0.19111424255371093, 0.1931610565185547, 0.19454502868652343, 0.19306732177734376, 0.19223846435546876, 0.19284658813476563]",tokens/s,1338.4187218379845,kWh,5.607734266430845e-06,6.184143504098953e-07,3.719279704981173e-06,9.945428321821913e-06,tokens/kWh,25740470.064853184,MB,1267.437568,14859.30496,0.0,14443.085824,14355.886592,s,10,64.43047265625,6.443047265625,0.032960088473115615,6.45414990234375,6.4775452636718756,6.478823413085937,6.479845932617187,"[6.39431640625, 6.40673486328125, 6.417478515625, 6.3996123046875, 6.4531806640625, 6.47464306640625, 6.47202490234375, 6.47726123046875, 6.4801015625, 6.455119140625]",tokens/s,9.777981970754448,kWh,0.00018670186194481874,2.059298404982509e-05,0.0001227757937132182,0.000330070639707862,tokens/kWh,190868.23370827487,,s,630,64.42633708190921,0.10226402711414156,0.0012898268375584128,0.10208265686035156,0.1036439224243164,0.10448080749511718,0.10602578125,"[0.10036243438720703, 0.10116118621826171, 0.10032310485839843, 0.10073308563232422, 0.10051315307617187, 0.10073664093017579, 0.10029334259033203, 0.10072908782958985, 0.10038684844970704, 0.10088169860839843, 0.10077008056640625, 0.10091155242919922, 0.1010708465576172, 0.11337318420410156, 0.10201036834716797, 0.10176358032226562, 0.10084352111816407, 0.10132249450683593, 0.10085810852050782, 0.10034998321533203, 0.10034992218017579, 0.10017091369628907, 0.10019692993164063, 0.1025231704711914, 0.10110269165039062, 0.10062735748291016, 0.1002795181274414, 0.10045836639404297, 0.10055363464355468, 0.10041548919677734, 0.09961676788330077, 0.09992716979980469, 0.10060070037841796, 0.10089462280273437, 0.10093782043457031, 0.10034761810302735, 0.10047004699707031, 0.1007269744873047, 0.10151939392089844, 0.1015693130493164, 0.10300006103515626, 0.10227097320556641, 0.10274800109863282, 0.10267049407958985, 0.10381517028808594, 0.10236070251464843, 0.10202556610107422, 0.10197023773193359, 0.10315542602539063, 0.10119366455078126, 0.10151328277587891, 0.101644287109375, 0.10241024017333984, 0.10305741119384766, 0.10152518463134766, 0.1032391357421875, 0.10216534423828125, 0.10253311920166015, 0.10168057250976563, 0.10115129852294921, 0.10144502258300782, 0.10209750366210937, 0.10154505920410156, 0.10373939514160156, 0.10356326293945313, 0.10150707244873047, 0.10268262481689452, 0.10245529937744141, 0.10182208251953125, 0.1012391357421875, 0.10196380615234375, 0.10117324829101562, 0.10051321411132813, 0.10216710662841796, 0.10089891052246094, 0.1009249267578125, 0.10104457855224609, 0.10154195404052735, 0.10142710113525391, 0.10302470397949219, 0.10197609710693359, 0.10093363189697266, 0.10045362854003906, 0.10206665802001953, 0.10473910522460937, 0.10562969970703125, 0.10100444793701172, 0.10224259185791015, 0.10178822326660156, 0.10115277099609375, 0.10176633453369141, 0.10317497253417969, 0.10111759948730469, 0.10132924652099609, 0.10182041931152344, 0.10151033782958985, 0.10185196685791016, 0.10187155151367187, 0.101338623046875, 0.10053485107421875, 0.10220953369140626, 0.10108525085449219, 0.1008946533203125, 0.10245465850830078, 0.10111449432373047, 0.10150457763671875, 0.10135801696777344, 0.10119168090820313, 0.10073808288574218, 0.10113279724121094, 0.10365100860595704, 0.10152550506591797, 0.10118169403076172, 0.1014662094116211, 0.10059820556640625, 0.10144358062744141, 0.10078412628173829, 0.10058294677734375, 0.10081715393066407, 0.10548793792724609, 0.101370849609375, 0.10060511779785156, 0.10100991821289063, 0.1010444793701172, 0.10027731323242188, 
0.10081948852539062, 0.10025478363037109, 0.10009900665283203, 0.10085715484619141, 0.10062451171875, 0.10020317077636719, 0.10122434997558594, 0.10147590637207031, 0.10184339141845702, 0.10235517120361329, 0.10118940734863281, 0.10153778839111328, 0.10124396514892578, 0.10258322906494141, 0.10143743896484375, 0.10191168212890625, 0.10259852600097656, 0.10182553863525391, 0.10339532470703125, 0.101718017578125, 0.10163398742675782, 0.10208262634277344, 0.10181964874267578, 0.10376473236083984, 0.10134697723388672, 0.10138188934326171, 0.1018024673461914, 0.10167922973632812, 0.10235289764404297, 0.10155817413330079, 0.1032968978881836, 0.10142092895507812, 0.10262358093261718, 0.10320230102539063, 0.10153421020507812, 0.10148659515380859, 0.10238108825683594, 0.1030656967163086, 0.10292876434326172, 0.10121113586425781, 0.10259056091308594, 0.10120489501953125, 0.10171392059326172, 0.10194303894042969, 0.102914306640625, 0.10191667175292969, 0.10158723449707031, 0.10309529876708984, 0.10177849578857422, 0.1016193618774414, 0.10308354949951172, 0.1021956787109375, 0.10147225952148438, 0.10088652801513671, 0.10145378875732422, 0.10198963165283204, 0.10328345489501953, 0.10243280029296875, 0.10183817291259765, 0.10120464324951171, 0.10121414184570313, 0.10114182281494141, 0.102580322265625, 0.10099699401855469, 0.10244313812255859, 0.10162620544433594, 0.10062774658203125, 0.10309056091308594, 0.1017257308959961, 0.10115084838867187, 0.1013064956665039, 0.10229167938232422, 0.10128179168701172, 0.10163996887207032, 0.10105808258056641, 0.10236393737792969, 0.10083318328857421, 0.10045970916748047, 0.10133920288085937, 0.10063241577148438, 0.1040777587890625, 0.10109983825683594, 0.10187113952636719, 0.10124556732177735, 0.10109951782226563, 0.10196125030517578, 0.10138841247558594, 0.10070633697509766, 0.10089868927001953, 0.10243116760253906, 0.10068991851806641, 0.10030073547363282, 0.09989756774902343, 0.10114403533935547, 0.10202057647705078, 0.10198518371582031, 0.10482015991210937, 0.10167894744873048, 0.10177200317382812, 0.10171392059326172, 0.10203075408935547, 0.10180818939208984, 0.10077145385742188, 0.10470492553710938, 0.1021435546875, 0.10176351928710937, 0.10125862121582031, 0.10084400177001954, 0.10150415802001952, 0.10220003509521484, 0.10124317169189453, 0.10113843536376953, 0.10048089599609375, 0.10187379455566406, 0.10125721740722657, 0.10095410919189453, 0.10115657806396484, 0.10068348693847656, 0.1016336669921875, 0.10167801666259765, 0.10140467071533203, 0.10152041625976563, 0.10140547180175781, 0.10156649780273437, 0.10213168334960937, 0.1015882568359375, 0.10177753448486328, 0.1016341781616211, 0.10081075286865235, 0.10081795501708984, 0.10056166076660156, 0.10112432098388671, 0.1019566421508789, 0.10209401702880859, 0.10171571350097657, 0.10250220489501953, 0.10266851043701172, 0.10273990631103516, 0.10145900726318359, 0.10204259490966797, 0.10268396759033203, 0.10271612548828125, 0.10252217864990235, 0.10450809478759765, 0.10297698974609375, 0.1062733154296875, 0.10311885070800782, 0.10305055999755859, 0.10228921508789063, 0.10263581085205079, 0.10330281829833984, 0.10465122985839843, 0.10291165161132812, 0.10267648315429688, 0.10201785278320312, 0.10172825622558594, 0.1036410903930664, 0.10275836944580079, 0.10153372955322265, 0.10272972869873047, 0.10145692443847656, 0.10133808135986327, 0.10327654266357422, 0.10221363067626953, 0.10117324829101562, 0.10238953399658203, 0.10244322967529297, 0.10134114837646484, 0.10322128295898438, 0.10178752136230469, 
0.10142642974853516, 0.1034044189453125, 0.1022525405883789, 0.10173593902587891, 0.10151936340332031, 0.10357196807861328, 0.10545970916748047, 0.10246896362304687, 0.1042597427368164, 0.10185913848876953, 0.10162361907958985, 0.10393689727783204, 0.10114457702636719, 0.10106470489501954, 0.10284012603759765, 0.10444745635986329, 0.10164911651611327, 0.10243276977539062, 0.1015040283203125, 0.10071920013427735, 0.10152012634277344, 0.10233356475830079, 0.10175753784179688, 0.1015156478881836, 0.10244198608398437, 0.10200899505615234, 0.10189266967773437, 0.10399078369140625, 0.10306822204589844, 0.10202067565917969, 0.10245779418945312, 0.10231820678710937, 0.10139593505859375, 0.10258076477050782, 0.10382131195068359, 0.10134937286376954, 0.10128998565673829, 0.10261273956298828, 0.10213999938964843, 0.10184925079345702, 0.10207379150390625, 0.1019459228515625, 0.10142649841308594, 0.1022757797241211, 0.10197811126708985, 0.10199040222167968, 0.10210508728027344, 0.10693427276611328, 0.1017200927734375, 0.10173824310302734, 0.10210940551757812, 0.10190348815917968, 0.10133798217773438, 0.10308729553222656, 0.10161030578613281, 0.10194534301757813, 0.10181552124023438, 0.1022116470336914, 0.10233110046386719, 0.10347315216064454, 0.10262857818603516, 0.10310530853271484, 0.10319593811035156, 0.10360086059570313, 0.1032204818725586, 0.10220416259765625, 0.10290073394775391, 0.1026344985961914, 0.10375548553466797, 0.10194972991943359, 0.10535929870605469, 0.10303215789794921, 0.10319465637207031, 0.1027910385131836, 0.1036583023071289, 0.11184333038330078, 0.10397901153564452, 0.10555126190185547, 0.10255779266357422, 0.10329929351806641, 0.10338893127441406, 0.10256793975830078, 0.10338972473144531, 0.10318895721435548, 0.10333529663085937, 0.10367263793945312, 0.10289497375488281, 0.10263638305664062, 0.10266400146484375, 0.10198630523681641, 0.10245702362060546, 0.10275462341308594, 0.10531782531738282, 0.10222361755371094, 0.10242454528808594, 0.10282854461669921, 0.10223446655273437, 0.10190672302246094, 0.10218876647949218, 0.10263346862792969, 0.10668441772460938, 0.10250444793701172, 0.10354483032226562, 0.10216822052001953, 0.1024781723022461, 0.10489145660400391, 0.10200774383544922, 0.10163168334960937, 0.10589241790771485, 0.10376771545410156, 0.10183074951171875, 0.1028136978149414, 0.10213529968261718, 0.1016591339111328, 0.10216758728027343, 0.10259555053710938, 0.10191462707519532, 0.1018918685913086, 0.10296578979492188, 0.10215392303466797, 0.10120191955566406, 0.10336847686767578, 0.10237920379638672, 0.101432861328125, 0.1026693115234375, 0.1024135971069336, 0.10123654174804687, 0.10170665740966797, 0.10359603118896485, 0.10299161529541015, 0.10222822570800781, 0.1026355209350586, 0.10362841796875, 0.10217817687988281, 0.10236006164550782, 0.10385724639892578, 0.10238153839111327, 0.1027491226196289, 0.10323558044433594, 0.10312032318115234, 0.10265382385253906, 0.10238582611083985, 0.1031236801147461, 0.10246089935302734, 0.10247196960449219, 0.10211084747314453, 0.10158694458007812, 0.1016380157470703, 0.10183897399902343, 0.10212761688232422, 0.10316185760498046, 0.10330252838134765, 0.10396070098876953, 0.10338502502441406, 0.1034000015258789, 0.10401996612548828, 0.1031492462158203, 0.1026337890625, 0.10459750366210938, 0.10364313507080078, 0.10323919677734375, 0.10303740692138671, 0.10400768280029297, 0.10228326416015625, 0.10362265777587891, 0.10324153900146485, 0.10384611511230468, 0.10195555114746094, 0.10161740875244141, 0.10353279876708985, 
0.10453584289550781, 0.10293782043457031, 0.10331033325195313, 0.10237747192382812, 0.10194866943359375, 0.10263401794433594, 0.10265007781982421, 0.1017567367553711, 0.1032806396484375, 0.105455810546875, 0.10206412506103515, 0.10266969299316406, 0.1040943374633789, 0.10320076751708984, 0.10240153503417969, 0.1051632308959961, 0.10312911987304688, 0.10295417785644531, 0.10329942321777344, 0.10180592346191407, 0.10196176147460938, 0.10162236785888672, 0.10176921844482421, 0.10142105865478515, 0.10133673858642578, 0.10211158752441406, 0.10535321807861328, 0.10179766082763672, 0.10225718688964844, 0.10141664123535156, 0.10165631866455078, 0.10186367797851563, 0.1021333770751953, 0.10139830780029296, 0.10500975799560547, 0.10263865661621094, 0.1015603485107422, 0.10199308776855469, 0.1030450210571289, 0.10221766662597656, 0.10192607879638672, 0.10219200134277344, 0.10197196960449219, 0.10176694488525391, 0.10133526611328125, 0.10186579132080079, 0.10542025756835938, 0.10138758087158203, 0.10188687896728515, 0.10147382354736328, 0.10199292755126953, 0.10280960083007812, 0.1036390380859375, 0.1026131820678711, 0.10345452880859375, 0.10253919982910156, 0.1025549087524414, 0.10460358428955079, 0.10241677093505859, 0.10505059051513672, 0.10232832336425782, 0.10311824035644532, 0.10327715301513672, 0.10319462585449218, 0.10341149139404297, 0.10375177764892578, 0.10324390411376953, 0.10418540954589844, 0.10356956481933594, 0.10323139190673829, 0.10308236694335937, 0.10286899566650391, 0.10220252990722656, 0.10257494354248047, 0.10275667572021484, 0.10330044555664063, 0.10297814178466796, 0.10342784118652344, 0.10172621154785157, 0.10361427307128906, 0.10210646057128907, 0.10182844543457031, 0.10198528289794923, 0.10301222229003906, 0.10208268737792969, 0.10169058990478516, 0.10153590393066406, 0.10346969604492187, 0.10289356994628907, 0.10500505828857422, 0.10205593872070312, 0.10501465606689453, 0.1023752670288086, 0.10240080261230469, 0.10249830627441406, 0.10608025360107422, 0.10393791961669922, 0.10244313812255859, 0.10295500946044922, 0.10190633392333984, 0.10177072143554687, 0.10363967895507813, 0.10168355560302735, 0.10250605010986329, 0.10230841827392578, 0.1020948486328125, 0.10212092590332031, 0.10318281555175782, 0.10225055694580078, 0.10214195251464844, 0.10323887634277344, 0.10245507049560547, 0.1024561309814453, 0.10370451354980469, 0.1036864013671875, 0.102508544921875, 0.10205181121826172, 0.1047429428100586, 0.10298070526123047, 0.1025479965209961, 0.10793408203125, 0.10382131195068359, 0.10249830627441406, 0.1018408660888672, 0.10372509002685547, 0.10211532592773437, 0.10231734466552735, 0.1041824951171875, 0.10357081604003907, 0.10217855834960937, 0.10175987243652344, 0.10232832336425782, 0.10153369903564453, 0.10149667358398437, 0.10166492462158203, 0.10131865692138672, 0.10103590393066406, 0.10162598419189453, 0.10211081695556641, 0.10158086395263671, 0.10152937316894531, 0.10195001220703125, 0.10125721740722657, 0.10112521362304687, 0.10193807983398437, 0.10131644439697265, 0.10176118469238281, 0.10145177459716796, 0.10282803344726563, 0.10183869171142577, 0.10147593688964844, 0.10112265777587891, 0.10120150756835937, 0.10393369293212891, 0.1031072006225586, 0.10255359649658204, 0.10316799926757812, 0.10262528228759765, 0.10239385223388672, 0.10250342559814453, 0.10254386901855468, 0.10382380676269531, 0.10220550537109375, 0.1032806396484375, 0.10244915008544922]",tokens/s,9.778609626666219,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,1081.700352,608.043008,0.0,205.520896,177.265664,s,1,8.00251611328125,8.00251611328125,0.0,8.00251611328125,8.00251611328125,8.00251611328125,8.00251611328125,[8.00251611328125],,kWh,2.3312826570888015e-05,2.5643125598772975e-06,7.19083908598539e-06,3.30679782167507e-05,,MB,1360.605184,710.803456,0.0,287.309824,258.169344,s,10,0.3105099506378174,0.031050995063781738,0.0005464407899618577,0.030969711303710935,0.03176811256408691,0.03180188007354736,0.03182889408111572,"[0.03183564758300781, 0.0317606086730957, 0.03174070358276367, 0.031025375366210937, 0.030914047241210937, 0.031154752731323242, 0.03090559959411621, 0.030245792388916014, 0.030491775512695312, 0.03043564796447754]",tokens/s,8244.50229289436,kWh,9.00014889487069e-07,9.924035146773679e-08,3.5342677846772473e-07,1.3526820194225306e-06,tokens/kWh,189253643.00272742,MB,1393.344512,731.774976,0.0,308.281344,258.171904,s,10,18.334688964843753,1.8334688964843753,0.025071541482289746,1.831104736328125,1.8745905029296874,1.8788193603515626,1.8822024462890625,"[1.8830482177734376, 1.8736507568359375, 1.829335693359375, 1.832873779296875, 1.8376553955078125, 1.83568017578125, 1.81231103515625, 1.81126123046875, 1.8032188720703124, 1.81565380859375]",tokens/s,34.36109558269612,kWh,5.2801646031341464e-05,5.823714187358405e-06,1.7361476282332643e-05,7.598683650103253e-05,tokens/kWh,829090.9702385615,,s,630,18.329457202911385,0.029094376512557744,0.0005833872895712093,0.028966479301452637,0.029763935279846193,0.02997257585525513,0.03098142650604248,"[0.029205984115600585, 0.029466144561767577, 0.029496255874633788, 0.029476127624511718, 0.029588447570800782, 0.029558591842651367, 0.02961359977722168, 0.029714143753051758, 0.029952543258666992, 0.029692127227783204, 0.029775680541992186, 0.029628768920898437, 0.02957638359069824, 0.0295963191986084, 0.02955232048034668, 0.029589151382446287, 0.029711008071899414, 0.02984457588195801, 0.029563039779663087, 0.02960051155090332, 0.02969830322265625, 0.029755392074584962, 0.029674272537231445, 0.029559072494506837, 0.029897727966308595, 0.029632383346557618, 0.029595199584960936, 0.029635072708129883, 0.029699167251586913, 0.03440092849731445, 0.02998067283630371, 0.029861663818359373, 0.02976380729675293, 0.02999443244934082, 0.030216320037841797, 0.029739456176757814, 0.02988140869140625, 0.031177663803100587, 0.029851648330688478, 0.029829120635986327, 0.029736352920532227, 0.029776479721069334, 0.030424800872802735, 0.02966352081298828, 0.029909215927124023, 0.029625568389892578, 0.03021376037597656, 0.029963327407836915, 0.029859712600708008, 0.029812320709228516, 0.029719104766845705, 0.02974048042297363, 0.029600000381469725, 0.029663360595703125, 0.029617536544799806, 0.02954934310913086, 0.030178207397460938, 0.031486944198608396, 0.029875776290893555, 0.03016566467285156, 0.02981670379638672, 
0.029921344757080078, 0.030062431335449218, 0.029339744567871095, 0.029537248611450195, 0.029561471939086915, 0.029573375701904298, 0.029470176696777345, 0.029610464096069336, 0.029543903350830077, 0.030378559112548827, 0.029706335067749022, 0.029562688827514647, 0.02960758399963379, 0.029475360870361327, 0.029708351135253906, 0.029900224685668945, 0.029809152603149414, 0.029908992767333983, 0.029660352706909178, 0.029688928604125978, 0.02947862434387207, 0.029719968795776368, 0.02966172790527344, 0.029474592208862303, 0.02970857620239258, 0.02954787254333496, 0.02940380859375, 0.029558271408081056, 0.02950320053100586, 0.029757888793945312, 0.029765087127685545, 0.02975529670715332, 0.029727392196655274, 0.02960416030883789, 0.02986556816101074, 0.030097824096679687, 0.02971161651611328, 0.02995187187194824, 0.030094207763671874, 0.031749216079711914, 0.02959040069580078, 0.029623903274536133, 0.029497695922851563, 0.029521247863769532, 0.029715200424194337, 0.029590911865234375, 0.029477760314941405, 0.029901599884033202, 0.029912031173706055, 0.030232576370239257, 0.02953331184387207, 0.03008153533935547, 0.029791807174682616, 0.029791263580322264, 0.029689632415771484, 0.029450239181518553, 0.02947088050842285, 0.02943987274169922, 0.02958355140686035, 0.029643999099731446, 0.02973958396911621, 0.029937664031982423, 0.02968780708312988, 0.029971519470214845, 0.029973440170288086, 0.029179904937744142, 0.029274335861206056, 0.029289695739746095, 0.029057600021362304, 0.029059072494506837, 0.029030527114868164, 0.028993408203125, 0.028921695709228517, 0.02888924789428711, 0.028966335296630858, 0.028969568252563478, 0.028914815902709962, 0.028926015853881836, 0.028892063140869142, 0.029001247406005858, 0.029108320236206055, 0.029018272399902345, 0.028970815658569335, 0.030501216888427735, 0.030950368881225584, 0.028918752670288084, 0.029050912857055664, 0.028979455947875977, 0.028978208541870117, 0.028891839981079102, 0.028865663528442383, 0.028957792282104492, 0.028785791397094727, 0.028916000366210937, 0.028864896774291993, 0.028835968017578126, 0.028925439834594727, 0.02895715141296387, 0.02893609619140625, 0.028804447174072264, 0.02892665672302246, 0.029019935607910156, 0.029064960479736328, 0.028835359573364257, 0.028922143936157226, 0.028960447311401367, 0.028844991683959962, 0.029079551696777343, 0.028891136169433593, 0.028851200103759765, 0.028924448013305664, 0.02888547134399414, 0.02896076774597168, 0.029052383422851564, 0.02915990447998047, 0.02902355194091797, 0.02892582321166992, 0.029087711334228515, 0.028844640731811522, 0.028963136672973632, 0.02891366386413574, 0.028810304641723634, 0.028870880126953127, 0.02885100746154785, 0.028947839736938475, 0.029173887252807618, 0.029098400115966795, 0.029315071105957033, 0.02861039924621582, 0.028740896224975585, 0.030145088195800782, 0.029327072143554688, 0.02891427230834961, 0.028909408569335937, 0.02887654495239258, 0.029151487350463866, 0.02943414306640625, 0.029165279388427733, 0.029318912506103516, 0.02923129653930664, 0.02921811294555664, 0.02924857521057129, 0.029083328247070314, 0.028984703063964844, 0.028843807220458983, 0.028937055587768556, 0.02891276741027832, 0.02912099266052246, 0.028860511779785155, 0.028817440032958986, 0.029020448684692383, 0.028896287918090822, 0.028933088302612306, 0.029067327499389648, 0.02889311981201172, 0.02898524856567383, 0.02896086311340332, 0.028884992599487305, 0.0289751033782959, 0.02888015937805176, 0.02888096046447754, 0.028908191680908205, 0.028870655059814454, 0.028925376892089842, 
0.029192768096923827, 0.0291778564453125, 0.028966623306274412, 0.028912031173706054, 0.02901593589782715, 0.029348896026611327, 0.029288768768310547, 0.028975231170654297, 0.028949024200439454, 0.02938470458984375, 0.029375680923461912, 0.029507455825805665, 0.029360479354858398, 0.029434688568115236, 0.029606719970703126, 0.029494239807128907, 0.02919219207763672, 0.028997631072998048, 0.02931711959838867, 0.028940288543701172, 0.029163520812988283, 0.028868608474731446, 0.028908767700195313, 0.028998176574707032, 0.029135072708129883, 0.028972448348999022, 0.02896950340270996, 0.028641727447509764, 0.028935583114624023, 0.02879996871948242, 0.02896691131591797, 0.028831743240356447, 0.02892598342895508, 0.02918396759033203, 0.02920038414001465, 0.029001728057861328, 0.02894985580444336, 0.028930240631103516, 0.029253503799438477, 0.029823583602905275, 0.029132799148559572, 0.029014015197753908, 0.029161663055419923, 0.028845407485961913, 0.02906159973144531, 0.02933964729309082, 0.029553823471069336, 0.02898588752746582, 0.029469215393066406, 0.029585311889648438, 0.029375776290893555, 0.02951228713989258, 0.029339487075805665, 0.029511072158813476, 0.029190975189208983, 0.029413312911987306, 0.029925376892089843, 0.0295280647277832, 0.02911846351623535, 0.02903606414794922, 0.029049312591552735, 0.028863807678222657, 0.0290097599029541, 0.028922719955444334, 0.028891263961791994, 0.029002687454223634, 0.028976064682006836, 0.029136800765991212, 0.02935977554321289, 0.029241792678833006, 0.029083648681640626, 0.028876863479614257, 0.028997791290283202, 0.029069087982177735, 0.029011743545532227, 0.02899171257019043, 0.02925993537902832, 0.029422815322875977, 0.029105024337768556, 0.028959583282470704, 0.029434783935546875, 0.029128704071044922, 0.029150943756103515, 0.028964864730834962, 0.029005695343017578, 0.030556575775146484, 0.029038591384887694, 0.028852479934692383, 0.02897011184692383, 0.029224639892578126, 0.02866899108886719, 0.029563840866088868, 0.029569023132324217, 0.029311199188232422, 0.02923721694946289, 0.029024063110351564, 0.028888383865356446, 0.028813535690307618, 0.02892848014831543, 0.028635135650634767, 0.02867555236816406, 0.02895948791503906, 0.028742816925048827, 0.028712959289550782, 0.028740224838256837, 0.028696575164794923, 0.02890883255004883, 0.028843008041381835, 0.028744447708129884, 0.02885686492919922, 0.028903871536254882, 0.028850175857543944, 0.02888902473449707, 0.028810623168945313, 0.028887327194213868, 0.028967071533203125, 0.028911712646484376, 0.02884623908996582, 0.02916543960571289, 0.02939014434814453, 0.029345951080322265, 0.02927039909362793, 0.029417119979858398, 0.02955516815185547, 0.029771936416625976, 0.02959769630432129, 0.02934931182861328, 0.02921766471862793, 0.03099411201477051, 0.0288703670501709, 0.02887094306945801, 0.0288155517578125, 0.02889094352722168, 0.028817216873168947, 0.028785087585449218, 0.028767520904541016, 0.028906015396118163, 0.028915935516357422, 0.02883350372314453, 0.02918809509277344, 0.030774784088134766, 0.030773759841918946, 0.028999200820922853, 0.028956672668457032, 0.029336032867431642, 0.028903360366821288, 0.028868032455444337, 0.029161184310913087, 0.030241727828979492, 0.028930015563964843, 0.02921881675720215, 0.029361663818359376, 0.029317632675170898, 0.028434431076049805, 0.02886355209350586, 0.02891257667541504, 0.028649471282958985, 0.028492895126342774, 0.02853366470336914, 0.028545024871826172, 0.029246784210205077, 0.02882975959777832, 0.028604415893554686, 0.028998399734497072, 
0.030074880599975585, 0.028989311218261718, 0.030177087783813478, 0.029525823593139648, 0.028880767822265625, 0.028942848205566408, 0.02877235221862793, 0.028504159927368163, 0.028612159729003907, 0.02854742431640625, 0.02865692710876465, 0.028914400100708008, 0.028651519775390624, 0.02871004867553711, 0.028576608657836913, 0.028489728927612305, 0.02857766342163086, 0.028479808807373046, 0.028655712127685546, 0.02846067237854004, 0.02878607940673828, 0.02893846321105957, 0.02880374336242676, 0.028831455230712892, 0.028616384506225587, 0.028606880187988282, 0.028588031768798827, 0.028669567108154298, 0.028575679779052735, 0.028541631698608398, 0.02864041519165039, 0.028717695236206056, 0.028610111236572266, 0.028514720916748046, 0.02870688056945801, 0.028530431747436524, 0.029108415603637694, 0.03073859214782715, 0.028489568710327148, 0.028715200424194336, 0.02845462417602539, 0.028788511276245116, 0.028678335189819337, 0.028483392715454102, 0.028494144439697267, 0.028432384490966797, 0.028507551193237304, 0.028403488159179688, 0.028736032485961915, 0.028983583450317384, 0.02837615966796875, 0.028461984634399414, 0.0280830078125, 0.028608480453491212, 0.028342016220092775, 0.028412160873413087, 0.028500255584716798, 0.028813024520874024, 0.028478815078735353, 0.028602624893188478, 0.028504480361938478, 0.028291072845458985, 0.028420095443725587, 0.028388511657714843, 0.028467967987060548, 0.032320606231689454, 0.029035551071166992, 0.028524511337280272, 0.028555200576782225, 0.028450271606445313, 0.02840028762817383, 0.02846713638305664, 0.028306751251220702, 0.02852729606628418, 0.028522655487060546, 0.02901795196533203, 0.028987680435180664, 0.028573375701904297, 0.028657344818115233, 0.02841212844848633, 0.028461183547973633, 0.028390623092651366, 0.028443071365356447, 0.028467552185058594, 0.02844476890563965, 0.0284256649017334, 0.02833168029785156, 0.028388160705566406, 0.028493024826049804, 0.028324640274047852, 0.028370399475097657, 0.02843497657775879, 0.02854297637939453, 0.028475391387939454, 0.028446943283081054, 0.028552255630493163, 0.0313753604888916, 0.029712608337402344, 0.028995712280273436, 0.029538591384887694, 0.029331392288208007, 0.029085695266723634, 0.02897318458557129, 0.029337631225585938, 0.029023296356201173, 0.028864896774291993, 0.029073984146118163, 0.02919628715515137, 0.029222431182861327, 0.028807647705078127, 0.028608383178710936, 0.028457088470458983, 0.02842742347717285, 0.028695199966430666, 0.028397184371948242, 0.028187231063842775, 0.028688800811767577, 0.028453887939453124, 0.02843123245239258, 0.028419584274291993, 0.028423744201660155, 0.028490144729614256, 0.02875142478942871, 0.0284354248046875, 0.028407808303833007, 0.02873263931274414, 0.02852076721191406, 0.028850656509399414, 0.028566911697387697, 0.028507808685302734, 0.02848252868652344, 0.028471296310424804, 0.02852659225463867, 0.02860339164733887, 0.02848409652709961, 0.02877187156677246, 0.028939231872558594, 0.02898739242553711, 0.028859455108642577, 0.028779455184936523, 0.028605791091918947, 0.028385791778564453, 0.03004755210876465, 0.029191007614135744, 0.028611648559570314, 0.028484319686889647, 0.02852787208557129, 0.02869718360900879, 0.028908031463623047, 0.0288907527923584, 0.02868182373046875, 0.029299360275268554, 0.028594175338745118, 0.028425344467163084, 0.02913983917236328, 0.028395008087158204, 0.028407487869262695, 0.0283756160736084, 0.028416255950927734, 0.028507551193237304, 0.028498527526855468, 0.02839894485473633, 0.028427040100097656, 0.028460287094116212, 
0.028513952255249022, 0.02846614456176758, 0.028479488372802734, 0.02839971160888672, 0.028554143905639647, 0.028520959854125977, 0.028508672714233397, 0.028665056228637697, 0.02850399971008301, 0.02849468803405762, 0.02846512031555176, 0.028927776336669923, 0.028503328323364257, 0.02859062385559082, 0.028057600021362306, 0.028376703262329103, 0.028387712478637694, 0.02867635154724121, 0.028841087341308594, 0.028551807403564455, 0.028516639709472658, 0.028681951522827147, 0.02873151969909668, 0.028753023147583007, 0.02889936065673828, 0.02903932762145996, 0.02872697639465332, 0.028836160659790038, 0.028772224426269533, 0.028684608459472655, 0.028877920150756835, 0.02859491157531738, 0.028569887161254883, 0.028546783447265626, 0.028649471282958985, 0.028726688385009767, 0.02869308853149414, 0.028673152923583984, 0.028662464141845704, 0.02868675231933594, 0.028555200576782225, 0.02874729537963867, 0.02863164710998535, 0.028667552947998047, 0.02883148765563965, 0.029101631164550782, 0.029332511901855467, 0.029289344787597656, 0.029094751358032227, 0.02920857620239258, 0.029327360153198243, 0.029108224868774416, 0.0290119686126709, 0.02940332794189453, 0.029428575515747072, 0.02923721694946289, 0.02908777618408203, 0.029141311645507813, 0.029014144897460938, 0.028872800827026368, 0.02885001564025879, 0.029220672607421876, 0.029168319702148438, 0.028987104415893555, 0.028748159408569337, 0.0286944637298584, 0.028577856063842774, 0.028612607955932616, 0.0284017276763916, 0.028694047927856445, 0.028737535476684572, 0.029117088317871093, 0.028686080932617188, 0.028678047180175782, 0.028492128372192383, 0.028538623809814454, 0.028668191909790038]",tokens/s,34.37090324201926,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,14375.411712,8040.349696,0.0,7637.827584,7627.927552,s,1,32.290033203125,32.290033203125,0.0,32.290033203125,32.290033203125,32.290033203125,32.290033203125,[32.290033203125],,kWh,0.0007315360060750133,8.068236278164469e-05,0.0002463438081860003,0.0010585621770426582,,MB,1387.37664,8436.711424,0.0,8013.217792,7904.390656,s,10,1.3679185943603516,0.13679185943603517,0.000518904473399938,0.13670050811767578,0.13731319885253906,0.137660009765625,0.13793745849609376,"[0.13800682067871095, 0.13651193237304687, 0.13723612976074218, 0.13613002014160155, 0.1363008270263672, 0.13689158630371093, 0.13703846740722656, 0.13659561157226563, 0.13680540466308594, 0.13640179443359374]",tokens/s,1871.4563940824812,kWh,4.015119000171042e-06,4.4279070714782735e-07,2.673529536082251e-06,7.131439243401122e-06,tokens/kWh,35897382.17806209,MB,1412.554752,8480.751616,0.0,8055.160832,8005.6832,s,10,60.177873046875,6.0177873046875,0.02892419890281047,6.011704345703125,6.05363818359375,6.067083251953125,6.077839306640625,"[5.96954833984375, 6.0015341796875, 6.003828125, 6.0176142578125, 6.0805283203125, 6.0220068359375, 6.02659326171875, 6.00579443359375, 
6.050650390625, 5.99977490234375]",tokens/s,10.468964223931065,kWh,0.00017647632848233086,1.9466052359512172e-05,8.864007129251763e-05,0.0002845824521343607,tokens/kWh,221376.96659615415,,s,630,60.17406497955317,0.09551438885643368,0.0011703739413413006,0.09527076721191406,0.09689995956420898,0.097616943359375,0.09980681503295898,"[0.09552662658691406, 0.09528300476074218, 0.09476143646240234, 0.0945249252319336, 0.09431878662109375, 0.09427907562255859, 0.09460009765625, 0.09487359619140626, 0.09442918395996094, 0.09444102478027344, 0.09432313537597656, 0.09498326110839844, 0.0952984619140625, 0.09789849853515625, 0.09579110717773437, 0.09459712219238281, 0.0943431396484375, 0.095012451171875, 0.09472045135498047, 0.09443942260742187, 0.09481807708740235, 0.09458914947509765, 0.09506940460205078, 0.09577552032470703, 0.09467084503173828, 0.09443504333496094, 0.09448681640625, 0.09490841674804687, 0.09428173065185547, 0.09440239715576172, 0.09410553741455079, 0.09417340850830078, 0.09445558166503906, 0.09473654174804688, 0.09488963317871094, 0.09434751892089843, 0.09457193756103516, 0.09516639709472656, 0.09557670593261719, 0.09518918609619141, 0.09512957000732422, 0.09418956756591797, 0.09414659118652344, 0.09444351959228516, 0.09417091369628906, 0.09417545318603515, 0.09422582244873047, 0.0940163803100586, 0.09464393615722656, 0.09474022674560546, 0.09463209533691407, 0.09488188934326172, 0.09416905975341797, 0.09441820526123047, 0.09467161560058594, 0.094271484375, 0.09427753448486328, 0.09456822204589843, 0.09701356506347657, 0.0958755874633789, 0.09448448181152344, 0.09451519775390625, 0.09444732666015625, 0.09547100830078124, 0.0945640640258789, 0.09497280120849609, 0.09473776245117188, 0.0947001953125, 0.09519091033935546, 0.0943834228515625, 0.0997907485961914, 0.09505257415771484, 0.09503756713867187, 0.09555558776855469, 0.095283203125, 0.09540169525146484, 0.09597161865234374, 0.09599929809570312, 0.09530643463134765, 0.09584435272216797, 0.09480908966064452, 0.09446502685546875, 0.09478963470458984, 0.09482649230957031, 0.094336669921875, 0.09437423706054687, 0.09472118377685547, 0.09459168243408203, 0.0944288330078125, 0.09437753295898438, 0.0943186264038086, 0.09429232025146485, 0.09436422729492187, 0.09477251434326171, 0.09475689697265625, 0.0945462417602539, 0.09446230316162109, 0.09419369506835938, 0.09451625823974609, 0.09429296112060546, 0.09514588928222656, 0.09441855621337891, 0.09604723358154296, 0.095393310546875, 0.09811190032958984, 0.09541059112548828, 0.09446729278564453, 0.09448937225341797, 0.09473228454589844, 0.09438412475585937, 0.09465856170654297, 0.09519718170166015, 0.09504902648925781, 0.09500128173828125, 0.09501868438720704, 0.0966875228881836, 0.09597020721435547, 0.09735289764404297, 0.09583904266357422, 0.09654271697998047, 0.09586073303222656, 0.096468994140625, 0.09633792114257812, 0.09663488006591797, 0.09634406280517578, 0.09609385681152344, 0.0961884765625, 0.09546502685546875, 0.09563126373291016, 0.09556438446044922, 0.09539289855957031, 0.09488428497314454, 0.09566371154785157, 0.09492505645751953, 0.09520595550537109, 0.095889404296875, 0.0945459213256836, 0.09458252716064453, 0.09507855987548829, 0.0957952651977539, 0.09612496185302734, 0.09551203155517578, 0.09496790313720703, 0.09558675384521484, 0.09438336181640625, 0.09467932891845703, 0.09583254241943359, 0.09684172821044922, 0.09420089721679688, 0.094399169921875, 0.0940785903930664, 0.09475545501708985, 0.09483468627929688, 0.09461129760742187, 0.09488569641113281, 
0.09496339416503906, 0.09528797149658202, 0.09580989074707032, 0.09463977813720703, 0.09546259307861328, 0.09762284851074218, 0.09696825408935547, 0.09570041656494141, 0.09522073364257813, 0.09494630432128906, 0.095044189453125, 0.09576025390625, 0.09521206665039063, 0.09568592071533204, 0.09513442993164062, 0.095494140625, 0.09509273529052735, 0.09576850891113281, 0.09532982635498047, 0.09479151916503906, 0.0953587875366211, 0.09497830200195312, 0.09505241394042968, 0.09594879913330077, 0.0957276153564453, 0.09505318450927734, 0.09474317169189453, 0.09517056274414062, 0.09495756530761719, 0.094648193359375, 0.09475116729736328, 0.09515907287597657, 0.09620912170410156, 0.095263427734375, 0.0949900131225586, 0.09540982055664063, 0.09462236785888672, 0.09508025360107422, 0.09558854675292969, 0.09615155029296875, 0.095005859375, 0.09534345245361328, 0.09544239807128906, 0.09554982757568359, 0.09488368225097656, 0.0948635482788086, 0.09601222229003906, 0.09569471740722656, 0.09495378875732421, 0.10594131469726563, 0.09577977752685547, 0.09762892913818359, 0.09506816101074218, 0.09504681396484375, 0.09570780944824218, 0.09527062225341797, 0.09509910583496094, 0.09484928131103515, 0.09465446472167968, 0.09469062042236329, 0.09436224365234375, 0.09451248168945313, 0.09455017852783203, 0.09457679748535157, 0.09763471984863281, 0.09609532928466796, 0.09475971221923828, 0.0952496337890625, 0.09531484985351563, 0.09978880310058594, 0.09500867462158204, 0.09526630401611329, 0.09431903839111327, 0.0968637466430664, 0.09601907348632813, 0.09550348663330079, 0.09503427124023438, 0.09517056274414062, 0.09604505920410156, 0.09519923400878906, 0.09511116790771484, 0.09555353546142578, 0.0948305892944336, 0.09629901123046875, 0.09604505920410156, 0.09619862365722656, 0.09511103820800781, 0.09529074859619141, 0.09565058898925781, 0.09484083557128906, 0.09512489318847656, 0.09455225372314453, 0.09436774444580077, 0.094434814453125, 0.09430099487304687, 0.09432073974609376, 0.09461145782470703, 0.09527705383300782, 0.09489161682128906, 0.09525305938720703, 0.09573974609375, 0.09660380554199219, 0.09609455871582032, 0.0957353286743164, 0.09573628997802734, 0.09636678314208984, 0.09584620666503907, 0.1001400604248047, 0.09610444641113282, 0.09594774627685547, 0.09498802947998047, 0.09512748718261718, 0.09558866882324218, 0.09556320190429687, 0.09570937347412109, 0.09573808288574219, 0.09576668548583984, 0.09555919647216797, 0.09573628997802734, 0.09575122833251953, 0.09566057586669922, 0.09643622589111328, 0.09660233306884766, 0.09683487701416016, 0.09736589050292968, 0.09692822265625, 0.09705433654785156, 0.0975155487060547, 0.09664601898193359, 0.09860915374755859, 0.09738396453857422, 0.09874269104003906, 0.09910841369628906, 0.09808067321777343, 0.09738668823242187, 0.09742582702636719, 0.09716265869140625, 0.0979093475341797, 0.09683692932128907, 0.09693254089355469, 0.0957191390991211, 0.09589107513427735, 0.09554601287841796, 0.09622732543945313, 0.09595699310302734, 0.09673625946044923, 0.09529241943359375, 0.09736163330078125, 0.09655635070800782, 0.09680585479736328, 0.09557196807861328, 0.09653654479980468, 0.09899382019042968, 0.09642208099365235, 0.10072902679443359, 0.09602047729492187, 0.09513555145263672, 0.09669007873535156, 0.09480601501464844, 0.09523168182373047, 0.09532569885253907, 0.0947225570678711, 0.09442652893066407, 0.09448918151855469, 0.09518886566162109, 0.09785673522949219, 0.09542115020751953, 0.0958322525024414, 0.09517183685302734, 0.09546828460693359, 0.0958361587524414, 
0.09551033782958984, 0.0977061767578125, 0.09868697357177734, 0.09981337738037109, 0.09770550537109375, 0.09662496185302734, 0.09666985321044921, 0.09652019500732421, 0.09618169403076172, 0.09543328094482421, 0.09559654235839844, 0.09507225799560547, 0.09488361358642577, 0.09498403167724609, 0.09555596923828125, 0.0951108169555664, 0.09527737426757812, 0.09528508758544922, 0.09712454223632813, 0.09534464263916016, 0.0949719009399414, 0.09571123504638672, 0.09552281951904297, 0.09503094482421875, 0.09495587158203125, 0.09584230041503906, 0.09519951629638672, 0.09576614379882813, 0.095602783203125, 0.09477324676513672, 0.09534259033203125, 0.09577862548828125, 0.09540585327148438, 0.09401564788818359, 0.09480818939208985, 0.09575651550292968, 0.09541382598876953, 0.09593686676025391, 0.0983358383178711, 0.09647593688964844, 0.09445184326171875, 0.09509004974365234, 0.09504217529296875, 0.09409667205810547, 0.09471798706054688, 0.09439075469970704, 0.09467721557617187, 0.09433702087402343, 0.09551222229003906, 0.09484732818603515, 0.09428355407714843, 0.09468860626220703, 0.09613075256347656, 0.09438358306884766, 0.09436988830566406, 0.09494844818115235, 0.09428546905517578, 0.09490534210205079, 0.09465865325927734, 0.09545616149902343, 0.09439142608642578, 0.09472089385986328, 0.09473433685302735, 0.09562521362304688, 0.09480127716064453, 0.09585420989990234, 0.09579212951660156, 0.09726361846923828, 0.09573580932617187, 0.0956416015625, 0.09473574066162109, 0.09450764465332032, 0.09446163177490234, 0.09519647979736329, 0.09727622222900391, 0.0970626220703125, 0.09630169677734375, 0.09568931579589844, 0.09564326477050782, 0.09710781097412109, 0.09727401733398437, 0.09593869018554688, 0.09603801727294922, 0.09871858978271485, 0.09697257232666015, 0.09848854064941406, 0.0973268814086914, 0.09575218963623047, 0.0954636459350586, 0.09540716552734375, 0.09508096313476562, 0.09496566772460938, 0.09584226989746093, 0.09595142364501953, 0.09596109008789062, 0.0962682876586914, 0.09606553649902344, 0.09836131286621094, 0.09564572906494141, 0.09595027160644531, 0.09529714965820313, 0.0953000946044922, 0.0953463363647461, 0.09499533081054687, 0.09435321807861329, 0.09479996490478515, 0.09499199676513671, 0.09611251068115234, 0.09515660858154297, 0.09533881378173828, 0.09490962982177735, 0.0953305892944336, 0.09442720031738282, 0.09620425415039062, 0.0944935073852539, 0.0950693130493164, 0.0951182098388672, 0.09457823944091796, 0.09570681762695313, 0.09479039764404297, 0.0966123504638672, 0.09442508697509766, 0.09460655975341797, 0.09422220611572266, 0.09471683502197266, 0.0947333755493164, 0.09514672088623047, 0.09470178985595704, 0.0943267822265625, 0.09491865539550781, 0.09511638641357421, 0.09494790649414063, 0.09487983703613281, 0.09446221160888672, 0.09473843383789063, 0.09445171356201172, 0.0948674545288086, 0.09632147216796876, 0.0949351043701172, 0.0949816665649414, 0.09986914825439454, 0.09645487976074218, 0.09584928131103515, 0.09527091217041016, 0.09548812866210937, 0.09525334167480469, 0.09570713806152344, 0.09561497497558594, 0.09627238464355468, 0.09479151916503906, 0.0958746566772461, 0.09554547119140624, 0.09515865325927735, 0.09498627471923828, 0.09494060516357422, 0.0952647705078125, 0.09616035461425781, 0.09590374755859375, 0.09537741088867188, 0.09588690948486328, 0.09516841888427735, 0.09535494232177734, 0.09681353759765625, 0.09487359619140626, 0.09465650939941406, 0.09467903900146485, 0.09532003021240235, 0.09541423797607422, 0.09461766052246094, 0.09528089904785156, 
0.09593408203125, 0.09562979125976563, 0.0956514892578125, 0.09509919738769532, 0.0954554214477539, 0.09585977935791015, 0.095159423828125, 0.09538540649414062, 0.09583631896972657, 0.09588902282714844, 0.09536681365966797, 0.09515180969238281, 0.09693596649169922, 0.09586978912353515, 0.09787187194824219, 0.09991149139404297, 0.096165283203125, 0.0979543685913086, 0.09674400329589844, 0.09547042846679688, 0.09601251220703125, 0.09614601898193359, 0.09559040069580078, 0.09545724487304688, 0.09541817474365234, 0.09562748718261718, 0.09521561431884766, 0.09655001831054688, 0.09584697723388672, 0.09760102081298828, 0.095718017578125, 0.09472428894042968, 0.09534259033203125, 0.0964315185546875, 0.095538818359375, 0.09869798278808593, 0.09695458984375, 0.0969195556640625, 0.09734508514404297, 0.09588937377929688, 0.09688521575927735, 0.09783296203613281, 0.09625190734863281, 0.09691283416748046, 0.09701945495605468, 0.09594572448730469, 0.09704239654541015, 0.09626422119140625, 0.09537104034423828, 0.09552710723876953, 0.09474665832519531, 0.09561087799072265, 0.09539788818359375, 0.09596723175048828, 0.09638502502441407, 0.09471750640869141, 0.09529798126220704, 0.09503743743896484, 0.09523814392089844, 0.09554739379882812, 0.09629660797119141, 0.09515449523925781, 0.09476303863525391, 0.09540115356445313, 0.09585084533691406, 0.09718745422363281, 0.09470857238769531, 0.0948897933959961, 0.09440835571289062, 0.09515673828125, 0.09523590087890625, 0.09501286315917969, 0.09760972595214844, 0.09601853179931641, 0.09557791900634766, 0.09520137786865235, 0.09562726593017579, 0.0961533432006836, 0.09614771270751953, 0.09689852905273437, 0.09622566223144531, 0.09549021148681641, 0.09495756530761719, 0.09610854339599609, 0.09559654235839844, 0.0951562271118164, 0.09518284606933594, 0.09483660888671876, 0.09431382751464844, 0.09470137786865235, 0.09423693084716797, 0.0944801254272461, 0.09482240295410156, 0.09517343902587891, 0.09611612701416015, 0.09527308654785156, 0.09501312255859375, 0.09466284942626953, 0.09453142547607422, 0.09462409973144531, 0.09478546905517578, 0.09477251434326171, 0.09447910308837891, 0.09426073455810546, 0.09452732849121094, 0.09549046325683594, 0.0944919662475586, 0.0943216323852539, 0.094355712890625, 0.09432243347167969, 0.09501862335205079, 0.09508697509765625, 0.09459916687011719, 0.09488079833984375, 0.09509081268310547, 0.09560492706298829, 0.09486720275878906, 0.09497183990478515, 0.10330601501464844, 0.09443100738525391, 0.09424671936035156, 0.09688655853271484, 0.09412687683105468, 0.09439199829101562, 0.09454752349853515, 0.09433513641357422, 0.09450707244873047, 0.09518956756591797, 0.09482649230957031, 0.09582592010498046, 0.09439356994628906, 0.09457046508789063, 0.09672962951660156, 0.0954161605834961]",tokens/s,10.469626743914844,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,1896.706048,1057.947648,0.0,662.700032,622.833664,s,1,9.2692841796875,9.2692841796875,0.0,9.2692841796875,9.2692841796875,9.2692841796875,9.2692841796875,[9.2692841796875],,kWh,5.9649310204152544e-05,6.572552757026549e-06,2.1403072677991797e-05,8.762493563917089e-05,,MB,1939.795968,1181.679616,0.0,765.46048,733.871104,s,10,0.6155335044860839,0.06155335044860839,0.00022180813588524277,0.0615022087097168,0.061793860244750975,0.0618812822341919,0.06195121982574463,"[0.06154595184326172, 0.06177443313598633, 0.06167647933959961, 0.061381248474121096, 0.061244766235351564, 0.0612828483581543, 0.06174854278564453, 0.06145206451416016, 0.06145846557617188, 0.06196870422363281]",tokens/s,4158.99375313026,kWh,1.8151124012682475e-06,2.0017424931970968e-07,9.000007200000811e-07,2.9152873705880382e-06,tokens/kWh,87812955.45089355,MB,1944.014848,1192.165376,0.0,775.94624,748.240384,s,10,37.76376586914063,3.7763765869140626,0.010053992689648775,3.7759786376953124,3.7856682861328124,3.7918682006835938,3.796828132324219,"[3.7733896484375, 3.780630859375, 3.7761025390625, 3.762035888671875, 3.761979736328125, 3.78429052734375, 3.771435791015625, 3.798068115234375, 3.775854736328125, 3.77997802734375]",tokens/s,16.682658243965452,kWh,0.00011029425315748479,1.2165164813399175e-05,4.338264581719974e-05,0.0001658420637880837,tokens/kWh,379879.4983672095,,s,630,37.75729852676391,0.059932219883752254,0.0006360604396337902,0.05986535835266113,0.06039576988220215,0.060804128265380855,0.06275842662811279,"[0.05902950286865234, 0.05941420745849609, 0.05929391860961914, 0.0595497932434082, 0.06069657516479492, 0.06140550231933594, 0.05985782241821289, 0.05959964752197266, 0.05993401718139649, 0.05960358428955078, 0.05967468643188477, 0.05986713409423828, 0.05979340744018555, 0.06025328063964844, 0.06011209487915039, 0.06067910385131836, 0.060885761260986326, 0.05981798553466797, 0.05941443252563477, 0.059832191467285155, 0.0596453742980957, 0.059781150817871095, 0.0598823356628418, 0.05989980697631836, 0.05949625778198242, 0.06007212829589844, 0.060018848419189454, 0.059637599945068356, 0.059393726348876956, 0.059488574981689454, 0.06198681640625, 0.0605010871887207, 0.05932287979125977, 0.05945180892944336, 0.059891712188720705, 0.05943078231811524, 0.05938188934326172, 0.06029248046875, 0.059579071044921876, 0.059604961395263674, 0.05943088150024414, 0.05950006484985351, 0.060953056335449216, 0.05972524642944336, 0.059736640930175784, 0.05976019287109375, 0.059685310363769534, 0.06000230407714844, 0.0598076171875, 0.060353919982910155, 0.059605758666992185, 0.05964137649536133, 0.059609569549560544, 0.05962710571289063, 0.059781375885009765, 0.05970249557495117, 0.059651359558105466, 0.060028736114501956, 0.06010358428955078, 0.06028908920288086, 0.06004038238525391, 0.06013100814819336, 0.06007625579833984, 0.05989718246459961, 0.059832286834716794, 0.05980448150634766, 0.060006271362304686, 0.05983148956298828, 0.059916511535644534, 0.05975875091552734, 0.05994089508056641, 0.06012911987304687, 0.06172083282470703, 0.05999852752685547, 0.06017638397216797, 0.05978726577758789, 0.05953289413452149, 0.05940620803833008, 0.05956047821044922, 0.059913887023925784, 0.05991049575805664, 0.05984988784790039, 0.05936624145507813, 0.05921791839599609, 0.05910444641113281, 0.059636417388916015, 0.05961068725585938, 0.059593441009521485, 0.05960073471069336, 0.05974649429321289, 0.05950611114501953, 0.06014604949951172, 0.059469825744628904, 
0.05955088043212891, 0.06031241607666016, 0.059570049285888674, 0.06382777786254883, 0.059993473052978516, 0.05960540771484375, 0.0599967041015625, 0.05989116668701172, 0.05999055862426758, 0.06005724716186524, 0.059963520050048826, 0.059905952453613284, 0.05998579025268555, 0.05991596984863281, 0.06014009475708008, 0.060179710388183594, 0.05990086364746094, 0.060031326293945315, 0.06001193618774414, 0.059881599426269534, 0.06461436462402344, 0.06070697784423828, 0.059822078704833984, 0.06014976119995117, 0.05958019256591797, 0.059611358642578126, 0.05988457489013672, 0.05974310302734375, 0.059920063018798826, 0.05980815887451172, 0.05979878234863281, 0.060035839080810546, 0.0596492805480957, 0.05893734359741211, 0.05932003021240234, 0.05986556625366211, 0.05959158325195312, 0.06453270721435547, 0.059460319519042966, 0.05935449600219726, 0.0594601936340332, 0.059672031402587894, 0.059423263549804685, 0.05957638549804688, 0.05989574432373047, 0.05937356948852539, 0.05966438293457031, 0.05989580917358398, 0.05941398239135742, 0.05999465560913086, 0.0599285774230957, 0.05957222366333008, 0.06003612899780274, 0.06003152084350586, 0.0595865592956543, 0.06011945724487305, 0.06079286575317383, 0.06024192047119141, 0.059811809539794925, 0.05982620620727539, 0.060037120819091794, 0.05986713409423828, 0.05994697570800781, 0.060053535461425785, 0.060561214447021484, 0.06123779296875, 0.060006366729736325, 0.06000028610229492, 0.0599384651184082, 0.060071617126464846, 0.061071582794189457, 0.05976822280883789, 0.059340862274169924, 0.059142112731933594, 0.0588950080871582, 0.059445247650146485, 0.060746910095214844, 0.0603185920715332, 0.05994838333129883, 0.059764511108398435, 0.06009328079223633, 0.05907257461547852, 0.059316158294677734, 0.06000559997558594, 0.05966713714599609, 0.0610263671875, 0.060190399169921874, 0.06332860946655273, 0.060080127716064455, 0.05918339157104492, 0.05908160018920899, 0.05897097778320313, 0.0591129264831543, 0.05926380920410156, 0.05962688064575195, 0.05989852905273438, 0.059498336791992186, 0.05980313491821289, 0.060453697204589846, 0.05951027297973633, 0.060193279266357425, 0.060085342407226565, 0.06000323104858398, 0.060391681671142575, 0.05984844970703125, 0.05957244873046875, 0.05973728179931641, 0.060091201782226565, 0.059797279357910155, 0.0600384635925293, 0.060157886505126955, 0.05985279846191406, 0.06008668899536133, 0.06008457565307617, 0.059256542205810545, 0.05921206283569336, 0.05919295883178711, 0.05922035217285156, 0.05944054412841797, 0.05947865676879883, 0.05900694274902344, 0.058589183807373046, 0.058448223114013674, 0.058861217498779296, 0.05922796630859375, 0.05954313659667969, 0.0594392318725586, 0.05979388809204102, 0.05943033599853516, 0.05977350234985351, 0.05963481521606445, 0.05946252822875977, 0.059445152282714846, 0.0594741439819336, 0.05924844741821289, 0.0597402229309082, 0.059612895965576174, 0.05934067153930664, 0.059566497802734375, 0.059344894409179685, 0.059436927795410155, 0.059705535888671876, 0.05978339385986328, 0.060058494567871094, 0.061217086791992184, 0.06011958312988281, 0.060386592864990235, 0.05981872177124024, 0.05971068954467774, 0.05962847900390625, 0.05976700973510742, 0.059889278411865234, 0.05996140670776367, 0.059951038360595704, 0.05994905471801758, 0.06019001770019531, 0.06018463897705078, 0.059908447265625, 0.05973430252075195, 0.059392158508300784, 0.059708446502685544, 0.05990572738647461, 0.06007583999633789, 0.05936966323852539, 0.05891206359863281, 0.05847635269165039, 0.05914316940307617, 
0.05967184066772461, 0.05973462295532227, 0.059639041900634765, 0.05957708740234375, 0.059535358428955076, 0.05900492858886719, 0.058721694946289066, 0.05905452728271485, 0.059676929473876955, 0.0599463996887207, 0.05945529556274414, 0.05971142578125, 0.060141727447509764, 0.05961724853515625, 0.05935577774047852, 0.05941459274291992, 0.05936848068237305, 0.06106569671630859, 0.05961103820800781, 0.05971308898925781, 0.0596715202331543, 0.05954767990112304, 0.0593919677734375, 0.059991424560546874, 0.05990393447875977, 0.05995187377929687, 0.06003078460693359, 0.060031105041503906, 0.06046121597290039, 0.06048972702026367, 0.05986899185180664, 0.05962895965576172, 0.059703296661376956, 0.05993471908569336, 0.060177024841308595, 0.059996158599853515, 0.05997875213623047, 0.059705886840820316, 0.059478271484375, 0.059619518280029295, 0.05959683227539062, 0.05977088165283203, 0.05957846450805664, 0.05954470443725586, 0.05964905548095703, 0.060953887939453125, 0.05964617538452149, 0.05994112014770508, 0.05961318588256836, 0.059648159027099606, 0.05974204635620117, 0.059868766784667966, 0.05984035110473633, 0.05966700744628906, 0.05974009704589844, 0.059166465759277344, 0.05948441696166992, 0.059743518829345706, 0.059626209259033204, 0.05977088165283203, 0.059466785430908206, 0.060016830444335936, 0.05984700775146484, 0.05983814239501953, 0.05947264099121094, 0.05958860778808594, 0.05971795272827148, 0.06004732894897461, 0.0598504638671875, 0.059963520050048826, 0.059574207305908206, 0.0602391357421875, 0.059908576965332035, 0.060768192291259765, 0.06136975860595703, 0.0603138542175293, 0.060068126678466796, 0.060289344787597655, 0.060337310791015626, 0.06001948928833008, 0.060216991424560544, 0.060170463562011715, 0.06270921707153321, 0.06036108779907227, 0.06159600067138672, 0.06106316757202149, 0.060030975341796876, 0.059569919586181644, 0.059807998657226566, 0.05985452651977539, 0.06025872039794922, 0.059840415954589846, 0.05995481491088867, 0.05985500717163086, 0.060575008392333984, 0.05983942413330078, 0.05963740921020508, 0.05924105453491211, 0.05935488128662109, 0.05969510269165039, 0.059881599426269534, 0.06000966262817383, 0.05968147277832031, 0.05996953582763672, 0.06009980773925781, 0.06042499160766602, 0.06013132858276367, 0.05970095825195312, 0.05981622314453125, 0.05976268768310547, 0.0598355827331543, 0.059919105529785154, 0.05993024063110351, 0.060429790496826175, 0.06083273696899414, 0.06041151809692383, 0.06022409439086914, 0.060437278747558595, 0.0598919677734375, 0.06031167984008789, 0.060192798614501955, 0.0600159683227539, 0.060015392303466794, 0.059947135925292966, 0.05982527923583984, 0.05988230514526367, 0.059694976806640626, 0.05990412902832031, 0.059529216766357425, 0.06002483367919922, 0.05930521774291992, 0.059411201477050785, 0.05961872100830078, 0.05981961441040039, 0.05950156784057617, 0.05984441757202148, 0.06003760147094726, 0.05964566421508789, 0.05923193740844727, 0.059256481170654296, 0.05979344177246094, 0.06241984176635742, 0.05969689559936524, 0.05947600173950195, 0.05908009719848633, 0.059118144989013674, 0.059276512145996094, 0.05927123260498047, 0.05968764877319336, 0.05987945556640625, 0.05979043197631836, 0.05960793685913086, 0.05959804916381836, 0.05955641555786133, 0.05976505661010742, 0.059786975860595705, 0.05980527877807617, 0.060037185668945316, 0.06002067184448242, 0.059894527435302734, 0.060558494567871095, 0.06014841461181641, 0.0600964469909668, 0.05987916946411133, 0.06045727920532227, 0.06029836654663086, 0.06006889724731445, 
0.060042335510253904, 0.06002259063720703, 0.06015871810913086, 0.06035612869262695, 0.060186878204345706, 0.06016041564941406, 0.05997772979736328, 0.05983027267456055, 0.05969900894165039, 0.059649856567382815, 0.05974784088134766, 0.059853694915771485, 0.05954719924926758, 0.05958252716064453, 0.060255615234375, 0.06484662628173828, 0.06008921432495117, 0.06019580841064453, 0.060167743682861326, 0.059859390258789065, 0.060052833557128905, 0.06041667175292969, 0.059988094329833985, 0.05996297454833984, 0.05997187042236328, 0.0613397102355957, 0.06001657485961914, 0.0600186882019043, 0.06005385589599609, 0.05993231964111328, 0.060254207611083986, 0.060037120819091794, 0.06009347152709961, 0.06018899154663086, 0.06035932922363281, 0.060108993530273436, 0.05999801635742188, 0.060259742736816405, 0.06040636825561523, 0.06031561660766602, 0.0604549446105957, 0.060313568115234376, 0.06058111953735352, 0.06035257720947266, 0.06013513565063477, 0.06020540618896485, 0.060650142669677734, 0.06029939270019531, 0.06039459228515625, 0.0603015022277832, 0.06026633453369141, 0.060193313598632815, 0.06002070236206054, 0.060090625762939456, 0.0597212142944336, 0.06006604766845703, 0.060209312438964845, 0.05997151947021485, 0.060497600555419924, 0.06003142547607422, 0.0608133430480957, 0.0596338882446289, 0.05938166427612305, 0.060129150390625, 0.06074399948120117, 0.05991187286376953, 0.059582462310791014, 0.05973516845703125, 0.0598287353515625, 0.059910526275634764, 0.060114944458007816, 0.06220764923095703, 0.06090172958374023, 0.06014963150024414, 0.06012688064575195, 0.06020316696166992, 0.060112319946289065, 0.059360832214355466, 0.05961772918701172, 0.06007859039306641, 0.060385280609130856, 0.06143385696411133, 0.06011084747314453, 0.060055553436279295, 0.06010713577270508, 0.06032313537597656, 0.059980224609375, 0.06004851150512695, 0.06005740737915039, 0.0599090576171875, 0.05995315170288086, 0.06082262420654297, 0.06003977584838867, 0.059983840942382814, 0.060229984283447266, 0.06012313461303711, 0.06006079864501953, 0.06026847839355469, 0.05963462448120117, 0.059711360931396486, 0.05983379364013672, 0.059488960266113285, 0.06020473480224609, 0.0596932487487793, 0.05983251190185547, 0.05987526321411133, 0.05975244903564453, 0.059776992797851564, 0.05978319931030274, 0.059719680786132816, 0.05945910263061523, 0.059443233489990234, 0.059393505096435546, 0.05973417663574219, 0.05972598266601563, 0.059392673492431644, 0.059268447875976564, 0.05935801696777344, 0.05955712127685547, 0.059539520263671875, 0.059454143524169924, 0.059686752319335935, 0.059668479919433595, 0.05964822387695313, 0.060093441009521485, 0.05984703826904297, 0.05985456085205078, 0.05988217544555664, 0.05971558380126953, 0.05984467315673828, 0.05987033462524414, 0.06001155090332031, 0.06013708877563476, 0.05984476852416992, 0.05984675216674805, 0.0599054069519043, 0.060087936401367184, 0.059974559783935545, 0.06038457489013672, 0.06227555084228516, 0.06062992095947266, 0.06008339309692383, 0.060057952880859376, 0.05984115219116211, 0.059963039398193356, 0.05997308731079101, 0.05985772705078125, 0.05969510269165039, 0.059799327850341796, 0.059815841674804686, 0.06042371368408203, 0.05955686569213867, 0.05938560104370117, 0.05933599853515625, 0.05978367996215821, 0.05993929672241211, 0.06013443374633789, 0.05986515045166016, 0.059652576446533205, 0.05945337677001953, 0.05935878372192383, 0.06280876922607422, 0.060006401062011716, 0.06009267044067383, 0.06003424072265625, 0.060006977081298825, 0.06028265762329101, 
0.05951715087890625, 0.059305950164794924, 0.05924252700805664, 0.05968384170532227, 0.059931198120117185, 0.0599571533203125, 0.06061248016357422, 0.05974822235107422, 0.059797889709472654, 0.060174175262451175, 0.06008505630493164, 0.060419841766357424, 0.06277852630615234, 0.061300575256347654, 0.06042086410522461, 0.060246112823486325, 0.060123584747314454, 0.06023667144775391, 0.06026739120483399, 0.06020083236694336, 0.06077040100097656, 0.060211200714111325, 0.05970246505737305, 0.060146495819091796, 0.059123233795166014, 0.05942335891723633, 0.05982428741455078, 0.05946540832519531, 0.05973811340332031, 0.06003628921508789, 0.05971673583984375, 0.05910086441040039, 0.05920767974853516, 0.05960819244384766, 0.05978015899658203, 0.059606849670410154]",tokens/s,16.68551576997571,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in 
get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, 
**kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp0sdla2nn/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpghysea6m/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,14672.269312,7846.428672,0.0,7451.181056,7445.507072,s,1,32.763345703125,32.763345703125,0.0,32.763345703125,32.763345703125,32.763345703125,32.763345703125,[32.763345703125],,kWh,0.0007489382572000029,8.260589899749046e-05,0.0002828680040719983,0.0011144121602694916,,MB,1351.00416,7997.423616,0.0,7581.20448,7570.843648,s,10,1.2404065856933593,0.12404065856933592,0.00030640996927625664,0.12393275070190429,0.12430681533813477,0.12457982215881347,0.12479822761535644,"[0.12392790222167968, 0.12415155029296875, 0.12395753479003906, 0.12372345733642579, 0.1239375991821289, 0.12389715576171875, 0.12390732574462891, 0.12485282897949218, 0.12424614715576172, 0.12380508422851562]",tokens/s,2063.8394132428907,kWh,3.6786270893749892e-06,4.056642756230992e-07,2.4400644520500062e-06,6.524355817048094e-06,tokens/kWh,39237590.22018294,MB,1371.017216,8010.006528,0.0,7593.787392,7514.46784,s,10,73.00622705078125,7.3006227050781245,0.016843565972124462,7.300080810546875,7.32080166015625,7.3236291015625,7.3258910546875,"[7.310501953125, 7.32017333984375, 7.32645654296875, 7.31858203125, 7.2788623046875, 7.3018740234375, 7.29828759765625, 7.2871904296875, 7.2786982421875, 7.2856005859375]",tokens/s,8.629400880582258,kWh,0.0002144734256918747,2.3657444612008882e-05,0.00010982166424615,0.00034795253455003364,tokens/kWh,181059.1783200273,,s,630,73.00248457336426,0.11587695964026072,0.00110443629695133,0.11570410919189453,0.11685232391357421,0.11739402809143067,0.11972997146606446,"[0.11452957153320313, 0.11483833312988281, 0.11656124877929687, 0.11619590759277344, 0.11579801940917969, 0.11623744201660156, 0.11662425231933594, 0.11568879699707031, 0.11590313720703126, 0.11606221008300781, 0.11550310516357422, 0.11642403411865235, 0.11569538879394531, 0.11946640014648438, 0.11668527984619141, 0.11578368377685547, 0.1155125732421875, 0.11620771026611328, 0.11576182556152344, 0.11564543914794922, 0.11673193359375, 0.11538022613525391, 0.11539759826660156, 0.11620297241210938, 0.11548316955566407, 0.11542034912109375, 0.11580604553222656, 0.11577632141113281, 0.11681517028808594, 0.11605487823486328, 0.1160478744506836, 0.11687036895751954, 0.11608246612548828, 0.11632128143310547, 0.11547443389892578, 0.11624038696289063, 0.11539826965332031, 0.11659506988525391, 0.11608380889892578, 0.11556515502929687, 0.11602333068847656, 0.11582825469970703, 0.1155917739868164, 0.11728233337402344, 0.11528256225585938, 0.11529148864746094, 0.11691677093505859, 0.11581862640380859, 0.11657981109619141, 0.11656031799316406, 0.11620156860351563, 0.11576521301269531, 0.11589430236816406, 0.11626697540283203, 0.11675651550292969, 0.11668275451660157, 0.11625676727294922, 0.11612569427490234, 0.11578691101074219, 0.11580038452148438, 0.11547293090820313, 
0.11570175933837891, 0.11536895751953125, 0.11687737274169922, 0.11508806610107422, 0.11549465942382812, 0.11538022613525391, 0.11739981079101562, 0.11721932983398438, 0.11541814422607422, 0.11634505462646484, 0.11545062255859374, 0.11627254486083985, 0.11633724975585938, 0.11598438262939453, 0.11570508575439453, 0.11619609832763672, 0.11603753662109376, 0.11586975860595704, 0.11719388580322265, 0.11563097381591797, 0.11904204559326172, 0.11523043060302735, 0.1154985580444336, 0.1155939483642578, 0.11538019561767578, 0.11484162902832032, 0.1147146224975586, 0.11590198516845703, 0.11548925018310546, 0.11686707305908203, 0.11655577850341797, 0.11528806304931641, 0.11583001708984375, 0.1160947494506836, 0.11587200164794922, 0.11686780548095703, 0.11605197143554688, 0.11841741180419922, 0.11738861083984375, 0.11623465728759766, 0.11661891174316406, 0.11627372741699218, 0.11578374481201172, 0.11564236450195313, 0.11643084716796875, 0.11521228790283203, 0.11620556640625, 0.11628134155273437, 0.1159331817626953, 0.11620352172851563, 0.11607244873046875, 0.11576729583740235, 0.11539250946044922, 0.1168506851196289, 0.11611039733886719, 0.11683936309814454, 0.11711270141601562, 0.11536192321777344, 0.11720294189453125, 0.11638169860839843, 0.1161871337890625, 0.11709645080566407, 0.11624447631835938, 0.11681177520751954, 0.116748291015625, 0.11527318572998047, 0.11543401336669921, 0.11540995025634766, 0.11503043365478516, 0.11558560180664063, 0.11524079895019532, 0.11697577667236328, 0.11695254516601562, 0.11592963409423829, 0.11561779022216796, 0.11573407745361328, 0.11544416046142578, 0.11647369384765625, 0.1158043212890625, 0.11612911987304687, 0.11678543853759765, 0.11621932983398438, 0.11657305908203125, 0.11668262481689454, 0.11643309020996094, 0.11674009704589844, 0.11842912292480469, 0.11768070220947266, 0.11574877166748047, 0.11583853149414063, 0.11601692962646484, 0.11547519683837891, 0.11855046081542969, 0.11601926422119141, 0.11558707427978515, 0.11647984313964843, 0.11621392059326172, 0.11643907165527344, 0.11696028900146484, 0.11560374450683594, 0.11595843505859375, 0.11663359832763671, 0.11612569427490234, 0.11711443328857422, 0.11586342620849609, 0.12008096313476563, 0.117136962890625, 0.11636573028564454, 0.11649846649169922, 0.11624652862548829, 0.11608882904052735, 0.115525634765625, 0.11608198547363281, 0.1155038070678711, 0.11671961975097657, 0.11607202911376953, 0.11635689544677734, 0.11618303680419922, 0.11603321838378906, 0.11581321716308594, 0.11521647644042969, 0.11638486480712891, 0.11721820831298828, 0.11633574676513672, 0.11634108734130859, 0.11624880218505859, 0.11627142333984375, 0.11613798522949219, 0.11616416168212891, 0.11554962921142578, 0.11656089782714844, 0.11655958557128906, 0.11545629119873047, 0.11870003509521485, 0.11551948547363282, 0.11541241455078124, 0.11497325134277343, 0.1163076171875, 0.11509590148925782, 0.11628463745117187, 0.11531753540039062, 0.11491680145263672, 0.11540316772460937, 0.11536124420166016, 0.11543385314941407, 0.1155423355102539, 0.11577932739257812, 0.1160660171508789, 0.11579996490478515, 0.11607920074462891, 0.11622841644287109, 0.11564972686767579, 0.1157801284790039, 0.11557273864746094, 0.11568476867675781, 0.11533168029785157, 0.1160970230102539, 0.11662739562988281, 0.11583209228515624, 0.11573725128173828, 0.11552985382080078, 0.11511398315429687, 0.11516108703613281, 0.11540275573730469, 0.11520614624023437, 0.11580006408691407, 0.11532083129882813, 0.11495423889160156, 0.11555225372314454, 0.11504169464111329, 
0.11504227447509766, 0.11609970855712891, 0.11823030090332032, 0.11625468444824219, 0.1170862045288086, 0.11584998321533203, 0.11606221008300781, 0.12886015319824218, 0.11599180603027344, 0.11555916595458984, 0.11538960266113281, 0.11507183837890625, 0.11600873565673828, 0.11629180908203125, 0.1157918701171875, 0.11571405029296875, 0.11593059539794921, 0.11589647674560546, 0.116271484375, 0.11578892517089844, 0.12510502624511718, 0.11464498901367187, 0.11496979522705078, 0.11554835510253907, 0.11577571105957031, 0.11553833770751953, 0.11566079711914062, 0.11619328308105469, 0.11679948425292969, 0.11659056091308594, 0.11601923370361328, 0.11529420471191407, 0.11530400085449219, 0.11545849609375, 0.11508274841308594, 0.11535411071777343, 0.11777606201171875, 0.11600313568115235, 0.11548652648925781, 0.11446495819091797, 0.11589836883544923, 0.11531999969482422, 0.1149714584350586, 0.11472473907470702, 0.1153148193359375, 0.11522787475585937, 0.1162402572631836, 0.11573289489746094, 0.11688396453857422, 0.11598783874511719, 0.11566758728027343, 0.1160060806274414, 0.11558380889892578, 0.11530976104736328, 0.11490982055664062, 0.11540908813476562, 0.11472383880615235, 0.11536630249023437, 0.1152518081665039, 0.11517513275146485, 0.11479273223876953, 0.11486729431152344, 0.11498384094238281, 0.11565007781982421, 0.11554045104980469, 0.11450748443603516, 0.11555458831787109, 0.11536383819580077, 0.11473101043701171, 0.11535529327392578, 0.11622434997558594, 0.1161124496459961, 0.11739846038818359, 0.1167831039428711, 0.11582054138183594, 0.1159188461303711, 0.1151119384765625, 0.11510578918457032, 0.11470579528808594, 0.11499378967285157, 0.115525634765625, 0.11549462127685547, 0.11498086547851563, 0.11529379272460938, 0.11411670684814453, 0.11467472076416016, 0.11737187194824218, 0.1153611831665039, 0.11579596710205078, 0.11557743835449219, 0.11555379486083985, 0.12084889221191407, 0.11642259216308594, 0.11577667236328125, 0.11585775756835938, 0.11622252655029297, 0.115736572265625, 0.11689299011230468, 0.11626566314697266, 0.11529734039306641, 0.11509446716308594, 0.11529216003417969, 0.11589753723144532, 0.11549983978271484, 0.11558911895751953, 0.11799552154541015, 0.11575462341308594, 0.11486991882324218, 0.11506723022460938, 0.11505427551269531, 0.11476573181152344, 0.11488092803955079, 0.115089599609375, 0.11552086639404296, 0.11547325134277343, 0.1158818588256836, 0.1151878433227539, 0.11618029022216797, 0.11619193267822266, 0.11604563140869141, 0.1158852767944336, 0.11848802947998047, 0.11654659271240235, 0.11593212890625, 0.11509760284423828, 0.11547853088378907, 0.11558297729492187, 0.1151072006225586, 0.11537471771240235, 0.11601110076904297, 0.11583888244628907, 0.11573043060302735, 0.11558204650878906, 0.1155367660522461, 0.11564220428466797, 0.11663318634033203, 0.11507977294921876, 0.11521024322509765, 0.11493775939941406, 0.11575033569335938, 0.11785282897949219, 0.1165660171508789, 0.11696537780761719, 0.11634073638916016, 0.11640831756591796, 0.11642675018310547, 0.11637964630126953, 0.11514243316650391, 0.11537612915039062, 0.11561583709716797, 0.11594560241699219, 0.11534646606445312, 0.11527164459228516, 0.11720953369140626, 0.11546422576904297, 0.115392578125, 0.11623881530761719, 0.11964415740966797, 0.11600895690917969, 0.1157754898071289, 0.11559241485595703, 0.11547318267822265, 0.11555840301513672, 0.1156193618774414, 0.1159110107421875, 0.11647193908691406, 0.11600281524658203, 0.11582182312011718, 0.11624479675292969, 0.11561619567871094, 0.11538329315185547, 
0.1190983657836914, 0.11521842956542969, 0.11550105285644531, 0.11493312072753906, 0.11514534759521484, 0.11546419525146484, 0.11618035125732422, 0.11477046203613281, 0.11458342742919922, 0.11455276489257812, 0.11488655853271484, 0.11474777221679687, 0.1149111328125, 0.11619868469238281, 0.1154629135131836, 0.11552703857421875, 0.11597395324707031, 0.11639078521728516, 0.11929097747802735, 0.12006486511230469, 0.11651225280761719, 0.11616108703613282, 0.11594547271728516, 0.11477401733398437, 0.11515443420410157, 0.11530496215820313, 0.11524505615234375, 0.1146081314086914, 0.1152146224975586, 0.11593494415283204, 0.11546141052246094, 0.11540348815917968, 0.1153986587524414, 0.11592499542236329, 0.11523430633544922, 0.11510012817382813, 0.11743030548095704, 0.11578163146972656, 0.11629519653320312, 0.11486300659179688, 0.11534726715087891, 0.11598799896240235, 0.11627798461914063, 0.11551542663574219, 0.11631206512451171, 0.1156648941040039, 0.11658035278320313, 0.11745442962646484, 0.11464054107666016, 0.11527449798583984, 0.11492147064208984, 0.11475302124023437, 0.11493836975097656, 0.11556790161132813, 0.11580486297607422, 0.11555433654785156, 0.11554716491699218, 0.11587068939208985, 0.11560755157470703, 0.11505049896240234, 0.11480025482177734, 0.11490243530273438, 0.11586653137207031, 0.11570553588867187, 0.11593891143798828, 0.11583977508544922, 0.11565650939941406, 0.11546588897705078, 0.11580620574951171, 0.11529475402832032, 0.11477398681640626, 0.11639318084716797, 0.11510150146484376, 0.11568172454833985, 0.1150469741821289, 0.11505609893798828, 0.11469878387451173, 0.1152696304321289, 0.11472077178955079, 0.11540070343017578, 0.11531059265136719, 0.11496166229248046, 0.11542400360107422, 0.11492710113525391, 0.11532486724853516, 0.11784614562988281, 0.11624428558349609, 0.11549734497070313, 0.11582486724853516, 0.11602537536621094, 0.11586339569091797, 0.11976502227783203, 0.11631001281738282, 0.11598451232910156, 0.11549081420898437, 0.11571772766113281, 0.11545846557617187, 0.1172152328491211, 0.11567922973632813, 0.11570313262939454, 0.11511465454101563, 0.1161523208618164, 0.11456671905517578, 0.11443654632568359, 0.11782345581054687, 0.11531849670410156, 0.1149603500366211, 0.1152569580078125, 0.11484355163574218, 0.11507379150390624, 0.11519391632080078, 0.11592908477783204, 0.11622809600830078, 0.1167237091064453, 0.11624447631835938, 0.1160263671875, 0.11568156433105468, 0.11551817321777344, 0.11504774475097657, 0.11537667083740234, 0.1172031021118164, 0.11671142578125, 0.11492556762695312, 0.11512387084960937, 0.11513203430175781, 0.11528422546386718, 0.1152248306274414, 0.1157798080444336, 0.11562300872802735, 0.11537059020996093, 0.11661138916015625, 0.1156888656616211, 0.11584976196289062, 0.11620912170410157, 0.11556486511230468, 0.11563037109375, 0.11561574554443359, 0.11579718780517578, 0.11608553314208984, 0.11619261169433594, 0.11529196929931641, 0.11514969635009766, 0.11481292724609375, 0.11512131500244141, 0.11493666839599609, 0.11474931335449219, 0.11454038238525391, 0.11586326599121094, 0.11523129272460937, 0.114789794921875, 0.11492617797851562, 0.11482099151611327, 0.11505059051513672, 0.11475772857666015, 0.11585939025878907, 0.11445452880859375, 0.11580003356933594, 0.11611539459228516, 0.11541891479492188, 0.11693849945068359, 0.11555213165283203, 0.11575759887695312, 0.11596611022949219, 0.11527161407470703, 0.115281982421875, 0.11398477172851562, 0.113991455078125, 0.11429484558105468, 0.11448822021484376, 0.11439449310302735, 0.11692505645751954, 
0.11445577239990234, 0.11445327758789063, 0.11527155303955078, 0.11632621002197266, 0.11954402923583984, 0.11546562957763672, 0.11484844970703124, 0.1145528335571289, 0.1148375015258789, 0.1152732162475586, 0.11818982696533203, 0.1166867218017578, 0.11629206085205078, 0.1159559326171875, 0.11615574645996093, 0.11649520111083984, 0.11457091522216797, 0.11501590728759765, 0.11500147247314453, 0.1203773422241211, 0.11702297973632812, 0.11712281799316407, 0.11530159759521484, 0.11517008209228516, 0.11482281494140625, 0.11503392028808594, 0.11487081909179687, 0.1164222412109375, 0.1154677734375, 0.11506781005859375, 0.11548876953125, 0.1152791976928711, 0.11550348663330078, 0.11497459411621094, 0.11826959991455079, 0.11698198699951172, 0.11628125, 0.11780364990234375, 0.11544534301757813, 0.11539907073974609, 0.11517542266845703, 0.11500543975830078, 0.11467571258544922, 0.11506630706787109, 0.11584159851074219, 0.11535155487060547, 0.1148960952758789, 0.11508406066894532, 0.1151119384765625, 0.11504790496826171, 0.11530499267578125, 0.11487026977539062, 0.11573248291015625, 0.11535769653320313, 0.11602031707763671, 0.11568624114990235, 0.11543062591552734]",tokens/s,8.629843267414795,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1235, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1037, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 763, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 557, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp7li9pt8s/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in 
new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 102031 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward 
output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpl97_htb0/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl 
return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, 
S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpbvu8u2x2/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp4b9exejx/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, 
tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 78128 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp6508kj9_/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run 
self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpqrmf6093/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward 
layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, 
softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphnhootaa/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.42 GiB is free. Process 116353 has 13.32 GiB memory in use. Of the allocated memory 13.21 GiB is allocated by PyTorch, and 1.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", 
line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpegez_5ih/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,12217.290752,7107.117056,0.0,6704.594944,6690.791936,s,1,28.101423828125,28.101423828125,0.0,28.101423828125,28.101423828125,28.101423828125,28.101423828125,[28.101423828125],,kWh,0.0006132075959624899,6.762861531004174e-05,0.0002109923910160183,0.00089182860228855,,MB,1462.788096,7321.02656,0.0,6897.532928,6816.506368,s,10,1.1948051300048828,0.1194805130004883,0.0005477162199578166,0.11939017868041993,0.12011031723022461,0.12030620002746582,0.1204629062652588,"[0.11996553802490234, 0.12050208282470704, 0.11914844512939453, 0.11862995147705079, 0.11925766754150391, 0.11952268981933593, 0.12006678771972656, 0.11886930847167969, 0.1196575698852539, 0.11918508911132812]",tokens/s,2142.6088118566563,kWh,3.51165641180589e-06,3.864413281196093e-07,2.313034257833327e-06,6.211131997758827e-06,tokens/kWh,41216319.358914435,MB,1480.511488,7327.318016,0.0,6903.824384,6816.508928,s,10,73.6728212890625,7.367282128906249,0.010363432350135716,7.37047509765625,7.377784619140625,7.379939916992187,7.3816641552734374,"[7.38209521484375, 7.3533876953125, 7.364556640625, 7.37560693359375, 7.373357421875, 7.36988525390625, 7.3773056640625, 7.35447607421875, 7.37106494140625, 7.35108544921875]",tokens/s,8.551321762582345,kWh,0.00021549094965111205,2.3770478932849615e-05,9.624854459136699e-05,0.00033550997317532863,tokens/kWh,187773.85185828104,,s,630,73.67038871002205,0.1169371249365428,0.0011848233795631242,0.11669915008544922,0.11790299224853515,0.11911686859130859,0.1217594069671631,"[0.11924127960205078, 0.12052076721191406, 0.11849727630615234, 0.11764895629882813, 0.11758841705322266, 0.11683401489257812, 0.11737935638427735, 0.11720726776123047, 0.11602735900878906, 0.11585129547119141, 0.11634870147705079, 0.11690393829345704, 0.11911542510986328, 0.11738492584228516, 0.11739199829101563, 0.11672541046142579, 0.11607481384277343, 0.11611116790771485, 0.11600054168701172, 0.11656031799316406, 0.11764125061035156, 0.11744982147216797, 0.11699903869628907, 0.11721932983398438, 0.11692249298095703, 0.11705264282226563, 0.11641104125976562, 0.11691187286376953, 0.11711923217773437, 0.117829345703125, 0.1179316177368164, 0.1175165786743164, 0.12014348602294922, 0.11674294281005859, 0.1168179168701172, 0.11678924560546874, 0.11667155456542969, 0.11767084503173827, 0.11790656280517578, 0.12281702423095703, 0.11965174102783203, 0.11771395111083985, 0.1168055648803711, 0.11633869171142579, 0.11631526184082032, 0.11629657745361328, 0.11806022644042968, 0.11664672088623047, 0.11625062561035156, 0.11595366668701172, 0.11636080169677734, 0.11610736083984376, 0.11594169616699218, 0.11619484710693359, 0.11621826934814453, 0.11716342163085937, 0.1163082275390625, 0.116191650390625, 0.11608271789550781, 0.1191851806640625, 0.11588246154785156, 0.11586736297607422, 
0.11635910034179688, 0.1170165786743164, 0.11693465423583985, 0.11664521789550782, 0.11610320281982422, 0.11558092498779297, 0.11532966613769531, 0.11599052429199219, 0.11614803314208984, 0.117057373046875, 0.11704742431640625, 0.1172807388305664, 0.1186957778930664, 0.11670774078369141, 0.11614822387695313, 0.11586534118652343, 0.11637187194824218, 0.11665599822998046, 0.1186080322265625, 0.11829792022705078, 0.11767654418945313, 0.11649791717529297, 0.11640672302246094, 0.11926534271240234, 0.11722547149658204, 0.11648614501953125, 0.11695718383789062, 0.11671481323242187, 0.11704396820068359, 0.11596546936035156, 0.11574063873291016, 0.11574928283691406, 0.11586563110351562, 0.11624982452392578, 0.11644802856445312, 0.11735756683349609, 0.1177509765625, 0.11616822052001953, 0.11601907348632813, 0.11587216186523437, 0.11585081481933594, 0.11585580444335937, 0.11623014068603515, 0.11624409484863281, 0.11709801483154297, 0.11641123199462891, 0.11566028594970704, 0.11651328277587891, 0.11678864288330078, 0.1162733154296875, 0.11619120025634766, 0.1166299819946289, 0.11645718383789062, 0.11702301025390625, 0.11702799987792968, 0.11575587463378906, 0.11563340759277344, 0.11555506896972656, 0.11543305969238281, 0.11572675323486328, 0.11689769744873046, 0.11674614715576172, 0.12185823822021484, 0.12131533050537109, 0.11576163482666016, 0.11563827514648438, 0.11554348754882812, 0.117436767578125, 0.1200882568359375, 0.11690380859375, 0.12424803161621094, 0.11678380584716797, 0.11594080352783204, 0.11583340454101562, 0.1156704330444336, 0.11612220764160157, 0.11681775665283203, 0.11651017761230469, 0.11646192169189454, 0.11716194915771484, 0.11896665954589844, 0.116421630859375, 0.11601203155517578, 0.11635097503662109, 0.11627110290527344, 0.1164676513671875, 0.11634284973144532, 0.11649561309814453, 0.1162422103881836, 0.11635126495361328, 0.11623091125488282, 0.11651187133789062, 0.11669337463378907, 0.11692486572265624, 0.1174999008178711, 0.11875856018066407, 0.11705538940429687, 0.11873299407958984, 0.11720780944824219, 0.1169507827758789, 0.1166830062866211, 0.11673600006103516, 0.11729417419433594, 0.11744579315185547, 0.11795635223388672, 0.1175736312866211, 0.1174814682006836, 0.11643254089355469, 0.11770841979980469, 0.11816432189941406, 0.11596173095703124, 0.11628899383544922, 0.11688400268554687, 0.11664691162109375, 0.11637833404541016, 0.11706591796875, 0.11662550354003906, 0.11616786956787109, 0.11638457489013672, 0.11630326080322266, 0.11623689270019531, 0.11670550537109375, 0.11635279846191406, 0.11656396484375, 0.11699107360839844, 0.11598716735839844, 0.11590652465820313, 0.11637625885009766, 0.11712742614746094, 0.11692237091064453, 0.11658035278320313, 0.1166028823852539, 0.11668438720703125, 0.116392578125, 0.11655673980712891, 0.11648905944824219, 0.11792108917236328, 0.11777021026611328, 0.11683708953857422, 0.11645702362060546, 0.11691897583007813, 0.11655551910400391, 0.11660851287841797, 0.11632466888427734, 0.11688745880126954, 0.11820674896240234, 0.12063257598876953, 0.11744742584228515, 0.11735382080078124, 0.11716041564941407, 0.12096530914306641, 0.11683999633789062, 0.11710012817382813, 0.1172386245727539, 0.11926435089111329, 0.11710733032226563, 0.11669881439208984, 0.11646422576904297, 0.11636653137207031, 0.11636204528808594, 0.11636121368408203, 0.11708211517333984, 0.11732377624511718, 0.11653324890136718, 0.11673942565917969, 0.11941907501220703, 0.11673001861572266, 0.1167998046875, 0.11645033264160157, 0.11683734130859375, 0.11719065856933594, 
0.11681996917724609, 0.1167667236328125, 0.11614208221435547, 0.11626905822753907, 0.11660451507568359, 0.11697193908691406, 0.11658998107910157, 0.11729523468017578, 0.11818236541748046, 0.11696514892578125, 0.11649065399169922, 0.11592479705810547, 0.11679705810546875, 0.11624889373779297, 0.11685388946533203, 0.11724486541748047, 0.11734246063232422, 0.1180068130493164, 0.11715452575683594, 0.1161770248413086, 0.11598992156982423, 0.12141171264648437, 0.11659715270996093, 0.1169240951538086, 0.11675270080566406, 0.11664380645751952, 0.11638349151611328, 0.11611888122558593, 0.11633554840087891, 0.11582054138183594, 0.11672576141357421, 0.11642044830322265, 0.11715805053710937, 0.11696482849121094, 0.11685298919677735, 0.11621129608154297, 0.11574547576904297, 0.11679894256591797, 0.11588047790527344, 0.11644627380371093, 0.11754592132568359, 0.11746236419677734, 0.11669280242919922, 0.11685273742675781, 0.11643488311767577, 0.11648623657226563, 0.11692329406738282, 0.11714534759521485, 0.1175032958984375, 0.11719561767578125, 0.11744445037841797, 0.12147042846679687, 0.11709897613525391, 0.11677308654785157, 0.11686236572265625, 0.1164263687133789, 0.11669948577880859, 0.1173279037475586, 0.11738172912597657, 0.12411277008056641, 0.11768771362304688, 0.11692854309082032, 0.11626268768310546, 0.11639826965332031, 0.11676541137695312, 0.11713740539550781, 0.11725331115722656, 0.11674678039550782, 0.11696144104003907, 0.11654771423339844, 0.11641983795166015, 0.1161562271118164, 0.11595871734619141, 0.11650252532958984, 0.11626649475097656, 0.11703961944580078, 0.1169134750366211, 0.11657901000976563, 0.1194834213256836, 0.11687010955810546, 0.11673356628417969, 0.11731104278564453, 0.11694857788085937, 0.11728752136230469, 0.11726707458496094, 0.1167050552368164, 0.11613593292236328, 0.11686297607421875, 0.11588796997070312, 0.11624873352050781, 0.11687052917480469, 0.11717622375488282, 0.11702754974365234, 0.11637760162353515, 0.11636905670166016, 0.11646601867675781, 0.11600691223144531, 0.11603142547607422, 0.11638790130615234, 0.11632953643798828, 0.11739421081542968, 0.1177418212890625, 0.11695414733886719, 0.1171397476196289, 0.11649043273925781, 0.11786281585693359, 0.12213452911376953, 0.11736799621582031, 0.11699488067626954, 0.11800284576416016, 0.11666889953613281, 0.11660889434814453, 0.11623619079589843, 0.11679395294189453, 0.11617894744873047, 0.11709027099609375, 0.1165330581665039, 0.11793817901611328, 0.1177745590209961, 0.1168322525024414, 0.11688140869140624, 0.11635097503662109, 0.11656114959716797, 0.11660684967041016, 0.11627193450927735, 0.11682332611083984, 0.1181984634399414, 0.11705193328857422, 0.11640838623046874, 0.11665408325195313, 0.11658172607421875, 0.11623516845703125, 0.11635587310791015, 0.11696240234375, 0.11686284637451172, 0.11721932983398438, 0.11624857330322266, 0.11624447631835938, 0.11769651031494141, 0.11738854217529297, 0.1171299819946289, 0.1174295654296875, 0.11683296203613282, 0.11840512084960937, 0.11908710479736329, 0.11718185424804688, 0.1162799072265625, 0.11675001525878906, 0.11672812652587891, 0.11738521575927735, 0.1168506851196289, 0.11764093017578126, 0.11723567962646485, 0.11681209564208984, 0.11627110290527344, 0.11622140502929687, 0.11654956817626953, 0.11629942321777344, 0.11658131408691406, 0.11704729461669922, 0.11660460662841797, 0.1163021469116211, 0.12013906860351563, 0.11649504089355468, 0.11632640075683594, 0.11647590637207031, 0.11653494262695313, 0.11678358459472657, 0.1172003173828125, 0.11668115234375, 
0.11698790740966797, 0.11649433898925782, 0.11623548889160157, 0.11642921447753907, 0.11683078765869141, 0.11717203521728516, 0.11796275329589843, 0.1172457275390625, 0.12140361785888672, 0.11790259552001953, 0.11701119995117187, 0.11684249877929688, 0.11664310455322266, 0.11674806213378906, 0.11790169525146485, 0.11804729461669922, 0.11717987060546875, 0.11695954895019531, 0.11691558074951172, 0.11602210998535156, 0.12011315155029297, 0.11688345336914062, 0.11731763458251954, 0.11833548736572265, 0.11700656127929687, 0.11795452880859375, 0.11716178894042968, 0.117391357421875, 0.11637935638427735, 0.11712687683105469, 0.11677126312255859, 0.11772265625, 0.11766230773925782, 0.11637971496582031, 0.11634886169433593, 0.11682611083984375, 0.1169012451171875, 0.11648035430908203, 0.11616665649414062, 0.11645263671875, 0.11602403259277344, 0.11571711730957031, 0.11591782379150391, 0.11601318359375, 0.11607369232177735, 0.11642537689208984, 0.11610521697998047, 0.1162795181274414, 0.11658627319335937, 0.11648000335693359, 0.11657548522949218, 0.11663008117675781, 0.11641875457763672, 0.11652323150634766, 0.1164892807006836, 0.11747811126708985, 0.11697932434082031, 0.12866517639160155, 0.1172301788330078, 0.11727276611328125, 0.11691961669921876, 0.11631257629394531, 0.11621574401855468, 0.11647325134277343, 0.11694723510742187, 0.11675888061523437, 0.1170742416381836, 0.11623830413818359, 0.11647325134277343, 0.11615193939208984, 0.11632931518554687, 0.11628086090087891, 0.11623820495605469, 0.11859152221679688, 0.11663763427734375, 0.11630242919921875, 0.11858086395263671, 0.11694742584228515, 0.11664521789550782, 0.11656217956542969, 0.11715817260742188, 0.1163056640625, 0.1161588134765625, 0.11679542541503907, 0.11600691223144531, 0.11558287811279297, 0.11604592132568359, 0.11612774658203125, 0.11634278106689454, 0.11680973052978516, 0.11650048065185546, 0.11661878204345703, 0.11595414733886719, 0.11555161285400391, 0.11669363403320313, 0.11580524444580079, 0.11655683135986328, 0.11694480133056641, 0.11607170867919922, 0.11708204650878906, 0.11791596984863281, 0.11700444793701172, 0.11694105529785156, 0.11642835235595703, 0.12073824310302735, 0.11675164794921875, 0.11694703674316406, 0.11687181091308593, 0.11605197143554688, 0.11674784088134765, 0.11819993591308593, 0.11764924621582032, 0.11740659332275391, 0.11758601379394532, 0.11627110290527344, 0.11634687805175781, 0.1164185562133789, 0.11617068481445313, 0.11619558715820312, 0.1160804443359375, 0.11591184234619141, 0.11637641906738282, 0.1167619857788086, 0.11673664093017579, 0.11724928283691406, 0.11704601287841797, 0.11705747222900391, 0.1166615982055664, 0.11612355041503906, 0.11608723449707031, 0.11674825286865234, 0.11701859283447266, 0.11669664001464844, 0.11742240142822266, 0.11966342163085937, 0.11670706939697266, 0.11639529418945313, 0.11671830749511719, 0.1177679672241211, 0.11728889465332032, 0.11712745666503906, 0.12151744079589843, 0.11899148559570312, 0.11703705596923829, 0.11619942474365234, 0.11644441223144532, 0.11641728210449219, 0.11627680206298828, 0.11700816345214844, 0.11688972473144531, 0.11687737274169922, 0.11637932586669922, 0.11638864135742187, 0.11638963317871094, 0.11676012420654297, 0.11689619445800781, 0.11661341094970704, 0.11712633514404297, 0.1171630096435547, 0.11628486633300782, 0.11602118682861329, 0.11946358489990234, 0.11714771270751953, 0.11616508483886719, 0.11675631713867188, 0.11728297424316406, 0.11720416259765624, 0.11648595428466797, 0.1163921890258789, 0.11638655853271485, 
0.11621485137939454, 0.11622441864013672, 0.11644367980957031, 0.11659059143066407, 0.12199116516113281, 0.11683747100830078, 0.11608294677734375, 0.11604239654541015, 0.11568319702148437, 0.11634291076660157, 0.11613935852050782, 0.11698038482666015, 0.11741798400878906, 0.1174466552734375, 0.11720706939697266, 0.11722688293457031, 0.116978271484375, 0.11616255950927734, 0.1170000991821289, 0.11911804962158203, 0.11621568298339843, 0.11683123016357422, 0.11626195526123047, 0.11606419372558593, 0.11596185302734376, 0.1162480926513672, 0.11601363372802734, 0.11651881408691406, 0.11620352172851563, 0.11637350463867188, 0.11913011169433593, 0.1165601577758789, 0.11668000030517578, 0.1166719970703125, 0.11667753601074218, 0.11656320190429688, 0.11638246154785156, 0.11651993560791016, 0.11688038635253906, 0.11677641296386719, 0.1158070068359375, 0.11608467102050782, 0.11569337463378906, 0.11605635070800781, 0.1163875503540039, 0.11657743835449219, 0.116314208984375, 0.11658537292480468, 0.11676451110839844, 0.11611296081542968, 0.11567558288574219, 0.11581552124023438, 0.11584194946289063, 0.11610422515869141, 0.11655420684814453, 0.11612210845947266, 0.12016639709472657]",tokens/s,8.551604125231066,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,836.36224,4691.197952,0.0,4288.67584,4213.842432,s,1,13.1441806640625,13.1441806640625,0.0,13.1441806640625,13.1441806640625,13.1441806640625,13.1441806640625,[13.1441806640625],,kWh,0.00017811410254167488,1.963984390321556e-05,6.15728270359911e-05,0.00025932677348088155,,MB,1331.552256,5114.822656,0.0,4699.717632,4535.116288,s,10,8.495073547363281,0.8495073547363281,0.008816630293021722,0.8526043090820312,0.8563384338378907,0.8564941375732422,0.8566187005615234,"[0.8258705444335938, 0.8424969482421875, 0.8485203247070312, 0.8521842041015625, 0.8515008544921875, 0.8530244140625, 0.8566498413085938, 0.8563038330078125, 0.853122314453125, 0.8554002685546875]",tokens/s,301.3511284778197,kWh,2.4629736240277427e-05,2.7161981220857694e-06,1.6278045429832706e-05,4.36239797921959e-05,tokens/kWh,5868332.078353774,MB,1374.851072,5123.211264,0.0,4708.10624,4535.118848,s,10,39.93316381835937,3.9933163818359376,0.008029868402241526,3.9912628173828124,4.002587670898437,4.004738903808594,4.006459890136719,"[3.986468505859375, 3.981783935546875, 3.984834228515625, 3.988429443359375, 3.989638427734375, 3.99288720703125, 3.998876953125, 4.001245361328125, 4.002109619140625, 4.00689013671875]",tokens/s,15.77636079288954,kWh,0.00011720509338513529,1.292726282985848e-05,7.778761315596721e-05,0.00020791996937096104,tokens/kWh,303001.19892571913,,s,630,39.93070735549928,0.06338207516745915,0.0016543357230085244,0.06313673591613769,0.06393251190185546,0.06417336387634277,0.07448652053833008,"[0.08074723052978515, 0.06450694274902344, 0.06364553451538085, 0.06311401748657226, 0.06277939224243165, 0.06255001449584961, 0.06253772735595703, 0.06251520156860352, 
0.06354870223999023, 0.06329212951660156, 0.06303497695922852, 0.06254019165039063, 0.0625203857421875, 0.06249059295654297, 0.06258131027221679, 0.06259344100952148, 0.06253158569335937, 0.06279782485961914, 0.06259494400024414, 0.0625747184753418, 0.06258022308349609, 0.06261017608642579, 0.06261446380615235, 0.06312947082519531, 0.06306911849975586, 0.06264838409423829, 0.06378031921386719, 0.06272367858886718, 0.06271065521240235, 0.06265004730224609, 0.06267526245117187, 0.06267084884643555, 0.06390399932861328, 0.06351776123046875, 0.06328515243530274, 0.06283884811401368, 0.06276083374023438, 0.06274732971191406, 0.06269107055664062, 0.06379542541503906, 0.06338784027099609, 0.0631684799194336, 0.06274665451049805, 0.06277020645141601, 0.06275580978393555, 0.0627691535949707, 0.06379305648803711, 0.06349628829956054, 0.063246337890625, 0.06277529525756836, 0.06277939224243165, 0.06284617614746094, 0.0628171501159668, 0.06387088012695312, 0.06350636672973632, 0.0632682876586914, 0.06279350280761718, 0.06278224182128907, 0.0628856315612793, 0.06298041534423827, 0.06291404724121094, 0.06344960021972657, 0.06353100967407227, 0.07654137420654297, 0.06432006072998046, 0.06356111907958985, 0.06305023956298828, 0.06285436630249024, 0.06259596633911133, 0.06259814453125, 0.06261862564086915, 0.0626596794128418, 0.06251993560791015, 0.06251532745361328, 0.06251740646362304, 0.06247983932495117, 0.06253827285766601, 0.0626420783996582, 0.06265628814697266, 0.06280819320678711, 0.0635926399230957, 0.06309683227539062, 0.06296368026733398, 0.06256835174560547, 0.06273855972290039, 0.06363532638549804, 0.06325475311279297, 0.06304143905639649, 0.06263923263549805, 0.06262400054931641, 0.06256668853759766, 0.06260771179199219, 0.06257372665405274, 0.06260796737670898, 0.06269977569580078, 0.06259507369995117, 0.06384819030761718, 0.06353891372680665, 0.0632509765625, 0.06285311889648437, 0.06270556640625, 0.06273033523559571, 0.06272185516357422, 0.06395260620117188, 0.0636190071105957, 0.06334518432617188, 0.06290009689331055, 0.06268524932861329, 0.06277030563354492, 0.0627375373840332, 0.06391334533691406, 0.06357161712646485, 0.06335686492919922, 0.0629195213317871, 0.06282035064697265, 0.06279804611206055, 0.06285516738891601, 0.06403250885009766, 0.06370716857910157, 0.06331801605224609, 0.06290841674804687, 0.06280303955078125, 0.06282320022583007, 0.06275699234008789, 0.06277632141113282, 0.06322870254516602, 0.07385497283935546, 0.06427548980712891, 0.06351152038574219, 0.06305116653442383, 0.06280867385864258, 0.06253740692138672, 0.06247232055664063, 0.062498046875, 0.06255507278442383, 0.06258892822265626, 0.06256435012817382, 0.0626063346862793, 0.06254694366455078, 0.06250086212158203, 0.06248828887939453, 0.06310671997070312, 0.06364633560180664, 0.06379910278320312, 0.06344425582885742, 0.06314400100708008, 0.0627353286743164, 0.06258470535278321, 0.06259916687011718, 0.0625802879333496, 0.06363539123535156, 0.06343936157226562, 0.06320521545410156, 0.06268457412719726, 0.06259584045410156, 0.06261555099487305, 0.0625951042175293, 0.0638006706237793, 0.0637056007385254, 0.06322723388671875, 0.06285356903076172, 0.06268758392333984, 0.06275686264038086, 0.06269308853149413, 0.06269744110107422, 0.0641641616821289, 0.06371971130371094, 0.06327846527099609, 0.06310742568969727, 0.06276508712768555, 0.06277484893798828, 0.0627613754272461, 0.06377062225341797, 0.06346956634521485, 0.06324220657348632, 0.06273027038574219, 0.06275507354736329, 0.06282976150512695, 0.06272057723999024, 
0.06385359954833984, 0.06343369674682617, 0.06332118225097656, 0.06286572647094726, 0.0638873291015625, 0.06335542297363281, 0.06413024139404297, 0.06372025680541993, 0.06330172729492188, 0.06293251037597657, 0.07450860595703125, 0.06415328216552735, 0.06346137619018555, 0.06300188827514648, 0.06345596694946289, 0.06347564697265624, 0.06295148849487305, 0.06253567886352539, 0.06256639862060547, 0.06259286499023438, 0.06256451034545898, 0.06251929473876953, 0.06255820846557616, 0.06252544021606446, 0.06250697708129883, 0.06263388824462891, 0.06361612701416015, 0.06339203262329102, 0.06315897750854492, 0.06402870178222657, 0.06360063934326173, 0.06311897659301757, 0.06290694427490234, 0.06257849502563477, 0.06267289733886719, 0.06280531311035156, 0.06272048187255859, 0.06264649581909179, 0.06270697784423829, 0.06268592071533204, 0.06270585632324219, 0.06371923065185547, 0.06326383972167969, 0.0630588493347168, 0.06433984375, 0.06381785583496094, 0.06321152114868164, 0.06299391937255859, 0.06267340850830078, 0.0627176628112793, 0.06267644882202149, 0.06440838623046875, 0.06383580780029297, 0.06328559875488281, 0.06310860824584962, 0.06277785491943359, 0.06273814392089844, 0.0627710075378418, 0.06306249618530274, 0.06377267074584961, 0.0633193588256836, 0.06306886291503906, 0.06326822280883788, 0.06382368087768554, 0.06334342575073242, 0.06332620620727539, 0.0632729606628418, 0.0628529281616211, 0.06388959884643555, 0.06346467208862305, 0.06330243301391601, 0.06282854461669922, 0.06285084915161133, 0.07443244934082031, 0.06446137237548828, 0.06369164657592774, 0.06315302276611329, 0.06286540985107422, 0.06254105758666992, 0.06253235244750976, 0.062494110107421875, 0.06251785659790039, 0.062494720458984375, 0.062485694885253906, 0.06259721755981446, 0.0625709457397461, 0.06260969543457032, 0.06288300704956054, 0.06461443328857422, 0.06367721557617187, 0.06329721450805664, 0.06322617721557618, 0.06363897705078125, 0.06316908645629883, 0.06300812911987305, 0.0626039047241211, 0.06258268737792969, 0.0625849266052246, 0.06258278274536133, 0.06260876846313476, 0.06264896011352539, 0.06266470336914062, 0.06358329772949219, 0.06363811111450195, 0.06414300537109376, 0.06372217559814453, 0.0632176628112793, 0.06349824142456055, 0.06377267074584961, 0.06321971130371094, 0.06315827178955079, 0.06270121765136719, 0.06271395111083984, 0.06270800018310548, 0.06267900848388672, 0.06271903991699218, 0.06394566345214844, 0.06362099075317383, 0.06333248138427734, 0.06295142364501953, 0.06387868881225586, 0.06334307098388672, 0.06347081756591796, 0.06320822525024414, 0.06387712097167969, 0.06344499206542968, 0.06324019241333008, 0.06275820922851562, 0.06279411315917968, 0.06406585693359375, 0.06369449615478516, 0.063364990234375, 0.06296214294433594, 0.06283081436157227, 0.06283651351928711, 0.0630591697692871, 0.07509142303466797, 0.06442396545410156, 0.06360047912597656, 0.06315631866455078, 0.06292102432250976, 0.06262992095947266, 0.06262192153930664, 0.06259014511108399, 0.06256534576416016, 0.0625541114807129, 0.06254611206054687, 0.06256636810302735, 0.06255702209472656, 0.06260224151611328, 0.06266233444213867, 0.06361078262329102, 0.06330547332763672, 0.06418089294433593, 0.06379430389404298, 0.06355561447143555, 0.06314684677124023, 0.062740478515625, 0.06266387176513671, 0.06383625411987305, 0.0634600944519043, 0.06311523056030273, 0.06275686264038086, 0.06265244674682617, 0.06258297729492188, 0.0626186866760254, 0.06269769668579102, 0.06388288116455078, 0.06369964981079101, 0.06319327926635743, 
0.06372556686401368, 0.06353100967407227, 0.06330163192749023, 0.06385868835449218, 0.06359040069580078, 0.06334284973144531, 0.06286051177978516, 0.06271404647827149, 0.06269068908691407, 0.06267715072631835, 0.06373664093017578, 0.06345318222045898, 0.06325609588623046, 0.06274710464477538, 0.06287360000610352, 0.06382966232299804, 0.06332188796997071, 0.06388908767700195, 0.0634788475036621, 0.06326665496826171, 0.06315331268310546, 0.06384118270874023, 0.06335478210449219, 0.06401638031005859, 0.0636416015625, 0.06337900924682617, 0.06293049621582031, 0.06279062271118165, 0.06281820678710938, 0.07548915100097656, 0.06432316589355469, 0.06362368011474609, 0.06310707092285156, 0.06280192184448243, 0.06249062347412109, 0.06255369567871094, 0.06259247970581054, 0.06260409545898438, 0.06266249465942383, 0.06256268692016602, 0.06255984115600587, 0.06251961517333984, 0.06251110458374023, 0.0635239372253418, 0.06401526641845703, 0.06475536346435547, 0.06381379318237304, 0.06341651153564454, 0.0631992301940918, 0.06274457550048829, 0.06259673690795899, 0.06261766433715821, 0.06363782501220704, 0.063327392578125, 0.06317142486572265, 0.0625761604309082, 0.06273276901245117, 0.0626688003540039, 0.06261964797973633, 0.06500911712646484, 0.06385308837890626, 0.06369894409179687, 0.06391177749633789, 0.06352675247192383, 0.06333427047729492, 0.06283308792114257, 0.06287974548339843, 0.06389519882202148, 0.06348950576782227, 0.06329385757446289, 0.06281206512451172, 0.0628967056274414, 0.06284697723388671, 0.06321971130371094, 0.06397244644165039, 0.06366819381713867, 0.06347036743164063, 0.0634810562133789, 0.06455187225341796, 0.06391571044921875, 0.06345513534545899, 0.06324454498291016, 0.06291263961791992, 0.06308582305908203, 0.06399606323242188, 0.06341827011108399, 0.06333513641357422, 0.06293260955810547, 0.06294694519042969, 0.06292665481567383, 0.06349305725097656, 0.06451129913330078, 0.07453900909423829, 0.06428057861328125, 0.06368022537231445, 0.0631769905090332, 0.06294323348999023, 0.06258278274536133, 0.06261484909057617, 0.06256681442260742, 0.06306585693359375, 0.06298416137695312, 0.06268780899047852, 0.06266470336914062, 0.06259673690795899, 0.06262572860717773, 0.06309286499023438, 0.06432937622070313, 0.06395964813232421, 0.06381574249267578, 0.06385635375976563, 0.06352467346191407, 0.06393247985839844, 0.06353142547607422, 0.06318489456176758, 0.06281830215454101, 0.06264012908935547, 0.06287728118896484, 0.06272195053100586, 0.06393702316284179, 0.06353318405151367, 0.06329321670532227, 0.06286371231079102, 0.06281299209594726, 0.06371014404296875, 0.06343065643310547, 0.06396636962890626, 0.06372355270385742, 0.06328329467773437, 0.06293737411499023, 0.06290886306762696, 0.06388687896728516, 0.06358287811279296, 0.06335212707519532, 0.06334924697875977, 0.06283078384399414, 0.06273318481445313, 0.06281219100952148, 0.06343363189697265, 0.06430719757080078, 0.06388940811157226, 0.06344294357299805, 0.06331001663208008, 0.06400950622558593, 0.0634823989868164, 0.06326252746582031, 0.06287292861938476, 0.06393280029296874, 0.0635151023864746, 0.06333235168457031, 0.0629452781677246, 0.06393228912353516, 0.06358233642578125, 0.0634777603149414, 0.06400204467773438, 0.07615350341796875, 0.06404704284667968, 0.0635104637145996, 0.06305129623413086, 0.06274102401733399, 0.06260137557983399, 0.06261350250244141, 0.06258822250366211, 0.06259702301025391, 0.06254604721069336, 0.0626080322265625, 0.06261305618286132, 0.06255251312255859, 0.06266470336914062, 0.06387724685668945, 
0.06500953674316406, 0.06401177978515625, 0.06348236846923828, 0.06396217727661133, 0.06377772903442383, 0.0632174072265625, 0.0630169906616211, 0.06269548797607422, 0.06275001525878907, 0.06268332672119141, 0.0637119026184082, 0.06342031860351563, 0.06322735977172851, 0.06270588684082032, 0.0633163833618164, 0.0638311996459961, 0.06414422607421875, 0.06374345779418945, 0.06344883346557617, 0.06378780746459961, 0.06358531188964844, 0.06329433441162109, 0.06286140823364259, 0.062814208984375, 0.06284857559204102, 0.06286761474609374, 0.06389379119873047, 0.063425537109375, 0.06327398300170899, 0.06275056076049805, 0.06379126358032226, 0.06373510360717774, 0.0635665283203125, 0.06451404571533204, 0.06395868682861328, 0.06402214050292969, 0.0637344970703125, 0.0633507843017578, 0.06297804641723632, 0.06279782485961914, 0.0630552978515625, 0.06387545776367187, 0.06358240127563476, 0.06339379119873047, 0.0634431037902832, 0.06283270263671875, 0.06299420928955078, 0.06390374374389649, 0.07441139221191406, 0.0642356185913086, 0.06354774475097656, 0.06306163024902343, 0.06283020782470704, 0.06253788757324219, 0.06256924819946288, 0.06258396911621093, 0.06359446334838867, 0.06339673614501953, 0.0630923194885254, 0.06253814315795898, 0.06261350250244141, 0.06281161499023437, 0.06307257461547852, 0.065474365234375, 0.06393024063110352, 0.06373571014404297, 0.06317334365844726, 0.0631596794128418, 0.06314627075195313, 0.06380918502807617, 0.06413577270507813, 0.06326067352294922, 0.06280707168579101, 0.06266979217529296, 0.06274662399291993, 0.062674560546875, 0.06437516784667968, 0.06374195098876953, 0.06343468856811524, 0.0638768310546875, 0.06357593536376953, 0.06374761581420899, 0.06392512130737305, 0.06361705780029298, 0.06320950317382812, 0.06296076965332031, 0.06384320068359375, 0.06346752166748047, 0.06399795150756836, 0.0635863037109375, 0.06325625610351562, 0.06295788955688476, 0.06276710510253906, 0.06299033737182617, 0.06391830444335937, 0.06412879943847656, 0.06371244812011718, 0.06337209701538087, 0.06356563186645507, 0.06387731170654297, 0.0634142723083496, 0.06412866973876953, 0.06372592163085937, 0.06338969421386718, 0.06312550354003907, 0.06393763351440429, 0.0634436149597168, 0.06404950714111328, 0.06366515350341796, 0.06340700912475586, 0.06283292770385743]",tokens/s,15.777331325267301,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,840.257536,4691.197952,0.0,4288.67584,4213.842432,s,1,13.69103125,13.69103125,0.0,13.69103125,13.69103125,13.69103125,13.69103125,[13.69103125],,kWh,0.00018598378312498576,2.0508200249727567e-05,6.259143896200636e-05,0.00026908342233671967,,MB,1339.199488,5114.822656,0.0,4699.717632,4535.24736,s,10,8.522136840820313,0.8522136840820312,0.008486166056719516,0.8541427917480469,0.859630908203125,0.8608456665039063,0.8618174731445313,"[0.830426025390625, 0.8493543701171875, 0.8496346435546875, 0.8480897827148437, 0.854280029296875, 
0.8540055541992188, 0.8620604248046875, 0.8586854858398437, 0.8593609619140625, 0.8562395629882813]",tokens/s,300.39414384169675,kWh,2.481764627465029e-05,2.736624702243653e-06,1.640149460266707e-05,4.395576557956101e-05,tokens/kWh,5824036.884004073,MB,1395.019776,5125.308416,0.0,4708.10624,4535.24992,s,10,40.23791796875,4.023791796875,0.0076709909071857926,4.025454223632812,4.0324760253906256,4.033512915039062,4.034342426757813,"[4.00916796875, 4.015730712890625, 4.016523193359375, 4.025572265625, 4.021522705078125, 4.025336181640625, 4.026189697265625, 4.03224560546875, 4.031079833984375, 4.0345498046875]",tokens/s,15.65687371024707,kWh,0.00011828820436659747,1.3048302971204392e-05,7.838808122893267e-05,0.00020972458856673455,tokens/kWh,300393.96157858404,,s,630,40.235525993347125,0.06386591427515423,0.0015732270775732622,0.06350155067443847,0.06442507400512695,0.06466185035705566,0.07474415466308595,"[0.07863289642333984, 0.0649615707397461, 0.06408688354492187, 0.06359872055053711, 0.06326067352294922, 0.0631207046508789, 0.06316921615600586, 0.06313708877563476, 0.06312799835205078, 0.06312355041503906, 0.0631379508972168, 0.06318220901489258, 0.06311180877685547, 0.0631459846496582, 0.06319500732421875, 0.06316454315185546, 0.06313727951049805, 0.06314646530151367, 0.0631439666748047, 0.0632279052734375, 0.06317871856689453, 0.06326684951782227, 0.0634071044921875, 0.0632616958618164, 0.06332006454467773, 0.06338492965698242, 0.06335091018676758, 0.06332243347167969, 0.06327449417114257, 0.0632817611694336, 0.06329561614990234, 0.06329753494262695, 0.06334239959716798, 0.06334028625488282, 0.06332223892211913, 0.0633367691040039, 0.06333580780029296, 0.06362176132202148, 0.06346547317504883, 0.06347132873535156, 0.06343503952026368, 0.06344044876098633, 0.06345750427246094, 0.06350051116943359, 0.06345523071289062, 0.063504638671875, 0.0634672966003418, 0.06339750289916993, 0.06337507247924805, 0.06338393783569336, 0.06342272186279296, 0.06344499206542968, 0.06338560104370117, 0.06344089508056641, 0.06342588806152344, 0.06351734542846679, 0.06346342468261719, 0.0640505599975586, 0.06353984069824219, 0.0635533447265625, 0.063508544921875, 0.06353113555908203, 0.06355075073242188, 0.07645750427246094, 0.06499801635742188, 0.06410739135742187, 0.0635627212524414, 0.06333395385742188, 0.06313359832763672, 0.06309328079223633, 0.06314947128295899, 0.06327967834472656, 0.06314089584350586, 0.06317318344116211, 0.06329183959960938, 0.06321977615356446, 0.06323209762573243, 0.06316835021972657, 0.0631968002319336, 0.0631583023071289, 0.06318524932861327, 0.06317401504516601, 0.06316851043701172, 0.06317119979858399, 0.06326678466796876, 0.06320131301879883, 0.06323583984375, 0.06320323181152344, 0.06331222534179687, 0.06336921691894531, 0.0634754867553711, 0.06441801452636718, 0.06401023864746094, 0.06383347320556641, 0.06338604736328125, 0.06336735916137695, 0.06334431838989257, 0.06335248184204102, 0.06341904067993163, 0.06342041778564453, 0.06338560104370117, 0.0633628158569336, 0.06340019226074219, 0.06442361450195312, 0.06409248352050781, 0.0639549446105957, 0.06347760009765625, 0.06381372833251953, 0.06345731353759766, 0.06343254470825195, 0.06341436767578125, 0.06337955093383789, 0.06340732955932617, 0.06341712188720704, 0.06347568130493164, 0.06532918548583984, 0.0634634895324707, 0.06347359848022462, 0.06345884704589844, 0.06348780822753906, 0.06457206726074219, 0.06417584228515626, 0.06406172943115235, 0.06353046417236329, 0.06353068923950195, 0.06349705505371094, 
0.0757188491821289, 0.06493548583984375, 0.06420444488525391, 0.06358895874023437, 0.06332761764526368, 0.06303116989135742, 0.06344371032714843, 0.06419241333007812, 0.06374947357177735, 0.06358473587036133, 0.06327337646484375, 0.06325030517578124, 0.06317078399658203, 0.06428144073486328, 0.0638424301147461, 0.06363833618164062, 0.06314803314208985, 0.06317068862915039, 0.06322320175170898, 0.06325001525878907, 0.06313862228393555, 0.06318454360961914, 0.06328156661987305, 0.06323814392089844, 0.06321779251098633, 0.06323126220703125, 0.06325308990478516, 0.06331552124023437, 0.06329747009277344, 0.06335935974121094, 0.06339369583129882, 0.06334486389160156, 0.06464083099365234, 0.06426150512695313, 0.06385747146606445, 0.06352822494506837, 0.06332489776611328, 0.06336307144165039, 0.06334054565429688, 0.0633733139038086, 0.06339993667602539, 0.06339097595214843, 0.06335161590576172, 0.06340921783447266, 0.06446169281005859, 0.06409744262695312, 0.06389846420288085, 0.0633507843017578, 0.06339174270629883, 0.06344089508056641, 0.06338947296142577, 0.06352099227905274, 0.06335049438476563, 0.06339408111572266, 0.06341376113891602, 0.06346364974975587, 0.06348214340209961, 0.06348185729980468, 0.06349388885498047, 0.06348371124267578, 0.06401634979248047, 0.0646123504638672, 0.06402652740478515, 0.0760350112915039, 0.06498508453369141, 0.06429081726074219, 0.06375737762451172, 0.06347462463378906, 0.06315238571166992, 0.06312063980102539, 0.06308451080322265, 0.06310147094726562, 0.06323984146118164, 0.06317820739746094, 0.06315507125854493, 0.06343593597412109, 0.06426505279541016, 0.06404025268554688, 0.06373558425903321, 0.06330569458007812, 0.06352582550048828, 0.06377267074584961, 0.06337945556640626, 0.06439730834960937, 0.0639815673828125, 0.06377215957641602, 0.06325503921508789, 0.06322566223144531, 0.06328953552246094, 0.06427625274658202, 0.06397951889038087, 0.06398588943481445, 0.06338518524169921, 0.0633532485961914, 0.06359574508666992, 0.06332086563110352, 0.06331347274780273, 0.06322534561157227, 0.06469705963134766, 0.06335919952392578, 0.06340822219848632, 0.06335068893432617, 0.06334668731689454, 0.06332412719726563, 0.06354723358154298, 0.06437907409667969, 0.06419046020507813, 0.06391574478149414, 0.06393996810913086, 0.0634230728149414, 0.06333238220214844, 0.06342643356323242, 0.06342428970336914, 0.06344972610473633, 0.06363747024536133, 0.06348355102539062, 0.067328125, 0.0644754867553711, 0.06421084594726563, 0.06399302291870117, 0.0635558090209961, 0.06353100967407227, 0.06354300689697266, 0.06352576065063477, 0.06353244781494141, 0.06357276916503907, 0.07486585235595702, 0.06477865600585937, 0.06409651184082031, 0.06380748748779297, 0.06333030319213867, 0.06319036865234375, 0.06319990539550781, 0.06316003036499024, 0.0631212158203125, 0.0642052764892578, 0.06386422348022461, 0.0637364158630371, 0.0631459846496582, 0.06317631912231446, 0.06319756698608399, 0.06324838256835938, 0.06318710327148437, 0.06317580795288086, 0.06318112182617187, 0.06326108932495117, 0.06320947265625, 0.06320537567138672, 0.06438441467285157, 0.06416393280029296, 0.0637946891784668, 0.06349251174926758, 0.06335548782348632, 0.06336080169677734, 0.06432790374755859, 0.06401638031005859, 0.06380262374877929, 0.0632880630493164, 0.06332780838012696, 0.06337692642211915, 0.06333110427856445, 0.06329145431518554, 0.06325439834594726, 0.06334054565429688, 0.06335712051391601, 0.06358425521850586, 0.06455500793457031, 0.06427184295654297, 0.06390172958374024, 0.06352691268920899, 
0.06338313674926757, 0.0638493766784668, 0.06468370819091797, 0.0640551986694336, 0.06376899337768555, 0.0634532814025879, 0.0634059829711914, 0.06452838134765625, 0.06413107299804688, 0.06392012786865234, 0.06349020767211914, 0.06346227264404297, 0.06344393539428711, 0.06347753524780274, 0.06356579208374023, 0.06363337707519531, 0.06366012954711914, 0.06458592224121094, 0.06434150695800782, 0.07444620513916016, 0.0648321304321289, 0.06414540863037109, 0.0636701774597168, 0.0633908462524414, 0.06316336059570313, 0.06423292541503907, 0.06398944091796875, 0.06373871994018555, 0.06323958587646485, 0.06317484664916992, 0.06315430450439453, 0.06417641448974609, 0.06394800186157226, 0.06371417617797852, 0.06320323181152344, 0.06326867294311524, 0.06334188842773437, 0.06420365142822265, 0.06416588592529297, 0.06380275344848632, 0.063285888671875, 0.06323737716674804, 0.06322611236572266, 0.06422169494628906, 0.0639593276977539, 0.06374188613891602, 0.06328217697143555, 0.06338844680786133, 0.06333030319213867, 0.06433382415771484, 0.06398566436767578, 0.0637330551147461, 0.06331052780151367, 0.06326476669311523, 0.06333423995971679, 0.06330294418334961, 0.0633760643005371, 0.06333859252929687, 0.06362067031860351, 0.063727294921875, 0.06454972839355469, 0.0639920654296875, 0.0638809585571289, 0.06342451095581055, 0.06343299102783204, 0.06369558334350586, 0.06444496154785156, 0.06410467529296875, 0.06392399978637696, 0.0633939208984375, 0.06345683288574219, 0.06341097640991211, 0.06465945434570312, 0.06427852630615234, 0.0639918098449707, 0.06355292892456055, 0.06363600158691406, 0.06357612609863281, 0.06365593719482422, 0.06363967895507812, 0.06477196502685546, 0.06360492706298829, 0.0751732177734375, 0.06483010864257813, 0.06409830474853516, 0.06367641448974609, 0.06328067016601563, 0.06308502578735352, 0.0632360954284668, 0.06322175979614258, 0.06310204696655274, 0.06316255950927735, 0.06319935989379882, 0.06318873596191406, 0.06416675567626953, 0.06386073684692382, 0.06415264129638672, 0.06322476959228515, 0.0633589744567871, 0.06421708679199219, 0.06390361785888672, 0.06378713607788086, 0.06369011306762695, 0.06323247909545898, 0.06323126220703125, 0.06320832061767578, 0.06422732543945313, 0.06392627334594726, 0.06378483200073243, 0.06327094268798829, 0.06323600006103515, 0.06323174285888672, 0.0643486099243164, 0.06394406509399414, 0.06376118469238282, 0.06331100845336914, 0.06367683029174805, 0.06445072174072265, 0.06404338836669922, 0.06389017486572265, 0.0633287696838379, 0.06335948944091797, 0.06355923080444335, 0.06494866943359374, 0.06438636779785156, 0.06394540786743164, 0.06369075012207032, 0.06343270492553711, 0.06341388702392578, 0.06335318374633789, 0.06335273742675782, 0.06336038589477538, 0.06389616012573242, 0.06452607727050781, 0.06397900772094726, 0.06382643127441406, 0.06346384048461914, 0.06354499053955077, 0.06448982238769531, 0.06405433654785156, 0.06448633575439452, 0.06421686553955078, 0.06394287872314453, 0.06349824142456055, 0.06351788711547851, 0.07438127899169922, 0.06480470275878907, 0.06405709075927735, 0.06360310363769531, 0.06331391906738282, 0.0630841293334961, 0.06437932586669921, 0.06842723083496094, 0.06371788787841796, 0.06333235168457031, 0.06332211303710937, 0.06321356964111328, 0.06313395309448243, 0.06318463897705077, 0.06324019241333008, 0.06317670440673828, 0.06313724899291992, 0.06441324615478515, 0.06401532745361328, 0.06369075012207032, 0.06340364837646484, 0.06423609924316406, 0.06391500854492188, 0.0644493408203125, 0.06406553649902344, 
0.06370918273925781, 0.0633364486694336, 0.06331801605224609, 0.06326067352294922, 0.06425804901123047, 0.06402009582519531, 0.06378246307373046, 0.06327155303955079, 0.06328051376342773, 0.06326969528198242, 0.06433100891113282, 0.06403065490722656, 0.06383216094970703, 0.06384713745117188, 0.06440457916259766, 0.06455513763427734, 0.06422367858886718, 0.06386723327636719, 0.06366822433471679, 0.06340812683105469, 0.06339699172973633, 0.06337011337280274, 0.06337945556640626, 0.06338086318969727, 0.06340067291259766, 0.06338156890869141, 0.06366511917114258, 0.06463938903808594, 0.06425801849365234, 0.0640025634765625, 0.06456934356689453, 0.06402662658691406, 0.06452611541748046, 0.06412687683105468, 0.06406790161132812, 0.06346547317504883, 0.0634818229675293, 0.06350259017944336, 0.07422160339355469, 0.06468720245361329, 0.06402899169921875, 0.06357196807861328, 0.0633246078491211, 0.06305807876586914, 0.0631049919128418, 0.06309600067138672, 0.0631058578491211, 0.06314144134521485, 0.0632344970703125, 0.06329958343505859, 0.06325657653808593, 0.06324224090576172, 0.06340403366088868, 0.06438706970214844, 0.06412643432617188, 0.06372553634643555, 0.06471123504638672, 0.06427238464355468, 0.06376243209838867, 0.06349168014526367, 0.06323648071289062, 0.0644382095336914, 0.06408611297607422, 0.06378496170043946, 0.06338937759399414, 0.06333212661743164, 0.06320995330810547, 0.0642558364868164, 0.06395926284790039, 0.06373318481445313, 0.06325020980834961, 0.06328598403930664, 0.06361913681030273, 0.06496256256103515, 0.06440748596191406, 0.06395030212402343, 0.06367087936401367, 0.06336819076538086, 0.06446578979492187, 0.06424972534179688, 0.06397091293334961, 0.06353139114379883, 0.06341660690307617, 0.06345087814331055, 0.06466381072998047, 0.06425997161865235, 0.06394614410400391, 0.06352560043334961, 0.06363504028320313, 0.06449398040771484, 0.06408096313476562, 0.06393337631225586, 0.06354156875610352, 0.06453011322021485, 0.06398771286010742, 0.06383945465087891, 0.06346796798706054, 0.06462908935546875, 0.06412825775146484, 0.06404169464111328, 0.06484585571289063, 0.07516365051269532, 0.06468812561035156, 0.0640184326171875, 0.06358220672607422, 0.06323968124389648, 0.06429132843017578, 0.0640348129272461, 0.06367232131958007, 0.06325155258178711, 0.06311824035644531, 0.06315843200683594, 0.06417513275146484, 0.06385337448120117, 0.06376992034912109, 0.06328617477416992, 0.06327827072143555, 0.06317027282714843, 0.06440025329589844, 0.06404096221923829, 0.06373702239990234, 0.06340895843505859, 0.06426521301269532, 0.06377395248413086, 0.06359807968139648, 0.06325801467895507, 0.0632635841369629, 0.06340979385375976, 0.06440290832519531, 0.06382841491699219, 0.06368713760375977, 0.06329507064819336, 0.06378537750244141, 0.06432358551025391, 0.06390940856933594, 0.06454713439941406, 0.06419062042236329, 0.06390374374389649, 0.06358015823364258, 0.06438076782226562, 0.06387113571166993, 0.0637050895690918, 0.06335283279418945, 0.06342348861694336, 0.06448582458496094, 0.06392684936523438, 0.06460620880126954, 0.06429020690917969, 0.06408252716064453, 0.06361455917358398, 0.06367068862915039, 0.06446611022949218, 0.0640551986694336, 0.06377519989013672, 0.06353286361694335, 0.06354598236083984, 0.06344847869873046, 0.06506352233886718, 0.064525634765625, 0.06403302764892578, 0.06366211318969726, 0.06384476852416993, 0.06455091094970702, 0.06404828643798828]",tokens/s,15.657804501031471,, 
8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward 
layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 266143 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,12224.851968,7107.117056,0.0,6704.594944,6690.791936,s,1,28.27341796875,28.27341796875,0.0,28.27341796875,28.27341796875,28.27341796875,28.27341796875,[28.27341796875],,kWh,0.000621720056037528,6.857295190764175e-05,0.0002141065601739961,0.0009043995681191658,,MB,1481.66656,7321.02656,0.0,6897.532928,6816.506368,s,10,1.2366021347045897,0.12366021347045897,0.0007167371090687957,0.12338806152343751,0.12476929244995118,0.12501571922302246,0.1252128606414795,"[0.12301744079589844, 0.12286134338378907, 0.12328646087646485, 0.12471453094482422, 0.12378614044189454, 0.1234056625366211, 0.12336396789550781, 0.12353398132324218, 0.12337046051025391, 0.12526214599609375]",tokens/s,2070.1888895020834,kWh,3.6320680565844435e-06,4.0055417542477477e-07,2.404250894592742e-06,6.43687312660196e-06,tokens/kWh,39770863.11395778,MB,1536.38912,7329.415168,0.0,6903.824384,6816.508928,s,10,76.10370654296875,7.610370654296875,0.009795883577426938,7.61452783203125,7.617827490234375,7.621764331054687,7.624913803710937,"[7.6141640625, 7.61695263671875, 7.59407470703125, 7.625701171875, 7.59434326171875, 7.6165986328125, 7.61529296875, 7.60268310546875, 7.60900439453125, 7.6148916015625]",tokens/s,8.278177616017915,kWh,0.00022290096202632866,2.4587064327248393e-05,9.916619147280775e-05,0.00034665421782638486,tokens/kWh,181737.29543816583,,s,630,76.1013830261231,0.12079584607321119,0.0011913729780871542,0.12058383941650391,0.12159497833251953,0.12252608032226563,0.12644233581542974,"[0.12063942718505859, 0.12089769744873047, 0.1207806396484375, 0.12071526336669922, 0.12072345733642578, 0.12135014343261719, 0.12173027038574219, 0.12039657592773438, 0.12069884490966797, 0.12049600219726563, 0.12056768035888672, 0.12056822204589844, 0.12071501159667969, 0.12140557098388671, 0.12281788635253907, 0.12108662414550782, 0.1210379867553711, 0.12177903747558594, 0.12084169769287109, 0.12114320373535156, 0.12134674835205078, 0.12058601379394532, 0.1211283187866211, 0.12104579162597656, 
0.1206640625, 0.12056288146972656, 0.12099878692626953, 0.12166365051269531, 0.121327392578125, 0.12543590545654296, 0.12109004974365234, 0.12099929809570313, 0.12068217468261719, 0.12124585723876953, 0.12157791900634765, 0.12112111663818359, 0.12205862426757813, 0.12157145690917968, 0.12096701049804688, 0.12093046569824219, 0.1203938217163086, 0.12190412902832032, 0.12025286102294921, 0.12046793365478516, 0.12086787414550781, 0.12032473754882812, 0.1198636474609375, 0.12030290985107422, 0.11972409820556641, 0.12046713256835938, 0.11974752044677735, 0.11980595397949219, 0.12078899383544922, 0.12112064361572265, 0.12009279632568359, 0.1206657257080078, 0.12007462310791016, 0.11993660736083984, 0.12016675567626953, 0.1200456314086914, 0.11996774291992188, 0.11984422302246094, 0.11972492980957031, 0.11969612884521484, 0.1195902099609375, 0.12044563293457031, 0.11992380523681641, 0.12006038665771485, 0.12098400115966797, 0.12079443359375, 0.1200176010131836, 0.12075007629394531, 0.12094857788085937, 0.12086083221435547, 0.12065996551513672, 0.12120883178710938, 0.12185356903076172, 0.1270432662963867, 0.12065993499755859, 0.12093718719482421, 0.12066368103027343, 0.12072179412841796, 0.12082774353027344, 0.12355101013183593, 0.12074188995361328, 0.1209405746459961, 0.12022870635986328, 0.12104847717285157, 0.12023468780517578, 0.12011049652099609, 0.12041238403320312, 0.12063113403320312, 0.12073935699462891, 0.12125276947021485, 0.12100812530517578, 0.1241376953125, 0.1209480972290039, 0.12036342620849609, 0.11999219512939453, 0.12073587036132813, 0.12065996551513672, 0.12035276794433594, 0.12185395050048828, 0.11977254486083984, 0.12055123138427734, 0.12001692962646485, 0.12033020782470703, 0.12035279846191406, 0.12091494750976563, 0.12143103790283204, 0.12522342681884766, 0.12381970977783203, 0.12019100952148437, 0.12015673828125, 0.11980550384521485, 0.11988748931884766, 0.12041295623779297, 0.12050640106201171, 0.12088227081298829, 0.12084111785888672, 0.12071961975097656, 0.12064316558837891, 0.12047756958007813, 0.12047183990478516, 0.12010272216796875, 0.12061682891845703, 0.1199738540649414, 0.11953561401367188, 0.11994070434570313, 0.12014147186279298, 0.12044569396972657, 0.12037529754638672, 0.12065382385253906, 0.12051209259033203, 0.12094290924072265, 0.11971753692626953, 0.11993132781982421, 0.12007382202148438, 0.12023458862304688, 0.11987948608398437, 0.1202096939086914, 0.1205613784790039, 0.11985305786132812, 0.12117513275146484, 0.11992489624023438, 0.1200579833984375, 0.12050086212158204, 0.12159487915039062, 0.12090470123291015, 0.12138086700439453, 0.12209152221679688, 0.12063353729248047, 0.11993478393554688, 0.12001398468017578, 0.11991152191162109, 0.12002182769775391, 0.1211091537475586, 0.1202808609008789, 0.12238883209228515, 0.12313820648193359, 0.12119449615478516, 0.12046745300292969, 0.12015206146240234, 0.11995673370361327, 0.1203842544555664, 0.12045516967773437, 0.12233628845214843, 0.12090467071533204, 0.12063948822021485, 0.12030770874023437, 0.12055990600585938, 0.12007376098632813, 0.1202484130859375, 0.1200989761352539, 0.12016019439697266, 0.12138905334472656, 0.12045209503173829, 0.12282777404785156, 0.12069068908691406, 0.12018694305419922, 0.11999225616455078, 0.11997593688964844, 0.12020845031738281, 0.1209947509765625, 0.12037248229980468, 0.11937619018554688, 0.11959088134765625, 0.12036457824707031, 0.12143689727783204, 0.12205996704101563, 0.12217830657958985, 0.12211183929443359, 0.12348854064941406, 0.12111660766601562, 0.12112601470947265, 
0.12102902221679687, 0.12072374725341797, 0.12136422729492187, 0.1213342056274414, 0.12208946990966797, 0.12236163330078124, 0.1213642578125, 0.12125433349609376, 0.12226512145996093, 0.12132809448242188, 0.12124979400634765, 0.12150374603271484, 0.12159078216552735, 0.12087398529052734, 0.12031324768066406, 0.12003286743164063, 0.12017151641845703, 0.12053913879394532, 0.12033404541015626, 0.12022838592529297, 0.1235968017578125, 0.12024601745605469, 0.120135009765625, 0.11983708953857422, 0.119704833984375, 0.11995008087158203, 0.11994086456298828, 0.11993558502197266, 0.1209384002685547, 0.1208279037475586, 0.12080332946777343, 0.1203727035522461, 0.12075062561035156, 0.12094668579101563, 0.120700927734375, 0.12094464111328125, 0.1208015365600586, 0.12483174133300781, 0.12069452667236329, 0.12059779357910157, 0.1206253433227539, 0.12044137573242188, 0.12096873474121093, 0.12010953521728515, 0.12019296264648438, 0.12672621154785157, 0.12151017761230469, 0.12140310668945313, 0.12092415618896485, 0.12074393463134765, 0.12027263641357422, 0.12004303741455079, 0.12006269073486328, 0.12005747222900391, 0.12049238586425781, 0.11997420501708984, 0.12031378936767578, 0.12075212860107422, 0.11992678070068359, 0.1198279037475586, 0.12004745483398438, 0.11984905242919922, 0.11993344116210937, 0.12574732971191407, 0.12074188995361328, 0.12076646423339844, 0.12022374725341797, 0.11993065643310546, 0.120000732421875, 0.120283203125, 0.11991007995605468, 0.11952761840820313, 0.12064739227294922, 0.12069712066650391, 0.1205821762084961, 0.12001430511474609, 0.11999846649169922, 0.12127699279785156, 0.11978956604003907, 0.12252995300292968, 0.11983856201171875, 0.12066611480712891, 0.12023529815673828, 0.11987388610839844, 0.11967747497558594, 0.1204591064453125, 0.12114329528808594, 0.12134950256347657, 0.12094937896728515, 0.1205513916015625, 0.12272029113769531, 0.12022700500488281, 0.12037407684326172, 0.12022528076171875, 0.1207135009765625, 0.11983657836914062, 0.11969977569580079, 0.12001046752929688, 0.119978271484375, 0.12044400024414062, 0.11970838165283203, 0.12005190277099609, 0.12141974639892578, 0.12019920349121094, 0.11998822021484375, 0.11994316864013672, 0.12022988891601563, 0.12207901000976562, 0.12082342529296874, 0.12188899230957032, 0.12109795379638671, 0.12056233978271484, 0.11973632049560547, 0.11983872222900391, 0.12007628631591796, 0.12198441314697266, 0.12089609527587891, 0.12062239837646484, 0.1205584945678711, 0.1204335708618164, 0.12147097778320312, 0.12118016052246093, 0.12126617431640625, 0.12090531158447265, 0.12112111663818359, 0.12018649291992188, 0.12745977783203125, 0.12152124786376953, 0.12044585418701172, 0.11990022277832031, 0.11983206176757813, 0.12104338836669921, 0.12050579071044921, 0.12020143890380859, 0.12062754821777344, 0.11977113342285156, 0.11953561401367188, 0.12070611572265624, 0.12053395080566406, 0.12113715362548828, 0.12065382385253906, 0.1202391357421875, 0.12055142211914062, 0.12002953338623047, 0.1207589111328125, 0.12339430236816407, 0.12069657897949218, 0.12097702026367188, 0.1210926742553711, 0.121061279296875, 0.12075961303710937, 0.12113775634765625, 0.1203609619140625, 0.1202872314453125, 0.12029446411132813, 0.12032096099853516, 0.12118016052246093, 0.12064153289794922, 0.12064566040039063, 0.13031216430664064, 0.12020531463623046, 0.12275215911865234, 0.12029219055175781, 0.12009190368652344, 0.12078902435302734, 0.12019376373291016, 0.12111257934570313, 0.12104908752441407, 0.12110975646972656, 0.12034329223632813, 0.11998822021484375, 
0.11981417846679687, 0.12145046234130859, 0.12017459106445312, 0.12064358520507812, 0.12056575775146484, 0.1200597152709961, 0.12001254272460937, 0.11981664276123047, 0.12042063903808593, 0.11993059539794922, 0.12086460876464844, 0.11994742584228515, 0.12043958282470703, 0.11992412567138672, 0.119410400390625, 0.119814208984375, 0.12014838409423828, 0.12094287872314453, 0.12021568298339844, 0.12104851531982422, 0.12077318572998047, 0.11981619262695313, 0.12019712066650391, 0.11937590026855469, 0.11952534484863281, 0.11996284484863282, 0.12073225402832032, 0.12031609344482422, 0.12075212860107422, 0.12109798431396485, 0.12022630310058594, 0.11980742645263671, 0.11973283386230468, 0.1200063705444336, 0.12070464324951172, 0.12044652557373046, 0.1204170913696289, 0.1273784637451172, 0.12198806762695312, 0.12052476501464844, 0.12027497863769532, 0.12059024047851563, 0.12118566131591797, 0.12226223754882813, 0.1206904296875, 0.1214546890258789, 0.12088451385498047, 0.11992671966552734, 0.11980691528320313, 0.11995136260986328, 0.1206108169555664, 0.12152012634277344, 0.12038451385498047, 0.1216678695678711, 0.1211890869140625, 0.12252134704589844, 0.12116607666015625, 0.12154061126708984, 0.12060665893554688, 0.12120098876953125, 0.12229193878173829, 0.12122930908203125, 0.1213829116821289, 0.1208115234375, 0.12058550262451172, 0.12149116516113281, 0.1211395492553711, 0.1214081573486328, 0.12093462371826172, 0.12090169525146484, 0.12117715454101563, 0.12130397033691406, 0.12384588623046874, 0.12077462768554688, 0.1206154556274414, 0.12054988861083984, 0.12083350372314453, 0.12053753662109375, 0.1217639389038086, 0.12086067199707032, 0.12031385803222656, 0.1201882553100586, 0.12189347076416016, 0.12047980499267578, 0.12048178863525391, 0.12039513397216797, 0.11980454254150391, 0.12066611480712891, 0.11960630035400391, 0.11962671661376953, 0.12055728149414062, 0.12065145874023438, 0.12129647827148438, 0.12016099548339844, 0.12142546844482421, 0.1209329605102539, 0.12010102081298828, 0.11969532775878906, 0.11957247924804687, 0.11990399932861329, 0.12059264373779296, 0.12017459106445312, 0.12155270385742188, 0.12117359924316407, 0.12063600158691407, 0.1197875213623047, 0.1196659164428711, 0.12016102600097656, 0.12054316711425782, 0.12056172943115234, 0.12114902496337891, 0.123021728515625, 0.12127001953125, 0.12031001281738281, 0.11983257293701172, 0.12001894378662109, 0.12062643432617187, 0.12052146911621094, 0.12310467529296874, 0.1254337921142578, 0.12119312286376953, 0.12068441772460937, 0.12236137390136718, 0.12020182037353516, 0.11992649841308593, 0.12093673706054688, 0.12032611083984375, 0.1203177261352539, 0.12018303680419921, 0.119984130859375, 0.12052480316162109, 0.12042034912109376, 0.120534912109375, 0.12044866943359375, 0.12021993255615235, 0.12033366394042969, 0.11986415863037109, 0.12155391693115235, 0.11994422149658203, 0.12039472198486328, 0.12057091522216797, 0.12109078216552735, 0.12073177337646485, 0.11987900543212891, 0.1201569595336914, 0.11979366302490234, 0.11976284790039063, 0.1204101791381836, 0.1202392349243164, 0.12086160278320313, 0.12110643005371094, 0.12051046752929688, 0.12005535888671875, 0.12948291015625, 0.11991657257080078, 0.12044108581542969, 0.12169161224365234, 0.12123600006103516, 0.12075945281982423, 0.12121129608154296, 0.12071916961669922, 0.11989440155029298, 0.11992454528808594, 0.12081606292724609, 0.12034172821044922, 0.12034553527832031, 0.12138687896728516, 0.12032816314697266, 0.12116095733642578, 0.11991721343994141, 0.11966636657714844, 
0.1196036148071289, 0.12136447906494141, 0.12033392333984375, 0.1209078369140625, 0.1211187515258789, 0.12101427459716797, 0.12104102325439453, 0.12062054443359375, 0.1203075180053711, 0.12088614654541016, 0.12059878540039062, 0.12024396514892578, 0.1214730224609375, 0.12027289581298828, 0.1208627166748047, 0.12002201843261719, 0.11941766357421875, 0.11946125030517578, 0.12074179077148438, 0.1205439682006836, 0.12096495819091797, 0.12142380523681641, 0.12030915069580078, 0.12002406311035156, 0.11960063934326172, 0.11949488067626954, 0.1201236801147461, 0.12053913879394532, 0.12055757141113281, 0.13013743591308594, 0.1210695343017578, 0.12022787475585937, 0.11966665649414063, 0.11994115447998047, 0.12322815704345703, 0.12099139404296876, 0.12142012786865235, 0.12117971038818359, 0.12113782501220703, 0.12057933044433594, 0.12056221008300781, 0.12064972686767578, 0.12080076599121094, 0.12067072296142578, 0.12134400177001953, 0.12096307373046875, 0.12078905487060547, 0.12259954833984375, 0.12025743865966797, 0.12015094757080078, 0.12052272033691407, 0.12094166564941407, 0.12126509094238282, 0.12140338897705077, 0.1210545883178711, 0.120814208984375, 0.1234698257446289, 0.12030528259277344, 0.12062950134277343, 0.12070310211181641, 0.12097740936279297, 0.12205465698242188, 0.12351593780517578, 0.12100911712646484, 0.12432745361328125, 0.12022627258300782, 0.12080886077880859, 0.12042063903808593, 0.12038768005371094, 0.12169862365722656, 0.12149292755126953, 0.1214316177368164, 0.12143504333496094, 0.12062710571289062, 0.12159587097167969, 0.12037152099609374, 0.12043020629882813, 0.1209468765258789, 0.12060813140869141, 0.11989670562744141, 0.11941388702392577, 0.1198900146484375, 0.11962038421630859, 0.12029039764404297, 0.12008493041992188, 0.12048249816894531, 0.12150694274902343, 0.12006185913085937, 0.12008319854736328, 0.11966057586669922, 0.11970297241210938, 0.12011574554443359, 0.1201495361328125]",tokens/s,8.27843036418592,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 269462 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = 
param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 267766 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,822.075392,5878.185984,0.0,5475.663872,5452.374016,s,1,16.659697265625,16.659697265625,0.0,16.659697265625,16.659697265625,16.659697265625,16.659697265625,[16.659697265625],,kWh,0.00026579074360421755,2.931148400345489e-05,9.015646101395713e-05,0.0003852586886216296,,MB,1337.249792,6475.874304,0.0,6060.76928,5886.76096,s,10,13.310466064453124,1.3310466064453124,0.007525457827884039,1.3319896240234375,1.3375368652343749,1.3386580200195313,1.3395549438476562,"[1.3114603271484375, 1.32512939453125, 1.3311239013671874, 1.3319124755859375, 1.3320579833984374, 1.3319212646484375, 1.335275634765625, 1.3372877197265625, 1.3397791748046874, 1.3345181884765625]",tokens/s,192.32985438704702,kWh,3.865838559623625e-05,4.263547150051814e-06,2.562599272299959e-05,6.854792546928766e-05,tokens/kWh,3734613.3854145403,MB,1393.115136,6494.748672,0.0,6077.546496,5886.76352,s,10,60.6947392578125,6.06947392578125,0.015386273656732771,6.073875732421875,6.085144921875,6.08661005859375,6.08778216796875,"[6.03820947265625, 6.0473984375, 6.0625771484375, 6.067029296875, 6.0700205078125, 6.07896533203125, 6.07991357421875, 6.07773095703125, 6.0848193359375, 
6.0880751953125]",tokens/s,10.379812281983034,kWh,0.00017818514051041648,1.9654212786404055e-05,0.00011813626117559429,0.0003159756144724148,tokens/kWh,199382.47483177218,,s,630,60.69095753479007,0.09633485322982546,0.0017675766299154972,0.09615695953369141,0.0971518310546875,0.09743502044677735,0.10834372085571291,"[0.10626665496826172, 0.09684735870361329, 0.09561119842529296, 0.09486803436279297, 0.09484620666503907, 0.09485183715820313, 0.09494322967529296, 0.09491001892089844, 0.0947470703125, 0.09495481872558593, 0.09490707397460937, 0.09544432067871093, 0.09623619079589844, 0.09563491058349609, 0.09499510192871094, 0.09498406219482422, 0.09490227508544923, 0.0951377944946289, 0.0949534683227539, 0.09575164794921875, 0.09616175842285156, 0.09558422088623048, 0.09650454711914062, 0.09574114990234375, 0.09509542083740234, 0.09521295928955079, 0.09515023803710937, 0.0952939224243164, 0.09506175994873046, 0.09671907043457031, 0.09598365020751953, 0.09520662689208985, 0.09607199859619141, 0.09658620452880859, 0.09558767700195313, 0.09533261108398437, 0.09517641448974609, 0.09534534454345703, 0.09517411041259766, 0.09567491149902344, 0.09653657531738281, 0.09567641448974609, 0.0967188491821289, 0.09622489929199218, 0.09672537231445312, 0.09550233459472657, 0.09538543701171875, 0.09538310241699219, 0.09532803344726562, 0.09565058898925781, 0.09687452697753907, 0.09623126220703125, 0.095828125, 0.09733446502685547, 0.09624447631835938, 0.09607135772705078, 0.09704895782470703, 0.09624153900146484, 0.09555366516113281, 0.09564774322509766, 0.09544694519042969, 0.09544239807128906, 0.09627820587158203, 0.10983916473388672, 0.09682892608642578, 0.09584690856933593, 0.09486675262451172, 0.09459986877441406, 0.09480806732177735, 0.09490220642089844, 0.09540819549560547, 0.09489923095703125, 0.09501181030273438, 0.09662777709960937, 0.09614015960693359, 0.09619667053222657, 0.0955588150024414, 0.09504444885253906, 0.0948854751586914, 0.09497232055664062, 0.09504259490966797, 0.09502735900878906, 0.09592473602294922, 0.0950664291381836, 0.09597132873535157, 0.09664717102050781, 0.0958439712524414, 0.09495705413818359, 0.0958966064453125, 0.0962737274169922, 0.09561904144287109, 0.09687462615966796, 0.09611923217773438, 0.09519878387451172, 0.09521395111083984, 0.09694812774658203, 0.09657484436035156, 0.09561939239501953, 0.09534639739990235, 0.09512422180175781, 0.09589475250244141, 0.09651590728759765, 0.09573993682861329, 0.09526163482666015, 0.09567027282714843, 0.09683763122558593, 0.09617362976074219, 0.09543929290771484, 0.09675571441650391, 0.09583990478515625, 0.09553750610351562, 0.0957132797241211, 0.09660374450683594, 0.09578307342529296, 0.09549616241455078, 0.09541251373291015, 0.09527091217041016, 0.09691340637207031, 0.09708748626708984, 0.09618431854248047, 0.09534464263916016, 0.09607123565673828, 0.09679277038574219, 0.09580086517333984, 0.09560288238525391, 0.09557376098632812, 0.10936934661865234, 0.0966593246459961, 0.09568585968017577, 0.09492105865478516, 0.09484652709960938, 0.09496883392333984, 0.09510707092285156, 0.09495731353759766, 0.09475917053222656, 0.09541190338134765, 0.09669766235351562, 0.09616438293457032, 0.09592620849609375, 0.09584207916259765, 0.095884033203125, 0.09600729370117188, 0.09540003204345703, 0.09517340850830078, 0.09506111907958985, 0.09524703979492187, 0.10209490966796875, 0.09585881805419921, 0.09557810974121093, 0.09623961639404296, 0.09543785858154297, 0.09534998321533203, 0.09608940887451171, 0.09538710021972656, 0.09512239837646484, 
0.09601433563232421, 0.09672223663330078, 0.0958511962890625, 0.09582796478271484, 0.09658953857421874, 0.09606172943115235, 0.09629901123046875, 0.09591190338134765, 0.0953854751586914, 0.09530793762207031, 0.09621228790283202, 0.09669292449951172, 0.09583964538574219, 0.09689558410644532, 0.09626764678955078, 0.09584294128417968, 0.09685606384277344, 0.09642508697509766, 0.09547567749023438, 0.09532303619384766, 0.09620409393310547, 0.09645536041259765, 0.09576636505126954, 0.09686147308349609, 0.09723766326904297, 0.09617625427246093, 0.09629609680175781, 0.09669522857666016, 0.09600204467773438, 0.09692940521240234, 0.09644070434570312, 0.09546758270263672, 0.09553414154052735, 0.09708223724365235, 0.10841907501220703, 0.09637296295166016, 0.0955921630859375, 0.09491629028320313, 0.09498390197753906, 0.09498076629638671, 0.09639068603515626, 0.09556835174560546, 0.09464217376708985, 0.09521151733398438, 0.09681100463867187, 0.09616793823242188, 0.09588937377929688, 0.09590541076660156, 0.09581199645996094, 0.09517056274414062, 0.0963892822265625, 0.0957376937866211, 0.09505353546142578, 0.09706543731689453, 0.0961526107788086, 0.09652851104736328, 0.09586969757080079, 0.09669395446777344, 0.09579859161376954, 0.09531894683837891, 0.0951910400390625, 0.09523814392089844, 0.09657548522949219, 0.09601638031005859, 0.09611993408203125, 0.09645145416259765, 0.09600204467773438, 0.09623551940917968, 0.09700694274902344, 0.09612355041503906, 0.09517056274414062, 0.095246337890625, 0.09617203521728515, 0.09648127746582032, 0.09621504211425781, 0.09645875549316406, 0.09718147277832032, 0.0962930908203125, 0.09614950561523437, 0.09666099548339843, 0.09587967681884765, 0.09723872375488281, 0.09613753509521485, 0.09545728302001953, 0.09592352294921876, 0.09688751983642578, 0.09633334350585937, 0.09689065551757813, 0.09594940948486329, 0.09666796875, 0.09664002990722656, 0.09709785461425781, 0.09659580993652343, 0.09602448272705078, 0.09612783813476562, 0.0968089599609375, 0.09757286071777344, 0.10815923309326173, 0.09650105285644531, 0.09576707458496093, 0.09520476531982422, 0.09494745635986328, 0.09486300659179688, 0.09502393341064454, 0.09490611267089843, 0.09492915344238281, 0.09703449249267578, 0.09744992065429688, 0.0967012481689453, 0.09685033416748047, 0.09586339569091797, 0.09649951934814453, 0.0958199691772461, 0.09501286315917969, 0.09484848022460937, 0.09505436706542969, 0.09592626953125, 0.09668608093261719, 0.09705455780029297, 0.09617219543457031, 0.09622937774658204, 0.09673932647705077, 0.09603849792480469, 0.09502556610107422, 0.09532621002197265, 0.09532851409912109, 0.09665267181396485, 0.09636605072021484, 0.09662147521972657, 0.09708953857421875, 0.09633507537841797, 0.09665792083740235, 0.09597747039794922, 0.095240478515625, 0.09534841918945312, 0.0967020492553711, 0.09602531433105468, 0.09627552032470703, 0.09653958129882813, 0.09650787353515625, 0.09680480194091796, 0.09701795196533203, 0.09618787384033203, 0.09534722900390626, 0.09613641357421875, 0.09649977874755859, 0.09585337829589843, 0.09622518157958984, 0.0971962890625, 0.09609190368652344, 0.09663680267333985, 0.09648707580566407, 0.09638054656982421, 0.09721097564697266, 0.09635606384277344, 0.096010498046875, 0.0970072021484375, 0.09608793640136719, 0.09733792114257812, 0.09646514892578124, 0.10851136016845703, 0.09656114959716797, 0.0961904296875, 0.09484233856201171, 0.0948470687866211, 0.09497222137451172, 0.10112409973144532, 0.09498860931396484, 0.0948814697265625, 0.09616130828857422, 0.09760537719726563, 
0.09681795501708984, 0.09593251037597657, 0.09571673583984375, 0.09516505432128906, 0.09593856048583985, 0.09623961639404296, 0.09552210998535156, 0.09501766204833985, 0.09593772888183594, 0.09676220703125, 0.09690188598632812, 0.09610006713867188, 0.096036865234375, 0.09591171264648438, 0.09631356811523438, 0.09666150665283203, 0.09653663635253906, 0.09571225738525391, 0.09527977752685547, 0.09706297302246093, 0.0962541732788086, 0.09698889923095703, 0.09609145355224609, 0.09571427154541015, 0.09666764831542969, 0.09604889678955078, 0.09664128112792969, 0.09700176239013672, 0.09610956573486328, 0.09527164459228515, 0.0969230728149414, 0.0968259506225586, 0.09728182220458985, 0.09617206573486328, 0.09560594940185548, 0.09603392028808594, 0.09667772674560547, 0.09669808197021484, 0.09666336059570313, 0.09596361541748047, 0.09644172668457031, 0.09652082824707031, 0.0966328353881836, 0.0969175033569336, 0.09635622406005859, 0.09588713836669922, 0.09707350158691407, 0.09668169403076173, 0.09684534454345703, 0.09614399719238281, 0.09642144012451172, 0.09677021026611328, 0.10964988708496094, 0.0966475830078125, 0.09566207885742188, 0.09492479705810547, 0.09488758087158203, 0.09506851196289062, 0.09481600189208984, 0.09489842987060547, 0.09489615631103515, 0.09702217864990234, 0.0987786865234375, 0.09708729553222656, 0.09656463623046875, 0.09670256042480468, 0.09583248138427734, 0.09502973175048827, 0.09490841674804687, 0.09509891510009766, 0.09507740783691407, 0.09603782653808594, 0.0972759017944336, 0.09697280120849609, 0.0973148193359375, 0.09692160034179688, 0.09595059204101562, 0.0949862060546875, 0.09600847625732421, 0.09642601776123047, 0.09544854736328125, 0.09559910583496094, 0.09677823638916015, 0.09729052734375, 0.09693536376953125, 0.09719631958007813, 0.09622732543945313, 0.09549987030029297, 0.09611692810058593, 0.09653062438964843, 0.09572557067871093, 0.09604303741455078, 0.09663613128662109, 0.0967524185180664, 0.09743174743652344, 0.09725727844238281, 0.09647286224365234, 0.09633200073242187, 0.09624332427978516, 0.096783935546875, 0.09593939208984376, 0.09569315338134765, 0.09687753295898438, 0.09689798736572265, 0.09714838409423829, 0.09716671752929687, 0.09630941009521485, 0.09636099243164062, 0.09648576354980469, 0.09692745971679688, 0.096129150390625, 0.09591769409179687, 0.0967928009033203, 0.09680461120605469, 0.09733773040771485, 0.10801532745361328, 0.09650822448730469, 0.0956409912109375, 0.09493132781982422, 0.0948922576904297, 0.0949227523803711, 0.09486537933349609, 0.09502313232421875, 0.09490000152587891, 0.09736009979248048, 0.09768547058105469, 0.09675369262695313, 0.09597337341308594, 0.09593881225585937, 0.09597718048095703, 0.09590975952148438, 0.09547695922851562, 0.09505478668212891, 0.09536102294921875, 0.09711411285400391, 0.09713459014892578, 0.0969656982421875, 0.0967949447631836, 0.09663568115234375, 0.09668386840820313, 0.09590988922119141, 0.09512716674804687, 0.09530815887451172, 0.09577267456054687, 0.09650176239013672, 0.09672268676757813, 0.09679682922363281, 0.09697046661376953, 0.09633216094970704, 0.09659008026123046, 0.09709724426269531, 0.09603094482421876, 0.09539993286132813, 0.09608147430419922, 0.09657183837890625, 0.09590908813476562, 0.09690751647949218, 0.09661014556884766, 0.0971475830078125, 0.09611257934570312, 0.0965302734375, 0.09645692443847656, 0.09603062438964843, 0.09686969757080079, 0.09638992309570313, 0.09637068939208984, 0.09694153594970703, 0.09662313842773437, 0.09760755157470703, 0.09669602966308594, 
0.09625846099853516, 0.09674956512451172, 0.09596514892578124, 0.09711017608642578, 0.0962864990234375, 0.09649932861328125, 0.09687702178955078, 0.0966819839477539, 0.10890684509277344, 0.09648518371582031, 0.09577401733398437, 0.09491311645507812, 0.09495724487304688, 0.09493939208984375, 0.09493081665039063, 0.09491641235351563, 0.09498041534423828, 0.09799225616455078, 0.09920256042480469, 0.09724205017089843, 0.09632563018798829, 0.09594802856445313, 0.09559117126464843, 0.0950269775390625, 0.0949900131225586, 0.094974365234375, 0.09524237060546875, 0.09721158599853516, 0.09742623901367188, 0.0976374740600586, 0.09711065673828125, 0.09620304107666015, 0.09600819396972657, 0.09596518707275391, 0.09580137634277344, 0.09518450927734375, 0.09569519805908203, 0.0969195556640625, 0.09700057220458984, 0.09741606140136719, 0.09708873748779297, 0.0964755859375, 0.09610022735595704, 0.09643373107910157, 0.09662681579589844, 0.09576322937011719, 0.09576038360595703, 0.09675344085693359, 0.0965429458618164, 0.09755213165283202, 0.0972147216796875, 0.09659107208251953, 0.09613801574707032, 0.09623932647705079, 0.09608140563964844, 0.0961869125366211, 0.09708137512207031, 0.09707746887207032, 0.09625599670410157, 0.09703628540039062, 0.09715017700195312, 0.09655375671386719, 0.09630271911621094, 0.09641203308105468, 0.09628652954101563, 0.09643785858154297, 0.09711622619628907, 0.09747920227050781, 0.097185791015625, 0.09651404571533204, 0.09705888366699218, 0.10859417724609376, 0.0966645736694336, 0.09570636749267578, 0.09496348571777344, 0.09490045166015625, 0.09495823669433594, 0.09502342224121094, 0.09494230651855469, 0.09494598388671875, 0.09753929901123047, 0.0983334732055664, 0.0980289306640625, 0.09678707122802735, 0.0960921630859375, 0.09594588470458984, 0.09513251495361329, 0.0951844482421875, 0.0964807357788086, 0.09568764495849609, 0.09622528076171875, 0.09717337799072266, 0.09742144012451172, 0.09717699432373048, 0.09678409576416015, 0.09583821105957031, 0.09517545318603515, 0.095502685546875, 0.09682508850097657, 0.09577062225341797, 0.0959543685913086, 0.09726624298095703, 0.09727196502685546, 0.09800883483886719, 0.09686557006835937, 0.09612166595458985, 0.09541545867919922, 0.09530764770507813, 0.09665583801269531, 0.09589984130859375, 0.09689049530029296, 0.09635091400146484, 0.09726067352294922, 0.09879436492919921, 0.09708134460449219, 0.09614112091064453, 0.0954343032836914, 0.0963057632446289, 0.09665715026855469, 0.09634435272216797, 0.09683968353271484, 0.09606902313232422, 0.09762687683105468, 0.09772013092041015, 0.09734339141845703, 0.09689510345458985, 0.09618409729003906, 0.09544012451171875, 0.09697789001464843, 0.09653043365478516, 0.09722201538085938, 0.0961112289428711, 0.09743769836425781, 0.09744537353515625]",tokens/s,10.38045905996562,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call 
last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpg7n4bfmp/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise 
ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8tkr7657/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,968.036352,1117.650944,0.0,715.128832,705.334272,s,1,9.6525888671875,9.6525888671875,0.0,9.6525888671875,9.6525888671875,9.6525888671875,9.6525888671875,[9.6525888671875],,kWh,3.1467435416668834e-05,3.4638936184990963e-06,9.937230171999309e-06,4.486855920716724e-05,,MB,1415.872512,1480.45824,0.0,1065.353216,1002.182656,s,10,0.5653496246337891,0.0565349624633789,0.00026067475044885244,0.0565526237487793,0.056705346298217774,0.05691934452056885,0.05709054309844971,"[0.057133342742919924, 0.05655539321899414, 0.056446144104003906, 0.05654985427856445, 0.05665779113769531, 0.056529598236083986, 0.05626335906982422, 0.056065086364746095, 0.05658252716064453, 0.0565665283203125]",tokens/s,4528.171397758098,kWh,1.7738441104292936e-06,1.956219530218424e-07,1.1706575021818204e-06,3.1401235656329564e-06,tokens/kWh,81525454.22154365,MB,1464.287232,1518.206976,0.0,1101.0048,1002.185216,s,10,22.072639648437505,2.20726396484375,0.01558987783192663,2.201259521484375,2.2329701416015624,2.2354812866210936,2.2374902026367187,"[2.203192626953125, 2.18940869140625, 2.200020751953125, 2.217302734375, 2.232412109375, 2.19568603515625, 2.237992431640625, 2.1974287109375, 2.196697265625, 2.202498291015625]",tokens/s,28.54212319116971,kWh,6.455707123665403e-05,7.120453707757209e-06,2.5414113765618136e-05,9.709163871002937e-05,tokens/kWh,648871.5283522372,,s,630,22.070645889282222,0.03503277125282893,0.0005853442147450256,0.03486844825744628,0.03547393608093262,0.035897119140624996,0.03760573219299317,"[0.03492454528808594, 0.03514931106567383, 0.034998783111572264, 0.035321216583251956, 0.0348084487915039, 0.03476623916625977, 0.034840545654296874, 0.0349000015258789, 0.034785888671875, 0.03477910232543945, 0.03475049591064453, 0.034680831909179685, 0.03531366348266601, 0.034713600158691404, 0.034726913452148435, 0.0345849609375, 0.03469500732421875, 0.034710304260253906, 0.034680831909179685, 0.0345164794921875, 0.034654335021972654, 0.03514524841308594, 0.03533500671386719, 0.03503104019165039, 0.03529318237304688, 0.03513139343261719, 0.03495711898803711, 0.03527811050415039, 0.03474700927734375, 0.03483225631713867, 0.034820480346679686, 0.03503638458251953, 0.03520185470581055, 0.035076095581054685, 0.035041088104248046, 0.034823936462402345, 0.034810302734375, 0.034816001892089846, 0.03496345520019531, 0.03491839981079101, 0.03499980926513672, 0.03481363296508789, 0.03741574478149414, 0.03520102310180664, 
0.03484672164916992, 0.03480271911621094, 0.0347239990234375, 0.03478515243530273, 0.034720703125, 0.03483180618286133, 0.03480201721191406, 0.035049278259277346, 0.03498783874511719, 0.035254432678222654, 0.034917984008789066, 0.03493769454956055, 0.03493856048583984, 0.03472211074829101, 0.03512115097045899, 0.03569801712036133, 0.0350214729309082, 0.03489382553100586, 0.034947071075439456, 0.034904159545898435, 0.03477104187011719, 0.03476809692382812, 0.034841281890869144, 0.035266654968261715, 0.035471263885498046, 0.03491839981079101, 0.0347586555480957, 0.034641151428222654, 0.03457510375976563, 0.034665889739990234, 0.034528865814208984, 0.0346317138671875, 0.03476380920410156, 0.03478691101074219, 0.03477948760986328, 0.03470950317382813, 0.034675968170166015, 0.03467913436889648, 0.03469331359863281, 0.03461347198486328, 0.03463577651977539, 0.03464751815795898, 0.03486288070678711, 0.03498646545410156, 0.03468918228149414, 0.03470963287353516, 0.034883201599121096, 0.03476092910766602, 0.0346993293762207, 0.034633823394775394, 0.03465427017211914, 0.03476860809326172, 0.03468105697631836, 0.03462144088745117, 0.034667648315429685, 0.034659198760986325, 0.03468003082275391, 0.034675487518310545, 0.034541568756103515, 0.034616352081298825, 0.03459356689453125, 0.034635967254638675, 0.03461862564086914, 0.0348922233581543, 0.034640094757080075, 0.03493596649169922, 0.03499923324584961, 0.03513139343261719, 0.0347852783203125, 0.034738174438476564, 0.035157310485839845, 0.0348658561706543, 0.03490102386474609, 0.03470025634765625, 0.034643489837646486, 0.03464812850952149, 0.034740638732910153, 0.034655681610107424, 0.03464659118652344, 0.03453952026367187, 0.03459628677368164, 0.03463232040405274, 0.034949630737304685, 0.034805248260498044, 0.03484364700317383, 0.03530912017822266, 0.03784339141845703, 0.035227649688720705, 0.034909374237060545, 0.03497798538208008, 0.035172065734863284, 0.0348433609008789, 0.0348197135925293, 0.03480838394165039, 0.0347193603515625, 0.034748672485351566, 0.03469939041137695, 0.0346901741027832, 0.03486764907836914, 0.03486272048950195, 0.0347490234375, 0.034750560760498046, 0.034789505004882815, 0.03486515045166016, 0.03498393630981445, 0.03478937530517578, 0.034862144470214844, 0.03468281555175781, 0.03488460922241211, 0.03476399993896485, 0.03476310348510742, 0.03476115036010742, 0.034783199310302736, 0.03469267272949219, 0.034700767517089844, 0.034713985443115235, 0.03470604705810547, 0.034729984283447264, 0.03474431991577148, 0.03506790542602539, 0.03474774551391602, 0.03488745498657227, 0.03473616027832031, 0.03485168075561523, 0.03480575942993164, 0.0349224967956543, 0.03584320068359375, 0.03498073577880859, 0.03512934494018555, 0.034844318389892576, 0.03489379119873047, 0.0348370246887207, 0.034908000946044924, 0.03506175994873047, 0.03495222473144531, 0.034917343139648435, 0.03516211318969727, 0.034938846588134766, 0.034846111297607424, 0.034859329223632815, 0.03475270462036133, 0.034766559600830076, 0.034812030792236326, 0.03484454345703125, 0.03464028930664063, 0.034826465606689457, 0.03512124633789063, 0.03476083374023437, 0.03483427047729492, 0.034678848266601565, 0.034678207397460935, 0.03463248062133789, 0.03489177703857422, 0.03496147155761719, 0.03466233444213867, 0.03502489471435547, 0.03462102508544922, 0.034635326385498044, 0.034748607635498044, 0.03469753646850586, 0.04017308807373047, 0.03512982559204102, 0.034929214477539064, 0.03480521774291992, 0.03483884811401367, 0.034904064178466795, 0.034971649169921876, 
0.03485651016235351, 0.034704830169677736, 0.03473715209960938, 0.03481804656982422, 0.0347770881652832, 0.03472825622558594, 0.03474809646606445, 0.034772991180419925, 0.03520512008666992, 0.03507120132446289, 0.03480031967163086, 0.03538703918457031, 0.03524038314819336, 0.036071487426757816, 0.03545695877075195, 0.03514486312866211, 0.03496227264404297, 0.03511417770385742, 0.03481068801879883, 0.03473408126831055, 0.03476275253295898, 0.034746623992919924, 0.034813697814941404, 0.03578499221801758, 0.03640204620361328, 0.03684438323974609, 0.038299648284912106, 0.03531078338623047, 0.03522598266601563, 0.03502227020263672, 0.03522390365600586, 0.03522832107543945, 0.03494857788085937, 0.03492099380493164, 0.03489791870117188, 0.03490326309204102, 0.035412769317626956, 0.03504451370239258, 0.03550275039672852, 0.036771072387695315, 0.03535968017578125, 0.036036224365234376, 0.035533153533935546, 0.03537267303466797, 0.03532646560668945, 0.03517424011230469, 0.03611663818359375, 0.03506995010375977, 0.035160255432128903, 0.035071487426757815, 0.03513967895507813, 0.03957110214233398, 0.0351319694519043, 0.035250175476074216, 0.0351759033203125, 0.03499676895141601, 0.03505740737915039, 0.03518057632446289, 0.035004638671875, 0.03509241485595703, 0.03511097717285156, 0.03513516616821289, 0.03521900939941406, 0.03583977508544922, 0.03504009628295898, 0.03514326477050781, 0.034931102752685544, 0.035072063446044924, 0.03503519821166992, 0.03546928024291992, 0.03539971160888672, 0.03542630386352539, 0.03508841705322266, 0.035006431579589846, 0.035143680572509765, 0.03523788833618164, 0.03513078308105469, 0.03524185562133789, 0.03498057556152344, 0.03515990447998047, 0.03995830535888672, 0.035788673400878906, 0.03534099197387695, 0.03533804702758789, 0.03512115097045899, 0.03512115097045899, 0.03522150421142578, 0.03513753509521484, 0.03513958358764648, 0.03511404800415039, 0.03508115386962891, 0.035278526306152344, 0.035223678588867186, 0.035653759002685546, 0.03591945648193359, 0.035406303405761716, 0.035738689422607425, 0.03512636947631836, 0.03744732666015625, 0.03642496109008789, 0.03509743881225586, 0.0349021110534668, 0.03477468872070313, 0.03458070373535156, 0.03549798583984375, 0.035030879974365235, 0.03491382217407227, 0.03468582534790039, 0.034955009460449216, 0.03716422271728516, 0.03635897445678711, 0.034748031616210935, 0.034743904113769535, 0.03464886474609375, 0.034697025299072266, 0.03458886337280274, 0.03452275085449219, 0.03455628967285156, 0.03504035186767578, 0.03513158416748047, 0.03488431930541992, 0.03473750305175781, 0.034724510192871094, 0.03474739074707031, 0.03533926391601563, 0.03496550369262695, 0.03460111999511719, 0.03464380645751953, 0.0348037109375, 0.03467673492431641, 0.03453366470336914, 0.03490163040161133, 0.03461948776245117, 0.03452924728393555, 0.03485036849975586, 0.035076576232910155, 0.03651331329345703, 0.03525680160522461, 0.03502899169921875, 0.03481923294067383, 0.034635841369628904, 0.03479776000976562, 0.03459132766723633, 0.03461529541015625, 0.0349463996887207, 0.03482387161254883, 0.03495830535888672, 0.03461062240600586, 0.0347367057800293, 0.03459849548339844, 0.03482838439941406, 0.03483065414428711, 0.03460419082641602, 0.03473289489746094, 0.03481804656982422, 0.034516990661621096, 0.034471553802490236, 0.034705665588378905, 0.03487871932983398, 0.034459648132324217, 0.03464896011352539, 0.03468697738647461, 0.034435073852539064, 0.03458662414550781, 0.03444732666015625, 0.034457630157470706, 0.0345272331237793, 0.03500848007202149, 
0.036518497467041014, 0.03628851318359375, 0.035465217590332034, 0.035321537017822265, 0.03530297470092773, 0.03570150375366211, 0.035388992309570315, 0.035621601104736327, 0.03577212905883789, 0.03618761444091797, 0.035746593475341794, 0.03590732955932617, 0.03596857452392578, 0.035523006439208984, 0.03674726486206055, 0.03554304122924805, 0.03569049453735352, 0.0353361930847168, 0.03554908752441406, 0.03557494354248047, 0.035644351959228514, 0.03544390487670898, 0.03532064056396484, 0.03523513412475586, 0.03534713745117188, 0.03597107315063477, 0.03535257720947266, 0.035670272827148436, 0.03571231842041016, 0.0381014404296875, 0.036201473236083984, 0.035627166748046876, 0.03552073669433594, 0.036060897827148435, 0.035408798217773436, 0.03522911834716797, 0.035240222930908206, 0.035118751525878907, 0.035174079895019535, 0.035251136779785155, 0.035209217071533204, 0.03526015853881836, 0.03643008041381836, 0.03531516647338867, 0.03522956848144531, 0.03526710510253906, 0.03545510482788086, 0.03545087814331055, 0.03530892944335937, 0.03516684722900391, 0.03520713424682617, 0.03546547317504883, 0.03536054229736328, 0.03588463973999023, 0.035456897735595704, 0.034926273345947265, 0.035126113891601564, 0.03483443069458008, 0.034598209381103515, 0.03461977767944336, 0.03468659210205078, 0.03473273468017578, 0.03510857772827149, 0.03491616058349609, 0.03479190444946289, 0.03473612976074219, 0.035251678466796874, 0.03488956832885742, 0.03504403305053711, 0.03497574234008789, 0.034789600372314454, 0.034870849609375, 0.034681121826171876, 0.03473968124389649, 0.034804191589355465, 0.03472895812988281, 0.034763137817382814, 0.035076736450195316, 0.034902015686035154, 0.03476025772094726, 0.034920894622802734, 0.03491635131835937, 0.03469311904907227, 0.03502489471435547, 0.03501391983032227, 0.03496214294433594, 0.03490816116333008, 0.034692577362060543, 0.03484451293945313, 0.03501046371459961, 0.03473283386230469, 0.034697216033935545, 0.03469107055664063, 0.034582527160644534, 0.034802879333496094, 0.03473030471801758, 0.034726593017578126, 0.03501651382446289, 0.03463948822021484, 0.0347031021118164, 0.034898239135742186, 0.03530924987792969, 0.03477782440185547, 0.034788288116455075, 0.03470435333251953, 0.034740222930908206, 0.03478927993774414, 0.03465430450439453, 0.034678783416748044, 0.034753822326660154, 0.03485974502563476, 0.0349409294128418, 0.03493856048583984, 0.034742462158203126, 0.03505769729614258, 0.03485081481933594, 0.03482223892211914, 0.03472588729858399, 0.035110080718994144, 0.03581155014038086, 0.03474809646606445, 0.03462876892089844, 0.03571862411499024, 0.03473846435546875, 0.03530752182006836, 0.03501670455932617, 0.034716670989990234, 0.034812606811523435, 0.03475235366821289, 0.034625118255615234, 0.03474111938476562, 0.03533824157714844, 0.03640348815917969, 0.03506515121459961, 0.034826656341552735, 0.03468902587890625, 0.034680831909179685, 0.034770336151123044, 0.03470940780639648, 0.03460371017456055, 0.03464995193481445, 0.03470975875854492, 0.03497974395751953, 0.034869247436523435, 0.03457843017578125, 0.03489555358886719, 0.03507231903076172, 0.03546489715576172, 0.035352897644042966, 0.035323326110839846, 0.03501728057861328, 0.03487539291381836, 0.03485696029663086, 0.035026943206787106, 0.03469510269165039, 0.034693183898925783, 0.03481135940551758, 0.034648193359375, 0.03466896057128906, 0.0347955207824707, 0.035407135009765625, 0.034818782806396484, 0.03473104095458984, 0.034667072296142576, 0.03476726531982422, 0.03469424057006836, 0.034845279693603515, 
0.03492012786865235, 0.03467737579345703, 0.034713600158691404, 0.03483852767944336, 0.0347729263305664, 0.03527840042114258, 0.03482787322998047, 0.03494377517700195, 0.03481407928466797, 0.03468406295776367, 0.03463676834106445, 0.03465974426269531, 0.03514211273193359, 0.034721790313720705, 0.03480495834350586, 0.034691871643066405, 0.0345272331237793, 0.034584190368652346, 0.034695552825927733, 0.03525763320922851, 0.03463587188720703, 0.034714080810546874, 0.03488681411743164, 0.03561116790771485, 0.03475487899780273, 0.0352740478515625, 0.03472864151000977, 0.034732032775878906, 0.034942432403564455, 0.03498448181152344, 0.0348037109375, 0.03477503967285156, 0.03476224136352539, 0.034924831390380856, 0.035047744750976564, 0.03503708648681641, 0.03474227142333984, 0.034917633056640626, 0.034992897033691406, 0.03462758255004883, 0.03485081481933594, 0.034902015686035154, 0.03481932830810547, 0.034751232147216794, 0.03462940979003906, 0.034993438720703124, 0.034706367492675784, 0.03463372802734375, 0.03505766296386719, 0.034751617431640625, 0.034677631378173826, 0.03464191818237305, 0.03495731353759766, 0.03473174285888672, 0.03767043304443359, 0.03699289703369141, 0.0348803825378418, 0.03474431991577148, 0.034649600982666014, 0.03464969635009765, 0.03470572662353515, 0.0346993293762207, 0.035590686798095704, 0.03561471939086914, 0.034969215393066404, 0.03482195281982422, 0.0346420783996582, 0.034707328796386716, 0.03519952011108399, 0.03493270492553711, 0.0346561279296875, 0.03487334442138672, 0.034670177459716796, 0.034635967254638675, 0.03513695907592773, 0.034687393188476565, 0.03470336151123047, 0.035161727905273436, 0.03459328079223633, 0.03525807952880859, 0.03563161468505859, 0.0348284797668457, 0.034979808807373045, 0.03465119934082031]",tokens/s,28.54470155338479,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,1879.138304,1260.25728,0.0,857.735168,836.20864,s,1,10.8408974609375,10.8408974609375,0.0,10.8408974609375,10.8408974609375,10.8408974609375,10.8408974609375,[10.8408974609375],,kWh,5.023002842083504e-05,5.533503444879641e-06,1.5682512545999935e-05,7.144604441171461e-05,,MB,1926.803456,1610.481664,0.0,1186.988032,1141.95456,s,10,0.6291210594177247,0.06291210594177246,0.00014660878102622184,0.06296092796325684,0.06309957962036133,0.0631221736907959,0.06314024894714355,"[0.06309455871582031, 0.0629511375427246, 0.06277273559570312, 0.06314476776123047, 0.06277936172485352, 0.06274528121948242, 0.0629799690246582, 0.06297071838378906, 0.06298796844482422, 0.06269456100463867]",tokens/s,4069.1691395124762,kWh,1.8201295519927712e-06,2.0072930487647254e-07,8.206976889937938e-07,2.8415565458630375e-06,tokens/kWh,90091467.78117262,MB,1935.855616,1648.2304,0.0,1222.639616,1140.908544,s,10,38.58473901367188,3.858473901367187,0.010880827424593573,3.8577065429687503,3.8706626953125,3.8758563720703125,3.8800113134765626,"[3.881050048828125, 
3.854677490234375, 3.869508544921875, 3.85395947265625, 3.8630380859375, 3.86251123046875, 3.846297119140625, 3.860735595703125, 3.85092431640625, 3.842037109375]",tokens/s,16.32769888055404,kWh,0.00011093725466884025,1.2236546480918302e-05,3.914902873120618e-05,0.0001623228298809647,tokens/kWh,388115.4613075649,,s,630,38.582610435485854,0.06124223878648545,0.0007062953440699154,0.06108078384399414,0.061756304931640624,0.06237941341400146,0.06414799041748047,"[0.06163059234619141, 0.061222911834716794, 0.06149660873413086, 0.06129235076904297, 0.0625447998046875, 0.06151772689819336, 0.061343231201171876, 0.06146928024291992, 0.061477951049804684, 0.060999969482421874, 0.06364582443237304, 0.06214060974121094, 0.06176598358154297, 0.06273356628417968, 0.06303318405151367, 0.062437503814697266, 0.06175990295410156, 0.06175289535522461, 0.06149801635742187, 0.06119644927978515, 0.06106316757202149, 0.061575233459472654, 0.06118214416503906, 0.061232318878173826, 0.061523841857910155, 0.06126988983154297, 0.0614095344543457, 0.06119686508178711, 0.06136537551879883, 0.06111116790771484, 0.06112870407104492, 0.06192127990722656, 0.06165926361083984, 0.061284191131591795, 0.061337921142578126, 0.06128607940673828, 0.061083553314208984, 0.06111577606201172, 0.06123737716674805, 0.061383583068847655, 0.06110351943969727, 0.061137214660644534, 0.061461761474609376, 0.061653118133544925, 0.06310771179199219, 0.06180227279663086, 0.061743583679199215, 0.06191206359863281, 0.06162287902832031, 0.06184566497802734, 0.0613480339050293, 0.06191836929321289, 0.06188819122314453, 0.06103526306152344, 0.061163745880126956, 0.06108134460449219, 0.06278742218017579, 0.06229238510131836, 0.06146047973632812, 0.061462528228759764, 0.06120774459838867, 0.06136086273193359, 0.061091136932373044, 0.064921630859375, 0.0610123519897461, 0.06081536102294922, 0.06079283142089844, 0.060819007873535155, 0.06089094543457031, 0.06101190567016602, 0.06083590316772461, 0.06082595062255859, 0.06101020812988281, 0.06104671859741211, 0.06095663833618164, 0.060830913543701175, 0.06155699157714844, 0.06091433715820312, 0.06208217620849609, 0.061285537719726564, 0.0610544319152832, 0.06066022491455078, 0.06089648056030274, 0.06099158477783203, 0.060770751953125, 0.060837055206298826, 0.060778942108154294, 0.06092867279052734, 0.06146428680419922, 0.060715007781982425, 0.060655006408691405, 0.061157760620117185, 0.060979423522949217, 0.060846080780029295, 0.060723201751708984, 0.06106016159057617, 0.060767230987548826, 0.06068627166748047, 0.061068607330322267, 0.06148575973510742, 0.06667878723144531, 0.06200934219360352, 0.06096012878417969, 0.061002208709716794, 0.06145395278930664, 0.06091215896606445, 0.060783710479736325, 0.060930145263671874, 0.06145926284790039, 0.061127838134765626, 0.06246396636962891, 0.06158374404907226, 0.06129043197631836, 0.061078079223632814, 0.060672000885009764, 0.060602336883544924, 0.06074092864990235, 0.06054713439941406, 0.060631038665771485, 0.06106748962402344, 0.06087724685668945, 0.06136832046508789, 0.06124652862548828, 0.06083065414428711, 0.06085788726806641, 0.061159904479980466, 0.06100912094116211, 0.06117660903930664, 0.061483009338378906, 0.06120774459838867, 0.06085209655761719, 0.06086751937866211, 0.06089113616943359, 0.06151478576660156, 0.06113193511962891, 0.06147628784179687, 0.060993824005126956, 0.06092351913452149, 0.0629969596862793, 0.061521312713623044, 0.06151987075805664, 0.06165139389038086, 0.060886913299560544, 0.06100364685058594, 0.06151318359375, 
0.061419551849365234, 0.06082592010498047, 0.06124755096435547, 0.061645023345947264, 0.06138268661499023, 0.06093033599853516, 0.06110924911499024, 0.061451263427734375, 0.06139289474487305, 0.06079292678833008, 0.06131087875366211, 0.06140723037719727, 0.061976577758789064, 0.06174636840820313, 0.06140518569946289, 0.06100870513916016, 0.0618056640625, 0.06148412704467773, 0.061066112518310546, 0.06150239944458008, 0.06101401519775391, 0.06174512100219726, 0.061677120208740235, 0.06137062454223633, 0.061472801208496096, 0.061352127075195315, 0.06321561431884766, 0.06181683349609375, 0.061437950134277344, 0.06136748886108399, 0.06404972839355469, 0.0619420166015625, 0.06208512115478516, 0.06150143814086914, 0.06098739242553711, 0.06109798431396484, 0.06107955169677735, 0.0609312629699707, 0.060918590545654294, 0.06107955169677735, 0.06206259155273437, 0.061159423828125, 0.061044734954833986, 0.06134783935546875, 0.061362560272216794, 0.06125721740722656, 0.06175590515136719, 0.061679615020751956, 0.06103859329223633, 0.060929473876953126, 0.06111004638671875, 0.061026817321777345, 0.06106140899658203, 0.06097100830078125, 0.060829696655273435, 0.06103244781494141, 0.06085836791992188, 0.060787776947021484, 0.06097401428222656, 0.061711742401123044, 0.061561473846435545, 0.061308929443359375, 0.06242035293579101, 0.06118259048461914, 0.06141952133178711, 0.061429759979248044, 0.061558143615722656, 0.06101599884033203, 0.0607341423034668, 0.06093619155883789, 0.06058393478393555, 0.06071091079711914, 0.061265758514404293, 0.061300609588623045, 0.061434303283691406, 0.060915550231933596, 0.06137971115112305, 0.061204448699951175, 0.06089436721801758, 0.0635860481262207, 0.061943809509277345, 0.06097100830078125, 0.06105465698242187, 0.061112640380859375, 0.06078976058959961, 0.06106316757202149, 0.06071603012084961, 0.060959934234619144, 0.06087353515625, 0.06098739242553711, 0.06107455825805664, 0.06101046371459961, 0.06149568176269531, 0.061144256591796876, 0.06070556640625, 0.06099148941040039, 0.06155462265014648, 0.061158496856689455, 0.06116451263427734, 0.06090956878662109, 0.06101401519775391, 0.060913665771484375, 0.060728416442871094, 0.060760704040527344, 0.06115161514282227, 0.06083164978027344, 0.061448192596435545, 0.06100112152099609, 0.06084844970703125, 0.06115523147583008, 0.06133769607543945, 0.06137855911254883, 0.06156841659545898, 0.06158550262451172, 0.06145596694946289, 0.06137654495239258, 0.06163955307006836, 0.06086761474609375, 0.060638175964355466, 0.06113232040405273, 0.0612889289855957, 0.060747776031494144, 0.06100118255615234, 0.060948928833007815, 0.061335647583007816, 0.060897281646728516, 0.06246915054321289, 0.06179500961303711, 0.06112694549560547, 0.061304065704345705, 0.061177951812744144, 0.06065615844726562, 0.06091497421264649, 0.06087356948852539, 0.06085017776489258, 0.06076422500610352, 0.061090782165527345, 0.060738529205322266, 0.060628318786621095, 0.06074163055419922, 0.06128028869628906, 0.06091558456420899, 0.06089372634887695, 0.0610654411315918, 0.060794750213623044, 0.060946048736572264, 0.06181321716308594, 0.061394432067871096, 0.06121017456054687, 0.06100579071044922, 0.06120707321166992, 0.06418812561035156, 0.061499263763427736, 0.06106959915161133, 0.06568361663818359, 0.06139731216430664, 0.061284481048583986, 0.061134719848632814, 0.06123702239990234, 0.06099350357055664, 0.06170771026611328, 0.061719486236572266, 0.06180659103393555, 0.061704193115234375, 0.06162227249145508, 0.06122905731201172, 0.061204063415527345, 
0.06111062240600586, 0.06137247848510742, 0.06100166320800781, 0.06158335876464844, 0.06168112182617187, 0.06232937622070313, 0.06151065444946289, 0.061080223083496095, 0.06128252792358398, 0.061149311065673825, 0.061246910095214845, 0.061345470428466796, 0.06147980880737305, 0.06138195037841797, 0.06254463958740235, 0.06145145416259765, 0.061717247009277346, 0.06109183883666992, 0.06102735900878906, 0.06290070343017579, 0.06160611343383789, 0.06165273666381836, 0.061362720489501955, 0.06138060760498047, 0.06341996765136719, 0.06145478439331055, 0.06129459381103516, 0.06117375946044922, 0.061112415313720705, 0.060939777374267576, 0.06101177597045898, 0.06121123123168945, 0.06092800140380859, 0.06134988784790039, 0.06095452880859375, 0.06091785430908203, 0.061042686462402344, 0.060773792266845705, 0.060854881286621094, 0.061076801300048826, 0.06091584014892578, 0.06134636688232422, 0.06091481781005859, 0.060838241577148434, 0.061516319274902344, 0.06144409561157226, 0.0611223030090332, 0.060798465728759764, 0.060673919677734375, 0.06070528030395508, 0.06101824188232422, 0.06105254364013672, 0.06119619369506836, 0.06152675247192383, 0.06169964981079101, 0.0613579216003418, 0.06146928024291992, 0.061456382751464846, 0.06099311828613281, 0.0609958381652832, 0.061854881286621095, 0.061064193725585934, 0.06117375946044922, 0.06099148941040039, 0.06085631942749024, 0.06100182342529297, 0.06125411224365234, 0.061224960327148435, 0.06092390441894531, 0.06118809509277344, 0.06101580810546875, 0.06121433639526367, 0.06356032180786132, 0.061531776428222655, 0.060743263244628906, 0.061209056854248045, 0.061419841766357425, 0.06092790222167969, 0.06095881652832031, 0.06101615905761719, 0.061009822845458986, 0.0607907829284668, 0.06051430511474609, 0.060563488006591795, 0.06053385543823242, 0.06109196853637695, 0.060598529815673825, 0.060719615936279295, 0.0607088623046875, 0.06107872009277344, 0.06059500885009766, 0.06084198379516602, 0.06103859329223633, 0.060801025390625, 0.060871967315673826, 0.06111305618286133, 0.06084991836547852, 0.060813568115234376, 0.0609156494140625, 0.06083343887329101, 0.060698848724365234, 0.06054111862182617, 0.060788734436035156, 0.06059132766723633, 0.06081001663208008, 0.06132320022583008, 0.060899391174316406, 0.06103859329223633, 0.06159296035766602, 0.06146480178833008, 0.060993953704833986, 0.060931327819824216, 0.06118886566162109, 0.061050174713134765, 0.061682369232177736, 0.060868415832519535, 0.060626785278320314, 0.06053433609008789, 0.06281491088867187, 0.06120867156982422, 0.06211379241943359, 0.06152627182006836, 0.06099881744384766, 0.06081548690795899, 0.06084451293945312, 0.06088118362426758, 0.060885726928710936, 0.060988414764404295, 0.06090956878662109, 0.06119558334350586, 0.06080316925048828, 0.06132608032226562, 0.06086969757080078, 0.06070515060424805, 0.06061510467529297, 0.06061670303344727, 0.06073548889160156, 0.060980960845947264, 0.060983585357666015, 0.06209270477294922, 0.06101667022705078, 0.0610530891418457, 0.060872543334960935, 0.06077804946899414, 0.060851806640625, 0.06114579010009766, 0.06099369430541992, 0.061255584716796874, 0.06210569763183594, 0.06060031890869141, 0.061105983734130856, 0.06092524719238281, 0.06096985626220703, 0.06127731323242187, 0.06203020858764648, 0.06092031860351563, 0.06099353790283203, 0.061843456268310545, 0.06146047973632812, 0.06136342239379883, 0.06150147247314453, 0.061458400726318356, 0.06142617416381836, 0.0608873291015625, 0.06084159851074219, 0.060950912475585935, 0.061773536682128906, 
0.06150547027587891, 0.061636959075927734, 0.060862464904785155, 0.06071670532226563, 0.06077475357055664, 0.060948543548583985, 0.0613109130859375, 0.061031455993652346, 0.06112313461303711, 0.061467041015625, 0.06092512130737305, 0.06096774291992187, 0.06133084869384765, 0.060924095153808595, 0.06499574279785156, 0.06231356811523438, 0.061499935150146484, 0.06086665725708008, 0.06262198257446289, 0.06345513534545899, 0.06149039840698242, 0.06110070419311524, 0.06110838317871094, 0.061016128540039065, 0.0612044792175293, 0.061112064361572266, 0.06141404724121094, 0.061343936920166015, 0.061298686981201174, 0.061136161804199216, 0.06081955337524414, 0.06102899169921875, 0.0609356803894043, 0.06068441772460938, 0.061192543029785156, 0.061319198608398434, 0.061061119079589846, 0.061013057708740236, 0.06075183868408203, 0.060989761352539064, 0.061074081420898436, 0.06060579299926758, 0.06071129608154297, 0.06061494445800781, 0.06073129653930664, 0.061505630493164064, 0.06133081436157226, 0.0624400634765625, 0.061448192596435545, 0.061104095458984375, 0.06142057418823242, 0.06112768173217773, 0.06110003280639648, 0.06141068649291992, 0.061289089202880856, 0.06102563095092774, 0.06086928176879883, 0.06466486358642579, 0.06065404891967773, 0.06099174499511719, 0.06059811019897461, 0.060778656005859376, 0.060806304931640624, 0.06064156723022461, 0.06075449752807617, 0.06075187301635742, 0.060878528594970706, 0.06872300720214844, 0.06120140838623047, 0.06070899200439453, 0.06078694534301758, 0.060752513885498044, 0.06091775894165039, 0.060800128936767575, 0.06071590423583984, 0.0607534065246582, 0.06060697555541992, 0.06048767852783203, 0.06070636749267578, 0.06081148910522461, 0.060771873474121094, 0.06040956878662109, 0.060453441619873045, 0.060842399597167966, 0.06056243133544922, 0.060721630096435546, 0.06081180953979492, 0.06074367904663086, 0.06115971374511719, 0.06108713531494141, 0.06088272094726563, 0.06134048080444336, 0.06094438552856445, 0.06100486373901367, 0.06106003189086914, 0.06094566345214844, 0.060958656311035156, 0.06069113540649414, 0.060887168884277344, 0.0609600944519043, 0.060701343536376955, 0.06157926559448242, 0.062429183959960936, 0.06150454330444336, 0.061270206451416016, 0.06120243072509766, 0.06091763305664062, 0.0608449592590332, 0.06139494323730469, 0.06118096160888672, 0.06110038375854492, 0.061333152770996095, 0.060885982513427736, 0.06249062347412109, 0.061324737548828126, 0.06133542251586914, 0.06122940826416016, 0.06071945571899414, 0.06092355346679688, 0.060780895233154296, 0.06067737579345703, 0.06081932830810547, 0.06055516815185547, 0.06082863998413086, 0.06078668975830078, 0.06041561508178711, 0.060827552795410154, 0.06070115280151367, 0.060649471282958986, 0.060536510467529295, 0.06058425521850586, 0.06036275100708008, 0.060723201751708984, 0.060673534393310545, 0.06043209457397461, 0.06061340713500977, 0.06048972702026367, 0.06085958480834961, 0.060558143615722655, 0.06058732986450195, 0.06061945724487305, 0.06049523162841797, 0.06296025466918945, 0.06092617416381836, 0.06157078552246094, 0.060571327209472656, 0.060579681396484376, 0.06092035293579102, 0.06136406326293945, 0.060942401885986326, 0.06110960006713867]",tokens/s,16.32859966936208,, 
8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( 
ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 568546 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1121.3824,9791.471616,0.0,9388.949504,9304.608768,s,1,32.0674375,32.0674375,0.0,32.0674375,32.0674375,32.0674375,32.0674375,[32.0674375],,kWh,0.000730990131200042,8.062639954391234e-05,0.0002417368600559633,0.0010533533907999175,,MB,1578.676224,10206.707712,0.0,9789.505536,9597.898752,s,10,7.923096862792968,0.7923096862792968,0.0032394745726825936,0.792263397216797,0.7964029724121093,0.7970545440673829,0.7975758013916016,"[0.7962581787109375, 0.7920283203125, 0.789807861328125, 0.7869620361328125, 0.792260009765625, 0.78822705078125, 0.7922667846679687, 0.7923345947265625, 0.7977061157226563, 0.7952459106445312]",tokens/s,323.10598296757104,kWh,2.3028215502566896e-05,2.5396205941437976e-06,1.2687146901848066e-05,3.8254982998558755e-05,tokens/kWh,6691938.6687387815,MB,1611.4688,10206.707712,0.0,9789.505536,9597.901312,s,10,367.3625078125,36.73625078125,0.07998935006397521,36.717792968750004,36.827972265625,36.8443533203125,36.8574581640625,"[36.860734375, 36.6641015625, 36.64830859375, 36.6681328125, 36.64483984375, 36.759375, 36.79619921875, 36.8202734375, 36.6762109375, 36.82433203125]",tokens/s,1.7149273173013857,kWh,0.0010717078155720212,0.00011821744951979844,0.00036965127007975546,0.001559576535171575,tokens/kWh,40395.58083827487,,s,630,367.35990924072314,0.5831109670487662,0.0034430164062997253,0.5825333557128907,0.5872953430175781,0.5892139587402344,0.5953777093505859,"[0.5841551513671875, 0.5835448608398438, 0.583309326171875, 0.5836103515625, 0.58425341796875, 0.5892422485351563, 0.5838101806640625, 0.5854724731445312, 0.5867258911132812, 0.5842042236328125, 0.588651611328125, 0.5865894775390625, 0.5940891723632813, 0.584933837890625, 0.5853363037109375, 0.58491748046875, 0.587058837890625, 0.5869910888671875, 0.5846301879882813, 0.584512451171875, 0.581074951171875, 0.5829132080078125, 0.5874358520507813, 0.5830850830078125, 0.5866639404296875, 0.5844348754882812, 0.5827400512695312, 0.5830725708007812, 0.5821051025390624, 0.5889246826171874, 0.5825172729492187, 0.5855629272460937, 0.5835428466796875, 0.5863473510742188, 0.5847100830078125, 0.5836534423828125, 0.5841510620117187, 0.5817610473632813, 0.5829893188476563, 0.5860111083984375, 0.5854310302734375, 0.5864939575195313, 0.5826170654296875, 0.5852999267578125, 0.5824816284179688, 0.5920341796875, 0.5878558349609375, 0.5906943969726562, 0.5861642456054688, 0.5866843872070312, 0.5860065307617187, 0.5837455444335937, 0.5836165161132812, 0.5827666015625, 0.5823529052734375, 0.58298779296875, 0.5833090209960937, 0.5904463500976562, 0.5858450927734375, 0.5868300170898437, 0.5826428833007813, 0.5829190063476563, 0.5815389404296875, 0.5836663818359376, 0.5812630004882813, 0.57979541015625, 0.58281982421875, 0.5789962768554687, 0.5775032348632813, 0.5827238159179687, 0.5795755615234375, 0.5806182250976563, 0.5839093627929688, 0.5848883056640625, 0.5834641723632813, 0.58153173828125, 0.5814094848632813, 0.5792477416992188, 0.5815177001953125, 0.5862337646484375, 0.5797556762695313, 0.5825478515625, 0.5800853881835938, 0.5854492797851563, 0.5792097778320312, 0.5804625854492188, 0.580411376953125, 0.578334716796875, 0.5805191650390625, 0.5775941162109375, 0.5837332763671875, 0.581666259765625, 0.5814686889648437, 0.5817467041015625, 0.5808855590820312, 0.585484619140625, 0.5824285278320313, 0.5849110107421875, 0.5813162841796875, 0.5792325439453125, 0.5832520751953125, 0.5815316772460938, 0.5820296630859375, 0.5807571411132812, 0.5827800903320313, 0.58137890625, 
0.5841138916015625, 0.5868048095703124, 0.5831312255859376, 0.5818619384765625, 0.583710693359375, 0.5805076293945313, 0.5789696044921875, 0.5794488525390625, 0.5887611083984375, 0.5807529907226563, 0.5812628784179688, 0.58098779296875, 0.5815541381835938, 0.58058349609375, 0.5816599731445312, 0.5838956909179688, 0.5812346801757813, 0.5839747924804688, 0.5862208862304688, 0.5862752685546875, 0.583331787109375, 0.58204833984375, 0.5837598876953125, 0.5851867065429688, 0.5903182373046875, 0.581779052734375, 0.5788860473632812, 0.5842178344726563, 0.5799493408203125, 0.5796904907226562, 0.577418212890625, 0.5768502197265625, 0.5808580322265625, 0.5788861694335937, 0.5818059692382812, 0.5788460693359375, 0.5792464599609375, 0.579844482421875, 0.5809518432617188, 0.5899033813476563, 0.5802373657226563, 0.589645751953125, 0.5820590209960937, 0.5819033813476563, 0.5801597290039062, 0.5840716552734375, 0.5806632690429687, 0.5839073486328125, 0.580334716796875, 0.580366943359375, 0.5801823120117188, 0.58395751953125, 0.5801067504882812, 0.581212158203125, 0.5835505981445313, 0.5811311645507813, 0.5809581909179687, 0.5801328735351563, 0.5850048217773437, 0.5832071533203125, 0.57986181640625, 0.5839790649414063, 0.5848521728515625, 0.5804168090820313, 0.5795888061523438, 0.583673583984375, 0.5801737670898437, 0.5794409790039062, 0.5816033325195312, 0.5824903564453126, 0.581211669921875, 0.582650146484375, 0.579495849609375, 0.5799608154296875, 0.58100537109375, 0.5873720703125, 0.5795722045898437, 0.5802495727539062, 0.5789183959960937, 0.5844089965820313, 0.5802210083007813, 0.5794283447265625, 0.5809090576171875, 0.5786199340820313, 0.579380615234375, 0.5845100708007812, 0.580042724609375, 0.5797532958984375, 0.576968505859375, 0.5835989379882812, 0.5782423095703125, 0.58528515625, 0.5790089721679688, 0.5801239624023438, 0.5808055419921875, 0.5841223754882813, 0.58047509765625, 0.5798500366210938, 0.5808988037109375, 0.5810379028320313, 0.5800081176757812, 0.5806755981445313, 0.5819985961914063, 0.581121337890625, 0.5820382080078125, 0.5797693481445313, 0.5807236938476562, 0.5796813354492187, 0.589529052734375, 0.5803817138671875, 0.5811566772460938, 0.5802533569335937, 0.5827261352539063, 0.5791539306640625, 0.5782847290039063, 0.5815137329101563, 0.577752685546875, 0.5789989624023437, 0.5784780883789062, 0.579811279296875, 0.5798277587890625, 0.5793682861328125, 0.580745849609375, 0.5775047607421875, 0.5849723510742187, 0.5979509887695312, 0.583719970703125, 0.5915757446289063, 0.5820991821289062, 0.5900267333984375, 0.588927001953125, 0.5916651611328125, 0.5880852661132813, 0.5783573608398438, 0.5850847778320313, 0.5781826782226562, 0.5904327392578125, 0.5794283447265625, 0.580369384765625, 0.5805188598632812, 0.5805887451171875, 0.584304931640625, 0.584666748046875, 0.584047607421875, 0.579671875, 0.578981689453125, 0.5809561767578125, 0.5805752563476563, 0.5809193115234375, 0.5798994140625, 0.5807656860351562, 0.5794611206054687, 0.587526123046875, 0.5825123901367187, 0.5815519409179688, 0.5789780883789063, 0.578658447265625, 0.5773330688476562, 0.5946553955078125, 0.5802046508789063, 0.5842881469726563, 0.5802715454101562, 0.5808336791992188, 0.5791704711914063, 0.5826941528320313, 0.5831134643554687, 0.578361328125, 0.5796557006835937, 0.5785984497070312, 0.5848207397460937, 0.5791849975585938, 0.579417236328125, 0.5806211547851563, 0.581390380859375, 0.5809848022460937, 0.584015869140625, 0.5873961181640625, 0.5831399536132813, 0.58168505859375, 0.58404833984375, 0.582612060546875, 
0.5853140869140625, 0.5826129760742188, 0.5809840087890625, 0.5801480102539063, 0.580514892578125, 0.5801563720703125, 0.5818532104492188, 0.5797805786132812, 0.5819259033203125, 0.5801454467773437, 0.5836929931640625, 0.5822578125, 0.5904493408203125, 0.5793200073242187, 0.579751953125, 0.5822813720703125, 0.5826739501953125, 0.5796615600585937, 0.5798359375, 0.5809876098632812, 0.5802249145507813, 0.5795491943359375, 0.5801776733398437, 0.5849397583007813, 0.5826846923828125, 0.58249609375, 0.58054638671875, 0.5793242797851562, 0.5847333984375, 0.5795369262695312, 0.5793177490234375, 0.5829076538085938, 0.5790617065429687, 0.5813699951171875, 0.58450146484375, 0.578799560546875, 0.5810811157226563, 0.5782487182617188, 0.5787868041992188, 0.583323974609375, 0.5850584716796875, 0.5848248291015625, 0.5815252685546874, 0.5819947509765625, 0.5838826293945313, 0.5859247436523437, 0.5799751586914063, 0.5831127319335937, 0.5803046264648437, 0.5843335571289062, 0.580595703125, 0.5798195190429688, 0.5822013549804688, 0.5801837768554687, 0.5773639526367188, 0.5817034912109375, 0.5847946166992187, 0.5814087524414062, 0.5824470825195313, 0.5787400512695312, 0.5821666870117187, 0.58233154296875, 0.5996917114257813, 0.5871845703125, 0.585985107421875, 0.5842113647460937, 0.5849395141601562, 0.5820152587890625, 0.5813634643554687, 0.5847322387695313, 0.5826748657226563, 0.5821951904296875, 0.5831311645507813, 0.5884099731445313, 0.5859807739257813, 0.5847893676757813, 0.5843023681640624, 0.5813482055664062, 0.5808325805664063, 0.5963394165039062, 0.588430908203125, 0.586219970703125, 0.5861007080078126, 0.5881549072265625, 0.5841654052734375, 0.5844314575195313, 0.5888557739257813, 0.5842879638671875, 0.5833947143554687, 0.5819990234375, 0.5865841064453124, 0.5848158569335937, 0.5819379272460937, 0.58380029296875, 0.5833323364257812, 0.597475341796875, 0.5831672973632812, 0.5829713134765625, 0.5836759033203125, 0.5855096435546875, 0.5864673461914063, 0.5830033569335937, 0.5809039916992188, 0.5823239135742188, 0.5816094970703125, 0.58123876953125, 0.5852426147460937, 0.5814541015625, 0.5821070556640625, 0.582349609375, 0.5867179565429688, 0.5817181396484375, 0.5874298095703125, 0.583876953125, 0.5819022827148438, 0.5820660400390625, 0.5852135009765626, 0.5821241455078126, 0.5821973266601562, 0.5802572021484375, 0.5812998657226562, 0.5813291015625, 0.5815670776367188, 0.5867515258789062, 0.5838545532226562, 0.586102783203125, 0.5837473754882813, 0.5837150268554687, 0.5811195068359375, 0.5891793823242187, 0.5816279296875, 0.58267236328125, 0.580719970703125, 0.5850446166992187, 0.5833372192382813, 0.5806517333984375, 0.5872249145507813, 0.5813096313476562, 0.583025634765625, 0.582371337890625, 0.580078857421875, 0.5872484130859374, 0.5856358642578126, 0.5828623657226563, 0.582541259765625, 0.5856399536132812, 0.5999310302734375, 0.5905924072265625, 0.5877145385742187, 0.5840036010742188, 0.5867151489257812, 0.5842462768554687, 0.5835499267578125, 0.5816504516601563, 0.5819085693359375, 0.5814251098632812, 0.5861253051757812, 0.5841874389648437, 0.5887328491210938, 0.586076171875, 0.5895208740234374, 0.583204833984375, 0.5955088500976562, 0.582099365234375, 0.5818753051757812, 0.5808060913085937, 0.5852353515625, 0.5828096923828125, 0.58351318359375, 0.5866915283203125, 0.5816708374023437, 0.581754150390625, 0.58126611328125, 0.5940017700195312, 0.5829735717773438, 0.584998046875, 0.5800989379882813, 0.5819801635742188, 0.5810253295898438, 0.5916307983398438, 0.58723095703125, 0.5834837036132813, 
0.5812264404296875, 0.5846359252929687, 0.5825254516601562, 0.5823240356445313, 0.5884266357421875, 0.5864528198242187, 0.581769287109375, 0.5845226440429687, 0.5825752563476563, 0.5865067138671874, 0.5839744262695312, 0.5819483032226562, 0.5839277954101563, 0.5830323486328125, 0.5921981201171875, 0.583498779296875, 0.5875947265625, 0.5830404663085937, 0.5907830200195312, 0.5887479858398438, 0.5816348266601562, 0.5856971435546875, 0.5827686767578125, 0.5806077270507812, 0.5812998046875, 0.58042041015625, 0.5863505859375, 0.5849989013671875, 0.5846666259765625, 0.5818369750976562, 0.5832666015625, 0.5873194580078125, 0.58379248046875, 0.5817704467773438, 0.5820460205078125, 0.5836312866210938, 0.5817001342773438, 0.5815186767578125, 0.5813230590820313, 0.5807431640625, 0.5832093505859375, 0.5819325561523437, 0.5855851440429688, 0.5837188720703125, 0.5833994750976562, 0.5806018676757813, 0.5768859252929688, 0.5870784912109375, 0.5811909790039063, 0.581661376953125, 0.5790799560546875, 0.581582763671875, 0.5795003662109375, 0.5808571166992188, 0.5832875366210938, 0.5789265747070312, 0.5789324340820312, 0.5840509643554688, 0.5812200927734374, 0.5859801635742188, 0.5822279663085937, 0.5817521362304687, 0.579784912109375, 0.5793110961914063, 0.58737353515625, 0.58016357421875, 0.5806755981445313, 0.580298095703125, 0.5860440063476563, 0.5819002685546875, 0.5831168212890625, 0.5812674560546875, 0.582240234375, 0.582046875, 0.5825990600585937, 0.5832698974609375, 0.583396484375, 0.5833951416015625, 0.5809592895507812, 0.5803898315429687, 0.584585205078125, 0.5875054931640625, 0.5808744506835938, 0.581992431640625, 0.5799955444335938, 0.5832991943359375, 0.579430419921875, 0.5801041870117187, 0.5800775756835937, 0.579569580078125, 0.5793055419921875, 0.5835448608398438, 0.5836124267578126, 0.595056640625, 0.5817835693359374, 0.5809193115234375, 0.5821583251953125, 0.5822938232421875, 0.5851909790039063, 0.5831463623046875, 0.5823009033203125, 0.587348876953125, 0.582554443359375, 0.5841068725585937, 0.5851032104492188, 0.5826737670898438, 0.5852884521484375, 0.5824859619140625, 0.5828997192382812, 0.5868523559570312, 0.5856091918945312, 0.5856574096679688, 0.5847356567382812, 0.5847799682617187, 0.5910423583984376, 0.5856091918945312, 0.5883350830078125, 0.5835386962890625, 0.5881384887695312, 0.582649169921875, 0.5841509399414062, 0.5872926635742187, 0.5865702514648438, 0.5851436767578125, 0.58497265625, 0.5882904663085937, 0.5897844848632813, 0.5861280517578125, 0.5831159057617188, 0.583454833984375, 0.5876845703125, 0.6032191772460938, 0.5823987426757813, 0.5830369262695313, 0.5845601806640625, 0.5863121337890626, 0.5836390380859375, 0.5829631958007813, 0.5817647094726562, 0.5818572998046875, 0.5808418579101563, 0.5827478637695312, 0.5842230224609375, 0.587435302734375, 0.584088134765625, 0.5840254516601563, 0.5820137329101562, 0.5810933837890625, 0.5867110595703126, 0.5826027221679687, 0.581074951171875, 0.5820907592773438, 0.5875527954101563, 0.5814861450195312, 0.578650146484375, 0.5898223266601562, 0.5794426879882812, 0.580126708984375, 0.5806610107421875, 0.5791171875, 0.5818260498046876]",tokens/s,1.7149394480799893,, 
4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1331.572736,1100.873728,0.0,698.351616,690.178048,s,1,8.58836328125,8.58836328125,0.0,8.58836328125,8.58836328125,8.58836328125,8.58836328125,[8.58836328125],,kWh,4.2078157275075985e-05,4.63403204498846e-06,1.4315567007983088e-05,6.102775632804753e-05,,MB,1508.093952,1415.446528,0.0,1000.341504,957.77792,s,10,0.5905507545471191,0.05905507545471191,0.00027531399679039465,0.05914924812316895,0.059283770751953124,0.05936510047912598,0.059430164260864254,"[0.05926569747924805, 0.05924595260620117, 0.059222625732421874, 0.058463329315185546, 0.05944643020629883, 0.05893881607055664, 0.05892694473266601, 0.05916320037841797, 0.05913529586791992, 0.058742462158203126]",tokens/s,4334.936464458859,kWh,1.7480606688622682e-06,1.9270726999405574e-07,1.1582111727424054e-06,3.0989791115987295e-06,tokens/kWh,82607849.48238403,MB,1513.054208,1423.835136,0.0,1008.730112,957.78048,s,10,25.791041259765628,2.5791041259765626,0.010475013563931488,2.5781575927734375,2.593673583984375,2.5942294921875,2.59467421875,"[2.593550048828125, 2.587754150390625, 2.568033935546875, 2.569312744140625, 2.5866748046875, 2.57572998046875, 2.566980224609375, 2.567634765625, 2.594785400390625, 
2.580585205078125]",tokens/s,24.427086663724918,kWh,7.513478347239142e-05,8.287339921599585e-06,2.9191674218258736e-05,0.00011261379761224973,tokens/kWh,559434.1131885165,,s,630,25.78849375534059,0.040934117071969166,0.0005999379895522757,0.040799968719482424,0.041397259140014644,0.04181422061920166,0.04405275905609131,"[0.041025535583496094, 0.04099420928955078, 0.04085558319091797, 0.040929855346679686, 0.04101103973388672, 0.04099497604370117, 0.04108492660522461, 0.041344993591308596, 0.04131343841552734, 0.041016193389892576, 0.041086849212646485, 0.040673023223876954, 0.04070134353637695, 0.040827873229980466, 0.04082483291625977, 0.04072204971313476, 0.04074899291992187, 0.04088467025756836, 0.04085145568847656, 0.04098787307739258, 0.040498046875, 0.040871551513671875, 0.040976673126220706, 0.04134297561645508, 0.04105571365356445, 0.041054752349853514, 0.041440895080566406, 0.04105459213256836, 0.040839168548583986, 0.040779296875, 0.04065884780883789, 0.04187599945068359, 0.04243254470825195, 0.044985855102539066, 0.040925697326660154, 0.04092825698852539, 0.04089516830444336, 0.040640830993652344, 0.040588512420654296, 0.04447097778320312, 0.040962142944335936, 0.04087603378295898, 0.040725631713867186, 0.0408298225402832, 0.044349441528320314, 0.0411761589050293, 0.041010017395019534, 0.0411313591003418, 0.04109385681152344, 0.041240577697753904, 0.04153958511352539, 0.041150463104248046, 0.040812545776367185, 0.040713409423828124, 0.040682304382324216, 0.04151500701904297, 0.04079180908203125, 0.04114662551879883, 0.04084537506103516, 0.04078585433959961, 0.040981983184814455, 0.040921630859375, 0.04085299301147461, 0.041137889862060545, 0.041134078979492186, 0.04082412719726562, 0.042033214569091794, 0.04079475021362305, 0.04067737579345703, 0.04146995162963867, 0.04111718368530273, 0.04097894287109375, 0.04072988891601562, 0.04079216003417969, 0.04075600051879883, 0.04109856033325195, 0.04091958236694336, 0.04168703842163086, 0.04120371246337891, 0.04081868743896484, 0.040869407653808594, 0.04045606231689453, 0.04061881637573242, 0.040695583343505856, 0.040574977874755856, 0.0424898567199707, 0.041307201385498045, 0.04077622222900391, 0.04057068634033203, 0.04098624038696289, 0.04092617416381836, 0.040771583557128906, 0.040959999084472655, 0.04091494369506836, 0.041062400817871096, 0.041023200988769534, 0.04128387069702148, 0.04122623825073242, 0.04119756698608398, 0.04126051330566406, 0.041082782745361326, 0.041075328826904296, 0.04112998580932617, 0.04115222549438476, 0.041664222717285156, 0.04098105621337891, 0.04125228881835938, 0.04091961669921875, 0.04146585464477539, 0.040987808227539065, 0.04107484817504883, 0.041052864074707034, 0.04101939010620117, 0.04102348709106445, 0.04094095993041992, 0.04066569519042969, 0.04115660858154297, 0.04095590209960937, 0.04130201721191406, 0.041146175384521484, 0.04123993682861328, 0.04109363174438477, 0.04120966339111328, 0.041363967895507815, 0.04121395111083984, 0.041213024139404295, 0.041267105102539066, 0.040956928253173826, 0.04077545547485351, 0.040718143463134765, 0.04063177490234375, 0.04110204696655274, 0.04074492645263672, 0.04090009689331055, 0.040736801147460935, 0.04087587356567383, 0.040858238220214844, 0.04081478500366211, 0.040632225036621096, 0.04072995376586914, 0.04084902572631836, 0.04096403121948242, 0.04109414291381836, 0.0414854736328125, 0.04075942230224609, 0.04073875045776367, 0.040973087310791016, 0.0412303352355957, 0.041431041717529295, 0.041562110900878906, 0.04124601745605469, 0.04109356689453125, 
0.04082640075683594, 0.04070064163208008, 0.04077363204956055, 0.04100899124145508, 0.04049935913085938, 0.04066041564941406, 0.0408416976928711, 0.0406710090637207, 0.04053228759765625, 0.04056598281860352, 0.040516319274902346, 0.04039072036743164, 0.04043161773681641, 0.04059088134765625, 0.04062255859375, 0.0406036491394043, 0.04107024002075195, 0.040739425659179686, 0.04053974533081055, 0.04089203262329102, 0.04102608108520508, 0.040709983825683596, 0.04073497772216797, 0.04130915069580078, 0.04067414474487305, 0.04051123046875, 0.040434017181396484, 0.04050080108642578, 0.04056953430175781, 0.04027571105957031, 0.04038623809814453, 0.040564289093017576, 0.0402762565612793, 0.04007369613647461, 0.04035334396362305, 0.04019449615478515, 0.04041446304321289, 0.04081868743896484, 0.040777759552001955, 0.040745697021484374, 0.04079840087890625, 0.04062822341918945, 0.04062963104248047, 0.040612480163574216, 0.041248062133789065, 0.04254342269897461, 0.04058560180664063, 0.0417894401550293, 0.0404716796875, 0.040568801879882814, 0.040665313720703124, 0.041269790649414065, 0.04061609649658203, 0.040716289520263675, 0.04053401565551758, 0.04048883056640625, 0.04051980972290039, 0.04125696182250976, 0.04041113662719727, 0.040310432434082034, 0.04016476821899414, 0.04052374267578125, 0.040541152954101566, 0.04083446502685547, 0.0407630729675293, 0.040626815795898434, 0.041081119537353515, 0.040786209106445315, 0.04046112060546875, 0.04061177444458008, 0.041008094787597656, 0.041532928466796876, 0.040688129425048826, 0.04056825637817383, 0.040516159057617185, 0.04032700729370117, 0.04060079956054687, 0.04052883148193359, 0.040589088439941405, 0.04049910354614258, 0.04083126449584961, 0.040936958312988284, 0.040489505767822266, 0.040378593444824216, 0.04061974334716797, 0.04064652633666992, 0.04053216171264649, 0.04058272171020508, 0.040732192993164065, 0.040514049530029295, 0.040234878540039064, 0.040421089172363284, 0.04060028839111328, 0.040771678924560545, 0.04050115203857422, 0.042041439056396485, 0.04109891128540039, 0.0405401611328125, 0.04382755279541015, 0.04051724624633789, 0.04061587142944336, 0.04074038314819336, 0.040243679046630856, 0.04103894424438476, 0.0409527359008789, 0.04076748657226562, 0.04081049728393555, 0.04063641738891602, 0.04064460754394531, 0.04074636840820312, 0.040634078979492186, 0.0409315185546875, 0.04084400177001953, 0.04148012924194336, 0.04077507019042969, 0.040777759552001955, 0.04040902328491211, 0.040705791473388674, 0.04403283309936523, 0.042897537231445314, 0.0437393913269043, 0.04090464019775391, 0.040646080017089845, 0.043358592987060546, 0.040568672180175784, 0.04070966339111328, 0.04233075332641602, 0.041783294677734374, 0.04075497436523438, 0.04084921646118164, 0.04064499282836914, 0.04026889419555664, 0.04037318420410156, 0.040610816955566405, 0.040389633178710936, 0.04081782531738281, 0.041435329437255856, 0.040739486694335934, 0.04088217544555664, 0.04070604705810547, 0.041834495544433595, 0.04082483291625977, 0.040697856903076174, 0.04166611099243164, 0.041036224365234374, 0.040753150939941404, 0.040688926696777344, 0.04084604644775391, 0.0408350715637207, 0.040755199432373046, 0.041924606323242186, 0.04124051284790039, 0.04126726531982422, 0.04088422393798828, 0.04086783981323242, 0.04070195388793945, 0.04060979080200195, 0.04095135879516602, 0.041131839752197266, 0.04066368103027344, 0.04098796844482422, 0.04113827133178711, 0.04090108871459961, 0.04099283218383789, 0.04124105453491211, 0.0413776969909668, 0.04106636810302734, 
0.04096627044677734, 0.04171692657470703, 0.04073263931274414, 0.04081340789794922, 0.04055654525756836, 0.0425082893371582, 0.04096819305419922, 0.04064051055908203, 0.04057244873046875, 0.04117724609375, 0.040732032775878904, 0.04116166305541992, 0.04079206466674805, 0.04069807815551758, 0.04083286285400391, 0.04063430404663086, 0.04105625534057617, 0.041078784942626956, 0.042040641784667966, 0.04080915069580078, 0.040626144409179686, 0.04065897750854492, 0.040630271911621094, 0.04070169448852539, 0.0411665267944336, 0.04120428848266602, 0.04118460845947266, 0.040723102569580075, 0.04070518493652344, 0.04065740966796875, 0.04076073455810547, 0.04099296188354492, 0.04054297637939453, 0.04072447967529297, 0.04055609512329102, 0.0404648323059082, 0.04039648056030273, 0.04043382263183594, 0.0404420166015625, 0.04045209503173828, 0.04059340667724609, 0.04088422393798828, 0.04058726501464844, 0.040880126953125, 0.04053606414794922, 0.04072857666015625, 0.04172579193115234, 0.040799583435058594, 0.040868129730224606, 0.04039286422729492, 0.040651134490966793, 0.04087376022338867, 0.041006526947021484, 0.041227039337158204, 0.040742687225341793, 0.041356609344482424, 0.04094460678100586, 0.04075718307495117, 0.040753150939941404, 0.04088838577270508, 0.04067184066772461, 0.040574977874755856, 0.040525440216064454, 0.04072073745727539, 0.040982559204101564, 0.040709342956542965, 0.040978782653808596, 0.040835361480712894, 0.040710208892822265, 0.041395553588867186, 0.040849342346191406, 0.04111996841430664, 0.04123503875732422, 0.04109107208251953, 0.040597118377685544, 0.041427326202392575, 0.040564735412597655, 0.040425281524658206, 0.04049235153198242, 0.04071855926513672, 0.04078659057617187, 0.04072857666015625, 0.04058854293823242, 0.04061008071899414, 0.040771808624267575, 0.04066944122314453, 0.04078121566772461, 0.04051228713989258, 0.040831039428710934, 0.04034844970703125, 0.040215518951416014, 0.04063820648193359, 0.04017587280273437, 0.03979673767089844, 0.039913471221923826, 0.04067737579345703, 0.040005630493164065, 0.039898880004882814, 0.04003417587280273, 0.040497535705566405, 0.04116070556640625, 0.04155766296386719, 0.041008800506591794, 0.04075385665893555, 0.04084230422973633, 0.04065990447998047, 0.04109107208251953, 0.04048691177368164, 0.04048486328125, 0.04067324829101562, 0.040622112274169925, 0.04071782302856446, 0.04021052932739258, 0.04094572830200195, 0.04119587326049805, 0.040738815307617186, 0.040801727294921875, 0.04064313507080078, 0.04215398406982422, 0.04207759857177734, 0.04067184066772461, 0.04122937774658203, 0.040624000549316405, 0.04075942230224609, 0.040581119537353515, 0.040869888305664064, 0.040849601745605466, 0.040984447479248044, 0.04066492843627929, 0.04048502349853516, 0.04085139083862305, 0.04050719833374023, 0.04049046325683594, 0.040934112548828124, 0.04078550338745117, 0.04075667190551758, 0.04093417739868164, 0.04093779373168945, 0.04084313583374023, 0.04080640029907227, 0.04073267364501953, 0.04072652816772461, 0.04084940719604492, 0.04064255905151367, 0.040610912322998044, 0.0407435188293457, 0.040761409759521486, 0.04068556976318359, 0.04096640014648437, 0.04080022430419922, 0.041132064819335935, 0.041306110382080076, 0.040822784423828126, 0.04035747146606445, 0.04033324813842774, 0.04045052719116211, 0.040435585021972656, 0.040573055267333985, 0.04079350280761719, 0.04066774368286133, 0.04058227157592773, 0.04088284683227539, 0.04082294464111328, 0.041115806579589846, 0.040645984649658205, 0.04144902420043945, 0.040866817474365234, 
0.040787967681884765, 0.04148223876953125, 0.040753150939941404, 0.04065024185180664, 0.040554912567138675, 0.04049427032470703, 0.040680030822753906, 0.04079203033447266, 0.040861377716064455, 0.040882335662841794, 0.040643070220947264, 0.040716289520263675, 0.04057088088989258, 0.040648704528808595, 0.0405667839050293, 0.04063846588134765, 0.040694881439208984, 0.04055152130126953, 0.04118832015991211, 0.04111280059814453, 0.04090959930419922, 0.040755199432373046, 0.041082782745361326, 0.043262046813964845, 0.042213600158691404, 0.042187873840332034, 0.04076204681396484, 0.04062995147705078, 0.04050156784057617, 0.04043775939941406, 0.04049251174926758, 0.0408950080871582, 0.04460086441040039, 0.041229793548583984, 0.04251955032348633, 0.040697601318359374, 0.04068582534790039, 0.04064665603637695, 0.04070147323608399, 0.04067571258544922, 0.04043990325927734, 0.0415390396118164, 0.043395614624023436, 0.04141260910034179, 0.04077715301513672, 0.040728126525878906, 0.04071737670898438, 0.040820350646972654, 0.041789119720458984, 0.04130844879150391, 0.04420233535766602, 0.041207809448242184, 0.040619968414306644, 0.041015583038330077, 0.0405722541809082, 0.04067372894287109, 0.04097350311279297, 0.04162847900390625, 0.04112179183959961, 0.04081001663208008, 0.040616416931152345, 0.04233420944213867, 0.04131430435180664, 0.04088131332397461, 0.04073926544189453, 0.04059804916381836, 0.04074892807006836, 0.04063388824462891, 0.040648353576660155, 0.04039728164672852, 0.040554622650146484, 0.04120393753051758, 0.04061113739013672, 0.040524478912353515, 0.04072243118286133, 0.04031078338623047, 0.040517024993896485, 0.04135164642333984, 0.044060897827148435, 0.041242111206054685, 0.040608158111572264, 0.040799713134765624, 0.042043968200683596, 0.040574718475341796, 0.04095571136474609, 0.04085760116577149, 0.04082937622070312, 0.0408616943359375, 0.040880126953125, 0.04080169677734375, 0.04086025619506836, 0.04102348709106445, 0.04102048110961914, 0.041013214111328126, 0.04149676895141602, 0.0404317741394043, 0.0413067512512207, 0.04115372848510742, 0.04066182327270508, 0.040664222717285155, 0.040860511779785155, 0.041524574279785155, 0.04101801681518555, 0.04092310333251953, 0.040777759552001955, 0.04062953567504883, 0.040895072937011716, 0.04090483093261719, 0.040707359313964846, 0.04045638275146484, 0.040436256408691404, 0.04054540634155274, 0.04056492614746094, 0.040565025329589846, 0.04049887847900391, 0.041037921905517576, 0.041251041412353515, 0.041049758911132814, 0.04088662338256836, 0.04094403076171875, 0.04052928161621094, 0.04051744079589844, 0.04428678512573242, 0.04068560028076172, 0.04148630523681641, 0.04073056030273438, 0.040779296875, 0.04062380981445313, 0.040737281799316405, 0.04061833572387695, 0.04092886352539062, 0.04066534423828125, 0.041099422454833986, 0.04080806350708008, 0.041421184539794924, 0.041025791168212894, 0.04096988677978516, 0.04080771255493164, 0.0405898551940918, 0.040861438751220704, 0.04116124725341797, 0.0413240966796875, 0.04177939224243164, 0.041167102813720706]",tokens/s,24.429499682180253,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,5307.150336,3468.558336,0.0,3066.036224,2865.160192,s,1,12.8887783203125,12.8887783203125,0.0,12.8887783203125,12.8887783203125,12.8887783203125,12.8887783203125,[12.8887783203125],,kWh,0.00016440750080414545,1.8127949942310964e-05,5.515448856799332e-05,0.00023768993931444976,,MB,5361.102848,3797.8112,0.0,3374.317568,3158.450176,s,10,0.9196216354370117,0.09196216354370117,0.0003951384329182661,0.09176239776611328,0.09243659439086914,0.09260545082092285,0.09274053596496581,"[0.09170451354980469, 0.0913846435546875, 0.09172541046142578, 0.0923990707397461, 0.09172745513916016, 0.09176092529296875, 0.09210259246826172, 0.09176387023925782, 0.09277430725097656, 0.09227884674072266]",tokens/s,2783.753558368021,kWh,2.6897543581417936e-06,2.9663283316171837e-07,1.343716671302813e-06,4.330103862606325e-06,tokens/kWh,59120983.72760775,MB,5361.102848,3797.8112,0.0,3374.317568,3158.452736,s,10,55.74772705078125,5.574772705078125,0.026915023819540723,5.583779541015625,5.599870068359375,5.603106420898437,5.605695502929688,"[5.52031103515625, 5.539662109375, 5.56736279296875, 5.55696923828125, 5.59632666015625, 5.59915087890625, 5.5772841796875, 5.59027490234375, 5.59404248046875, 5.6063427734375]",tokens/s,11.300909172962795,kWh,0.00016288609264601922,1.7966890272654524e-05,6.211852854289637e-05,0.00024297151146157007,tokens/kWh,259289.6575447467,,s,630,55.745409400939906,0.0884847768268888,0.0008786232705047197,0.08841388702392577,0.08925636444091797,0.08990264892578125,0.09236025924682617,"[0.08741318511962891, 0.089255615234375, 0.0875379867553711, 0.0874411849975586, 0.08743344116210937, 0.08772198486328125, 0.08731033325195313, 0.08793087768554687, 0.08762358093261718, 0.0874578857421875, 0.08715865325927734, 0.08709337615966797, 0.08813568115234376, 0.08749846649169922, 0.08707305908203125, 0.08701747131347656, 0.08709529876708984, 0.08704550170898437, 0.08711583709716797, 0.08739027404785156, 0.08730265808105468, 0.08711145782470703, 0.08724502563476562, 0.08718540954589844, 0.08755404663085938, 0.08695193481445312, 0.08725644683837891, 0.08715122985839843, 0.08769945526123046, 0.08771129608154297, 0.08733660888671875, 0.08802086639404297, 0.08736041259765626, 0.08768006134033203, 0.08728880310058594, 0.08778089904785157, 0.08973152160644532, 0.08762764739990235, 0.08755030059814453, 0.08752515411376953, 0.08786943817138672, 0.08793087768554687, 0.08801074981689454, 0.08812258911132813, 0.08786637115478516, 0.0880393295288086, 0.08795123291015625, 0.08824620819091797, 0.08765241241455078, 0.08763116455078125, 0.08750969696044922, 0.0876131820678711, 0.08749286651611328, 0.0875742416381836, 0.0878861083984375, 0.08774451446533203, 0.08733907318115235, 0.08768441772460937, 0.0876816635131836, 0.08843619537353516, 0.08766722869873046, 0.08762777709960938, 0.08766989135742187, 0.087463134765625, 0.08757328033447266, 0.0873995819091797, 0.08909295654296875, 0.0876739501953125, 0.08844313812255859, 0.09272739410400391, 0.08782463836669922, 0.08732015991210937, 0.08721481323242188, 0.08706150054931641, 0.0875323486328125, 0.0873863067626953, 0.08762777709960938, 0.08904415893554687, 0.08836790466308594, 0.0874291534423828, 0.08747161865234375, 0.08841171264648437, 0.087716796875, 0.08704515075683594, 0.08721817779541016, 0.08733602905273438, 0.08731180572509765, 0.0881361312866211, 
0.08725836944580079, 0.08757119750976562, 0.08728575897216796, 0.08773567962646485, 0.08757721710205078, 0.08735234832763672, 0.08887532806396484, 0.09070822143554688, 0.08751251220703125, 0.08727446746826172, 0.08805990600585938, 0.08774211120605468, 0.08949795532226562, 0.08751110076904296, 0.0874474868774414, 0.08740201568603516, 0.08750310516357422, 0.08792240142822266, 0.08731670379638672, 0.08814620971679688, 0.0877127685546875, 0.08789923095703125, 0.0883127670288086, 0.08788886260986328, 0.087910400390625, 0.08754790496826172, 0.08787334442138672, 0.08939539337158203, 0.08851817321777344, 0.08744188690185548, 0.08765952301025391, 0.08762265777587891, 0.08755171203613281, 0.08824451446533203, 0.08790534210205078, 0.08779219055175781, 0.08836153411865234, 0.08826860809326172, 0.08760717010498047, 0.08759529876708984, 0.0877586898803711, 0.08772402954101563, 0.0876845474243164, 0.08818447875976562, 0.08812432098388671, 0.08790016174316406, 0.08786534118652344, 0.08770127868652344, 0.08789555358886719, 0.08764662170410156, 0.08757839965820312, 0.08757625579833984, 0.08755049896240234, 0.08960598754882812, 0.08805401611328124, 0.08814205169677734, 0.08822579193115235, 0.08807974243164063, 0.08750867462158203, 0.08773331451416015, 0.0895666275024414, 0.08793545532226563, 0.08784226989746094, 0.08796959686279297, 0.08781897735595703, 0.08773782348632812, 0.0881464614868164, 0.08818688201904297, 0.08905318450927735, 0.08802909088134765, 0.08790640258789062, 0.087959228515625, 0.08788409423828125, 0.0879834213256836, 0.08844358062744141, 0.08788172912597657, 0.08784601593017578, 0.08833283233642578, 0.0881987533569336, 0.08997964477539062, 0.08860281372070312, 0.08886019134521485, 0.08915122985839843, 0.08907215881347656, 0.08846131134033203, 0.0882805404663086, 0.08838550567626953, 0.08822998046875, 0.08833686065673828, 0.0886141128540039, 0.08907408142089844, 0.08928050994873046, 0.08839801788330078, 0.09110546875, 0.0911131820678711, 0.08879897308349609, 0.08818073272705078, 0.08827040100097656, 0.08805020904541015, 0.08980095672607422, 0.09062809753417969, 0.08971206665039062, 0.08807212829589844, 0.0890108184814453, 0.08901529693603516, 0.08780006408691406, 0.08793574523925782, 0.0880865249633789, 0.08805375671386718, 0.0877891845703125, 0.08791283416748047, 0.08805996704101562, 0.08853497314453125, 0.08796774291992188, 0.08775475311279297, 0.08835222625732422, 0.08777168273925781, 0.08784690856933594, 0.09079385375976562, 0.08797129821777344, 0.08806179046630859, 0.08790073394775391, 0.0882643814086914, 0.08972140502929687, 0.09236377716064453, 0.08814284515380859, 0.08861901092529297, 0.08837939453125, 0.08779776000976562, 0.08809677124023438, 0.08826995086669921, 0.08764281463623047, 0.08804582214355469, 0.08772300720214844, 0.08818374633789063, 0.0879544677734375, 0.08765650939941406, 0.08875446319580078, 0.08882144165039063, 0.08746489715576172, 0.0871786880493164, 0.08764883422851563, 0.08759458923339844, 0.08742524719238282, 0.08801673889160157, 0.08782902526855468, 0.08835008239746094, 0.0881299819946289, 0.08827433776855469, 0.08903526306152344, 0.08783676910400391, 0.0875494384765625, 0.08795974731445312, 0.08770352172851563, 0.087714111328125, 0.08783670043945313, 0.08786329650878906, 0.08817855834960937, 0.0879796142578125, 0.0877919692993164, 0.08849584197998046, 0.08856829071044922, 0.08752947235107422, 0.0879452133178711, 0.08815206146240234, 0.08812134552001953, 0.08851171112060546, 0.08863436889648438, 0.0884688949584961, 0.08812783813476563, 0.08843881225585938, 
0.09149030303955077, 0.08840732574462891, 0.08822038269042969, 0.08864329528808594, 0.08849027252197265, 0.08927049255371093, 0.088993408203125, 0.08880143737792968, 0.08913283538818359, 0.08861619567871094, 0.08853603363037109, 0.08854678344726563, 0.08912083435058593, 0.08835132598876953, 0.08845206451416016, 0.0883782730102539, 0.08833023834228515, 0.0883056640625, 0.08744044494628907, 0.08803167724609375, 0.08862140655517578, 0.08846147155761719, 0.08906956481933594, 0.09148553466796874, 0.08865638732910157, 0.0884205093383789, 0.08862876892089844, 0.08841878509521485, 0.08860057830810547, 0.08887814331054687, 0.08873670196533204, 0.08933171081542969, 0.08852275085449218, 0.09270681762695313, 0.09041292572021484, 0.08875247955322266, 0.08842626953125, 0.0885021743774414, 0.08842594909667968, 0.08868927764892579, 0.08869683074951172, 0.08866751861572265, 0.09100761413574218, 0.08885862731933594, 0.08924774169921874, 0.08920432281494141, 0.08896758270263672, 0.0886246109008789, 0.08883229064941406, 0.08866620635986328, 0.08849014282226562, 0.08881970977783203, 0.08851197052001954, 0.08851087951660157, 0.08849132537841797, 0.08873043060302735, 0.08813510131835937, 0.08949542236328124, 0.09060415649414062, 0.08831568145751953, 0.08814031982421874, 0.08846720123291016, 0.08835302734375, 0.08843849945068359, 0.09329837036132813, 0.08867900848388671, 0.08877606201171875, 0.08935641479492187, 0.08868611145019531, 0.088882080078125, 0.08854259490966797, 0.08843539428710938, 0.08870854187011719, 0.0889369888305664, 0.08908598327636719, 0.08910233306884766, 0.088890625, 0.08910924530029297, 0.08876032257080078, 0.08853282928466796, 0.0887863998413086, 0.08906031799316406, 0.08861392211914063, 0.08851116943359374, 0.08882176208496094, 0.08886835479736328, 0.0888611831665039, 0.08860873413085937, 0.08890348815917969, 0.08917603302001953, 0.08927871704101563, 0.08861865234375, 0.08883363342285157, 0.0886197738647461, 0.08848108673095703, 0.08890233612060547, 0.08859852600097656, 0.08856547546386719, 0.08878710174560547, 0.08883161926269531, 0.08865229034423829, 0.08955289459228516, 0.08861036682128906, 0.08942431640625, 0.08910438537597656, 0.08878079986572265, 0.08878079986572265, 0.08857190704345703, 0.08925552368164062, 0.08891433715820313, 0.08887920379638672, 0.09090857696533203, 0.08829740905761718, 0.0882092514038086, 0.08819529724121093, 0.0885202865600586, 0.08822006225585938, 0.08818867492675782, 0.08838310241699218, 0.08824832153320313, 0.08819625854492187, 0.08850927734375, 0.08802713775634766, 0.08822895812988281, 0.08842912292480469, 0.08822956848144531, 0.08893081665039063, 0.0882152328491211, 0.08850479888916016, 0.08804761505126953, 0.08826675415039062, 0.08812857818603516, 0.08847456359863282, 0.08888114929199219, 0.08813772583007813, 0.08801225280761718, 0.08794297790527343, 0.08841423797607421, 0.08826358032226563, 0.08799148559570312, 0.08822803497314453, 0.08829174041748047, 0.08831913757324218, 0.08846630096435547, 0.09134102630615235, 0.08844438171386719, 0.0880335693359375, 0.08801001739501953, 0.08797052764892578, 0.09066905975341796, 0.08821247863769531, 0.08808345794677734, 0.08840732574462891, 0.08815699005126953, 0.08838483428955078, 0.08864009857177735, 0.08849366760253906, 0.08859839630126953, 0.08786793518066406, 0.08804902648925782, 0.08781629180908203, 0.08831644439697266, 0.08992070770263672, 0.08838841247558593, 0.08800665283203125, 0.08812134552001953, 0.08843260955810547, 0.0881131820678711, 0.08787548828125, 0.09023497772216797, 0.08845673370361327, 
0.08801261138916015, 0.08785270690917969, 0.0883858871459961, 0.08864006042480468, 0.0885145263671875, 0.09252057647705078, 0.089133056640625, 0.08988057708740234, 0.08892979431152344, 0.08826521301269531, 0.08849779510498047, 0.08869235229492188, 0.08886505889892578, 0.08869487762451173, 0.0882352294921875, 0.08831241607666016, 0.08829065704345704, 0.09258070373535156, 0.08868617248535156, 0.08835116577148437, 0.08839164733886719, 0.08815206146240234, 0.08822374725341797, 0.08835276794433594, 0.08896697235107422, 0.08906156921386718, 0.08849967956542969, 0.08846800231933594, 0.08849203491210937, 0.08851455688476563, 0.08842797088623047, 0.08816902160644531, 0.08845267486572266, 0.08867475128173828, 0.08849158477783203, 0.08868434906005859, 0.08925055694580078, 0.08875536346435547, 0.08849068450927734, 0.089119873046875, 0.08910530853271484, 0.08862038421630859, 0.08864649963378907, 0.08854918670654296, 0.08825651550292969, 0.08852413177490234, 0.08958428955078125, 0.08953241729736328, 0.08919859313964844, 0.08863289642333984, 0.08875052642822266, 0.09005583953857423, 0.08875094604492187, 0.08846336364746094, 0.08836873626708984, 0.08831632232666016, 0.0883220443725586, 0.08837324523925781, 0.08858419036865234, 0.08886271667480469, 0.08902851104736328, 0.08838349151611329, 0.08880921936035156, 0.08860707092285157, 0.08815821075439453, 0.08831590270996094, 0.08841011047363281, 0.08851046752929688, 0.08919055938720703, 0.08904512023925781, 0.08880281829833984, 0.08887036895751953, 0.08901299285888672, 0.0890408935546875, 0.08906159973144531, 0.08811949157714843, 0.08824358367919923, 0.08811270141601563, 0.08928550720214844, 0.0884858856201172, 0.08840396881103516, 0.08833551788330078, 0.08815497589111328, 0.09087305450439453, 0.0912330551147461, 0.08857142639160157, 0.08893071746826171, 0.08841836547851563, 0.08850182342529297, 0.08875667572021484, 0.0888770523071289, 0.08860614776611328, 0.0908314208984375, 0.08862223815917969, 0.08854393768310546, 0.08928044891357421, 0.0883629150390625, 0.0883388442993164, 0.08821132659912109, 0.08839568328857422, 0.08835289764404297, 0.08829535675048829, 0.08842655944824218, 0.08836857604980469, 0.0887520980834961, 0.08851087951660157, 0.08846096038818359, 0.08853763580322266, 0.08810905456542968, 0.09099443054199219, 0.08818243408203125, 0.08826284790039063, 0.08824486541748047, 0.0882738265991211, 0.08827769470214844, 0.0895239028930664, 0.0913677749633789, 0.08868367767333984, 0.08926310729980469, 0.08874598693847656, 0.08877021026611329, 0.08906723022460937, 0.08860530853271484, 0.08915353393554687, 0.08869068908691406, 0.08913100433349609, 0.08860262298583985, 0.0885432357788086, 0.08894361877441406, 0.08889209747314453, 0.08876064300537109, 0.08875539398193359, 0.08860713958740235, 0.08839750671386719, 0.08833302307128907, 0.08853644561767578, 0.08882879638671876, 0.08859008026123047, 0.08837760162353515, 0.0891883544921875, 0.08856111907958984, 0.08874857330322265, 0.0885002212524414, 0.08866316986083984, 0.08834342193603516, 0.0885002212524414, 0.08902044677734375, 0.08852912139892578, 0.0893355484008789, 0.08871641540527343, 0.08911062622070312, 0.08863148498535156, 0.08844143676757812, 0.08859442901611328, 0.08873548889160156, 0.08830931091308594, 0.08842211151123047, 0.0887790069580078, 0.08889826965332032, 0.08899574279785157, 0.08854537963867187, 0.08827225494384766, 0.0909439697265625, 0.08882128143310547, 0.08879987335205078, 0.08870297241210938, 0.08931737518310547, 0.08875206756591797, 0.08882182312011719, 0.08983094024658203, 
0.08965577697753906, 0.0887287368774414, 0.08893711853027343, 0.08871340942382812, 0.08882173156738281, 0.08893590545654297, 0.08876908874511719, 0.08905522918701173, 0.08919654083251953, 0.08841558074951172, 0.08875894165039062, 0.08825398254394531, 0.08827507019042968, 0.08794966125488281, 0.0892416000366211, 0.09288703918457031, 0.08932761383056641, 0.08878892517089844, 0.08936863708496094, 0.08881491088867187, 0.08857807922363281, 0.08898572540283203, 0.08901481628417969, 0.08936563110351563, 0.0887070083618164, 0.09235164642333984, 0.08957520294189453, 0.0909271011352539, 0.0885002212524414, 0.08841353607177735]",tokens/s,11.301379015244569,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of 
memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 559375 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1117.92128,9791.471616,0.0,9388.949504,9304.608768,s,1,31.904826171875,31.904826171875,0.0,31.904826171875,31.904826171875,31.904826171875,31.904826171875,[31.904826171875],,kWh,0.000728227331416656,8.03218450805696e-05,0.0002412899152540171,0.0010498390917512428,,MB,1589.051392,10204.61056,0.0,9789.505536,9597.898752,s,10,7.845305419921876,0.7845305419921875,0.0028480465460863458,0.7848037719726563,0.7864161193847656,0.788398519897461,0.7899844403076173,"[0.7828428344726562, 0.7850107421875, 0.7844032592773438, 0.7851332397460937, 0.7845968017578125, 0.7859755859375, 0.7903809204101563, 0.7829585571289063, 0.7856124877929688, 0.7783909912109375]",tokens/s,326.3097945810111,kWh,2.290987691281794e-05,2.5265835268104295e-06,1.2609475899538758e-05,3.804593633916713e-05,tokens/kWh,6728708.099541654,MB,1608.941568,10206.707712,0.0,9789.505536,9597.901312,s,10,364.44474609375004,36.44447460937501,0.13364949145778682,36.458794921875,36.54053359375,36.634692578125,36.710019765625,"[36.5073828125, 36.36546875, 36.3217734375, 36.50337109375, 36.463671875, 36.519609375, 36.37814453125, 36.45391796875, 36.7288515625, 36.2025546875]",tokens/s,1.7286571057823352,kWh,0.0010802692469434334,0.00011916183516953862,0.0003723614154102661,0.0015717924975232381,tokens/kWh,40081.62661373727,,s,630,364.44215985107434,0.5784796188112288,0.004295999765247389,0.5778168029785156,0.5840293273925781,0.5853253234863282,0.5928475012207032,"[0.5740711059570313, 0.5742484130859375, 0.572947265625, 0.5727286376953125, 0.5823311157226563, 0.5847039794921876, 0.5773905639648438, 0.5759932250976563, 0.5779175415039063, 0.5817753295898438, 0.5802557983398438, 0.576774169921875, 0.5840223999023437, 0.5769417114257812, 0.577950927734375, 0.5788983154296875, 0.5765524291992188, 0.5786873168945312, 0.5735712280273437, 0.58209228515625, 0.5773854370117187, 0.5800919189453125, 0.5778424072265625, 0.5850829467773437, 0.58563037109375, 0.5812059936523437, 0.5815029907226562, 0.5810269165039063, 0.5807686767578125, 0.579041259765625, 0.584216552734375, 0.577721435546875, 0.5773444213867187, 0.5758750610351563, 0.5773516845703125, 0.5753604125976562, 0.58071923828125, 0.5756436767578125, 0.5816852416992188, 0.5790116577148438, 0.5842330322265625, 0.5778150024414063, 
0.5774606323242187, 0.577818603515625, 0.5779943237304688, 0.5768568725585937, 0.5770010986328125, 0.5780747680664062, 0.580119384765625, 0.5778414306640625, 0.581956298828125, 0.5806858520507813, 0.5806550903320312, 0.58302197265625, 0.5774547119140625, 0.586657470703125, 0.578696533203125, 0.5935482177734375, 0.5787823486328125, 0.5810728759765625, 0.5801661376953124, 0.58222998046875, 0.58263330078125, 0.5781201782226563, 0.5775789794921875, 0.5758218383789062, 0.5744814453125, 0.5729473876953125, 0.5758048706054687, 0.5746726684570312, 0.5750423583984375, 0.5801956176757812, 0.5763427124023438, 0.58819970703125, 0.5799774780273438, 0.5770916748046875, 0.5775010986328125, 0.5794365234375, 0.5799280395507812, 0.5791477661132812, 0.5798002319335938, 0.57578173828125, 0.5756959838867187, 0.5751417846679687, 0.5736843872070313, 0.5754447631835937, 0.5774259643554688, 0.5741171264648437, 0.5747391967773438, 0.5805037841796875, 0.58535693359375, 0.578521240234375, 0.5755125732421875, 0.5742091674804688, 0.5777230224609375, 0.5759688720703126, 0.5763646850585937, 0.574736083984375, 0.5733973388671875, 0.5757095336914062, 0.5743576049804687, 0.5748070678710937, 0.5746238403320313, 0.5745345458984376, 0.57837158203125, 0.5768496704101562, 0.585257080078125, 0.5754915161132812, 0.5892344970703125, 0.579862548828125, 0.5766918334960938, 0.5762276000976563, 0.5760023193359375, 0.5748770751953125, 0.5806314086914063, 0.5764992065429687, 0.5735223999023438, 0.5740482788085938, 0.5775435180664062, 0.5754349365234375, 0.5785966186523438, 0.5764307861328125, 0.5744718627929688, 0.575117431640625, 0.5791921997070313, 0.5844151611328126, 0.573886474609375, 0.573724609375, 0.5778350830078125, 0.5776585083007812, 0.5766004638671876, 0.5799557495117188, 0.5750036010742188, 0.5771461181640625, 0.574704345703125, 0.5738720703125, 0.5730980224609376, 0.5768555908203125, 0.5755592651367187, 0.5779628295898438, 0.5800591430664063, 0.5857520751953125, 0.5792706298828125, 0.5761414184570313, 0.5754205322265625, 0.5793671875, 0.57426708984375, 0.573880615234375, 0.5742058715820313, 0.5726577758789062, 0.5776373901367188, 0.5781166381835937, 0.5756866455078125, 0.5734666137695312, 0.5751951293945312, 0.5759959106445313, 0.5752109985351562, 0.5852366333007812, 0.5880703735351562, 0.5760992431640625, 0.5753486328125, 0.5741849365234375, 0.58165234375, 0.5752408447265625, 0.5759115600585938, 0.5774315795898437, 0.575990234375, 0.5747379760742187, 0.575626708984375, 0.5755113525390625, 0.5750335693359375, 0.5745846557617188, 0.574601318359375, 0.5771469116210938, 0.5795000610351563, 0.5744144287109375, 0.5805142211914063, 0.5754716186523438, 0.573470703125, 0.574076171875, 0.576868896484375, 0.5748164672851562, 0.5754142456054687, 0.579905029296875, 0.5745805053710937, 0.5758320922851563, 0.5747799072265625, 0.5789165649414062, 0.5743424682617188, 0.5757988891601562, 0.5851997680664063, 0.580947998046875, 0.5851975708007813, 0.581490478515625, 0.57675390625, 0.5800418701171876, 0.5783858642578125, 0.5777479248046875, 0.5771812133789063, 0.574637939453125, 0.5800137939453125, 0.5760664672851562, 0.5768314819335938, 0.5771672973632812, 0.5790167236328125, 0.581564453125, 0.579968994140625, 0.5839927978515626, 0.5802420043945312, 0.5846193237304688, 0.5787921752929688, 0.5788566284179687, 0.5812817993164062, 0.580583740234375, 0.5834588012695312, 0.5791146240234375, 0.5777698364257813, 0.5783161010742187, 0.5775075073242187, 0.5787095336914062, 0.5772103881835937, 0.5808614501953125, 0.5788779296875, 0.5800591430664063, 
0.5825167236328125, 0.5803519897460937, 0.5852672119140625, 0.580482666015625, 0.5761458129882813, 0.5764190673828125, 0.5813377685546876, 0.57617822265625, 0.5770667724609375, 0.5788018798828125, 0.576052734375, 0.5766702270507813, 0.5770363159179688, 0.5766533203125, 0.5770731811523437, 0.5776998291015625, 0.5802762451171875, 0.5778118286132813, 0.5829208374023438, 0.5876978149414063, 0.5785562744140625, 0.584236083984375, 0.578990478515625, 0.5821302490234375, 0.5791416015625, 0.577518798828125, 0.5784829711914062, 0.5753421630859376, 0.57666357421875, 0.5729581298828125, 0.5805245971679688, 0.5785308837890625, 0.5762088623046875, 0.5770634155273437, 0.5848084716796875, 0.5861864624023437, 0.575411865234375, 0.5763382568359375, 0.57309423828125, 0.5810808715820313, 0.5750066528320312, 0.5782112426757813, 0.5763162841796875, 0.57392333984375, 0.5753096313476562, 0.5744556884765625, 0.57425537109375, 0.5758356323242187, 0.5755025024414062, 0.57614306640625, 0.5758709106445312, 0.5769120483398438, 0.5796043090820312, 0.5853037719726563, 0.5797913208007812, 0.577044189453125, 0.5781834716796875, 0.58134326171875, 0.5796126708984375, 0.5784412231445313, 0.5792537231445313, 0.5761582641601563, 0.5739453735351563, 0.5748045043945312, 0.5746360473632812, 0.5751111450195312, 0.5764812622070312, 0.573844970703125, 0.5782484130859376, 0.579608642578125, 0.5841492919921875, 0.5820338745117187, 0.5770567626953125, 0.5776988525390625, 0.5802260131835938, 0.57678369140625, 0.5776547241210938, 0.5796093139648437, 0.5800347290039063, 0.58165234375, 0.5832069091796875, 0.5821902465820312, 0.580952880859375, 0.583041015625, 0.5825106201171875, 0.5815418701171875, 0.5890349731445312, 0.589391845703125, 0.5828859252929688, 0.5826600952148437, 0.5810708618164062, 0.580431884765625, 0.5802393798828125, 0.5837529907226563, 0.5770513305664062, 0.5815418090820312, 0.5794421997070313, 0.5797680053710937, 0.57919580078125, 0.581265380859375, 0.5805752563476563, 0.5795257568359375, 0.5834981689453125, 0.5866972045898438, 0.5840916748046875, 0.580106201171875, 0.5791580810546875, 0.5815316772460938, 0.5809684448242187, 0.5789263305664063, 0.5849315795898438, 0.581915771484375, 0.5805836181640625, 0.5813255004882812, 0.5803929443359375, 0.5788322143554687, 0.581007080078125, 0.581302490234375, 0.5808273315429687, 0.5820333862304687, 0.5934080200195313, 0.578609130859375, 0.579135498046875, 0.5780905151367187, 0.5800555419921875, 0.5763174438476563, 0.5750701904296875, 0.5762047729492188, 0.5761262817382813, 0.5788612060546875, 0.5780701293945313, 0.57504248046875, 0.5780169677734375, 0.5779561767578125, 0.5799177856445312, 0.5758749389648438, 0.5819634399414062, 0.5819130249023438, 0.5792310791015625, 0.5754785766601562, 0.5755669555664062, 0.5810282592773437, 0.5765675659179688, 0.5757626342773438, 0.5763276977539062, 0.5768826904296875, 0.57883154296875, 0.57745068359375, 0.577761474609375, 0.5774132080078125, 0.5803059692382813, 0.5809365844726563, 0.5787555541992188, 0.579508544921875, 0.5768241577148437, 0.5771078491210937, 0.57657958984375, 0.5792973022460938, 0.5764337768554687, 0.5746568603515625, 0.5788099365234375, 0.5764075317382813, 0.5763658447265625, 0.5754293212890625, 0.5749595947265626, 0.5772717895507813, 0.5768800659179687, 0.5775365600585938, 0.5738086547851563, 0.5776834716796875, 0.5777730712890625, 0.5833092651367188, 0.5756145629882813, 0.5739194946289062, 0.5741591796875, 0.5756214599609375, 0.5740575561523438, 0.5753311157226563, 0.5745685424804687, 0.5742754516601563, 0.57467236328125, 
0.57692431640625, 0.5756227416992188, 0.5767800903320313, 0.5762034301757812, 0.5776578979492187, 0.5825934448242187, 0.578150390625, 0.590532470703125, 0.5768331909179687, 0.5773252563476563, 0.5756807250976562, 0.5805089111328126, 0.5800365600585937, 0.57650048828125, 0.5811887817382813, 0.575758544921875, 0.5777802124023438, 0.5766876831054687, 0.5759103393554688, 0.576921875, 0.578029541015625, 0.5767243041992187, 0.5781510620117187, 0.5793239135742188, 0.5931842651367187, 0.5756685791015625, 0.5807528686523438, 0.575428955078125, 0.5804088134765625, 0.5757977294921875, 0.5770365600585937, 0.577371337890625, 0.5765242919921875, 0.5759249877929687, 0.5767106323242187, 0.5758812255859375, 0.5752750244140625, 0.5737587890625, 0.5736311645507812, 0.577028076171875, 0.5920230102539062, 0.5805099487304688, 0.5799744873046875, 0.5756870727539063, 0.579207763671875, 0.5816259155273438, 0.5786664428710937, 0.5752606201171875, 0.5752116088867187, 0.5763623657226562, 0.5750963745117188, 0.5770450439453125, 0.5751577758789063, 0.5758550415039062, 0.5790836181640625, 0.5769959716796875, 0.5806489868164062, 0.5767373046875, 0.5838991088867187, 0.5774397583007812, 0.5773148193359375, 0.5772410888671875, 0.581326171875, 0.5750947875976562, 0.572927734375, 0.5759083862304688, 0.578052490234375, 0.5774513549804687, 0.5799338989257813, 0.576248779296875, 0.5789818725585938, 0.5766942749023437, 0.5734993896484375, 0.5756805419921875, 0.5749790649414063, 0.5879857177734376, 0.5764260864257813, 0.5760676879882812, 0.5757498168945312, 0.57943701171875, 0.5789366455078125, 0.5799915771484375, 0.5814620361328126, 0.5768233032226563, 0.5802677612304687, 0.5826173706054687, 0.5812612915039063, 0.5813098754882813, 0.5848110961914063, 0.5843230590820313, 0.5822218627929687, 0.5775662231445312, 0.5935907592773437, 0.5746421508789062, 0.5797376098632813, 0.57620068359375, 0.5789696044921875, 0.5815206298828125, 0.5782146606445312, 0.584170166015625, 0.5798133544921875, 0.584160400390625, 0.58111474609375, 0.5815214233398438, 0.5809600830078125, 0.5814273681640625, 0.5839216918945312, 0.5816904296875, 0.5982515258789063, 0.5835969848632813, 0.5823519897460937, 0.582893798828125, 0.5876783447265626, 0.5873583984375, 0.5849434814453125, 0.5826068725585938, 0.5807513427734375, 0.58155419921875, 0.5853429565429688, 0.5809786987304687, 0.5819842529296875, 0.5842902221679688, 0.5819720458984375, 0.58090869140625, 0.5810978393554688, 0.5976514282226563, 0.5836390380859375, 0.5892177734375, 0.58032861328125, 0.58473291015625, 0.5800167236328125, 0.5783423461914062, 0.5834443359375, 0.5784685668945313, 0.579842041015625, 0.5772304077148438, 0.5803272705078125, 0.580842041015625, 0.5851781616210937, 0.5849783935546875, 0.5861876220703125, 0.5844558715820313, 0.6001790771484375, 0.5841018676757812, 0.58966162109375, 0.5816358032226563, 0.5837198486328125, 0.5805134887695312, 0.5800814208984375, 0.5809623413085937, 0.5765408325195313, 0.5782490844726562, 0.57780224609375, 0.5796167602539063, 0.5803499755859375, 0.5823160400390625, 0.58634033203125, 0.5775216674804687, 0.586936279296875, 0.5848079223632813, 0.5801866455078125, 0.574852783203125, 0.5773861083984375, 0.5695430908203125, 0.5714488525390625, 0.5686296997070313, 0.57274267578125, 0.57135205078125, 0.5687886962890625, 0.5711908569335937, 0.57350830078125, 0.5719362182617187, 0.5708203125, 0.5719535522460938, 0.5707964477539063, 0.5731676025390625, 0.5839544067382813, 0.5750697021484374, 0.573235107421875, 0.5732122192382813, 0.5701456298828125, 0.5697694091796875, 
0.5692054443359374, 0.5757933959960938, 0.5726307983398438, 0.567472412109375, 0.5711471557617187, 0.5687705688476562, 0.5722937622070312, 0.5694847412109375, 0.5754122924804688, 0.573166259765625, 0.5707952880859375, 0.5804161987304688, 0.5748203735351562, 0.5753851318359375, 0.5708087768554687, 0.5740789794921874, 0.5687705688476562, 0.5725225219726563, 0.5694412841796875, 0.571989013671875, 0.5705431518554688, 0.5712425537109375, 0.570162109375, 0.5729093627929688, 0.5713226318359375, 0.5698079833984375, 0.5700780639648437, 0.5796884765625, 0.5877534790039063, 0.5851729736328125, 0.580455810546875, 0.58037060546875, 0.5841103515625, 0.580898681640625, 0.5827156982421875, 0.583486572265625, 0.58699267578125, 0.5804414672851562, 0.5786997680664062, 0.5815316772460938, 0.5797263793945312, 0.5802315673828125, 0.5808748168945312]",tokens/s,1.7286693730973486,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 503648 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,2579.156992,11834.097664,0.0,11431.575552,10953.091072,s,1,21.489787109375,21.489787109375,0.0,21.489787109375,21.489787109375,21.489787109375,21.489787109375,[21.489787109375],,kWh,0.00041744365853752847,4.603984377980366e-05,0.0001425434473680265,0.0006060269496853586,,MB,1952.227328,12729.581568,0.0,12314.476544,11624.13056,s,10,17.760789184570314,1.7760789184570314,0.0060556263847267875,1.7775396118164062,1.7814933837890625,1.782499072265625,1.783303623046875,"[1.76168359375, 1.7699195556640626, 1.7744102783203124, 1.7750592041015625, 1.7766231689453125, 1.7784560546875, 1.7835047607421874, 1.7812698974609376, 1.779941162109375, 1.7799215087890625]",tokens/s,144.137739229741,kWh,5.171655630459099e-05,5.703948240591926e-06,3.4323221902998746e-05,9.174372644818166e-05,tokens/kWh,2790381.532459257,MB,1956.462592,12731.67872,0.0,12316.573696,11624.13312,s,10,87.72146289062499,8.7721462890625,0.014501294596527442,8.77722802734375,8.784294921875,8.78789404296875,8.79077333984375,"[8.7426259765625, 8.7536796875, 8.76289453125, 8.7676015625, 8.77509375, 8.7793623046875, 8.7817734375, 8.783443359375, 8.7834951171875, 
8.7914931640625]",tokens/s,7.181822774496042,kWh,0.0002566548709870752,2.8310979856839396e-05,0.00017036760851620313,0.00045533345936011775,tokens/kWh,138360.1374002565,,s,630,87.71783770751954,0.13923466302780876,0.0016834078389563363,0.13897373962402343,0.14031893615722657,0.14069376831054686,0.14955424789428715,"[0.1513657989501953, 0.13741679382324218, 0.13742463684082032, 0.13753254699707032, 0.13612144470214843, 0.13835650634765626, 0.13834854125976562, 0.14015692138671876, 0.1386453094482422, 0.1378572235107422, 0.13771136474609375, 0.137138427734375, 0.13800556945800782, 0.13867471313476562, 0.13959408569335938, 0.1386018524169922, 0.137989990234375, 0.1381012725830078, 0.13676576232910156, 0.13871717834472655, 0.13928445434570313, 0.13857589721679686, 0.1385861053466797, 0.1382092742919922, 0.13801455688476563, 0.13771559143066406, 0.13799066162109375, 0.14001239013671876, 0.1394819793701172, 0.13869882202148437, 0.1381273651123047, 0.13798927307128905, 0.13815689086914062, 0.1382657928466797, 0.1386565704345703, 0.13897727966308593, 0.14031170654296876, 0.13928533935546875, 0.13826252746582032, 0.138088134765625, 0.1381414031982422, 0.13869676208496093, 0.13832269287109375, 0.13961958312988282, 0.14013699340820313, 0.13960829162597657, 0.13955606079101562, 0.13812998962402342, 0.13823344421386718, 0.13880975341796875, 0.1386038055419922, 0.13933641052246093, 0.13846322631835936, 0.1388903045654297, 0.13923951721191405, 0.13949583435058593, 0.13962080383300782, 0.13793878173828125, 0.13935145568847657, 0.13877293395996093, 0.13886904907226563, 0.1387676544189453, 0.13845539855957031, 0.15045248413085938, 0.1368924102783203, 0.13760275268554686, 0.1385004119873047, 0.138390625, 0.13772598266601563, 0.13819584655761719, 0.14014675903320312, 0.1373365478515625, 0.13777874755859376, 0.1386851501464844, 0.13911862182617188, 0.13844061279296874, 0.13805331420898437, 0.13979254150390624, 0.13792271423339844, 0.13778732299804688, 0.13798733520507814, 0.13953216552734374, 0.1393753662109375, 0.1380405731201172, 0.13930181884765624, 0.13824205017089844, 0.1382412109375, 0.138001220703125, 0.13858406066894532, 0.13892166137695314, 0.13925587463378905, 0.13989071655273438, 0.13867555236816406, 0.13786988830566407, 0.1385142364501953, 0.13871359252929688, 0.13822505187988282, 0.13848335266113282, 0.13948204040527343, 0.14015225219726563, 0.13949929809570313, 0.13846333312988282, 0.13834223937988283, 0.13861360168457032, 0.13832806396484376, 0.13881753540039063, 0.13913658142089844, 0.13886508178710938, 0.13896092224121093, 0.13960188293457032, 0.1403330535888672, 0.1391595458984375, 0.13827276611328124, 0.13945651245117188, 0.13866598510742187, 0.1385533447265625, 0.13868646240234375, 0.13881753540039063, 0.13887481689453124, 0.13981497192382814, 0.14017532348632813, 0.14045558166503908, 0.1386049346923828, 0.1386639404296875, 0.13899507141113282, 0.1388797149658203, 0.1488650817871094, 0.13808956909179687, 0.1377676544189453, 0.13712294006347656, 0.13710450744628908, 0.1380289306640625, 0.13959475708007812, 0.141765625, 0.14000086975097656, 0.1378741455078125, 0.13776351928710937, 0.13721401977539063, 0.13765728759765625, 0.13860009765625, 0.1405730285644531, 0.1398594512939453, 0.13911875915527344, 0.139184326171875, 0.13796092224121093, 0.13782806396484376, 0.13794813537597655, 0.1397139892578125, 0.1384368896484375, 0.13908201599121095, 0.13787855529785156, 0.13911692810058593, 0.13921749877929687, 0.13798403930664063, 0.1388523254394531, 0.13932655334472657, 0.1384602813720703, 
0.13844866943359374, 0.13843455505371094, 0.1386348114013672, 0.13926982116699219, 0.14016793823242188, 0.13963250732421875, 0.13911497497558595, 0.13901132202148436, 0.13822198486328124, 0.13893145751953126, 0.1386155548095703, 0.1389926452636719, 0.1389261779785156, 0.14016195678710938, 0.13975552368164063, 0.1395404815673828, 0.13896908569335936, 0.138802490234375, 0.13946482849121095, 0.13896966552734374, 0.1390991668701172, 0.13873866271972657, 0.1389240264892578, 0.13842556762695313, 0.13987078857421875, 0.14029437255859376, 0.14018765258789062, 0.14022621154785156, 0.1390627899169922, 0.1388982696533203, 0.13916160583496093, 0.13960089111328125, 0.14979624938964844, 0.13794473266601562, 0.13776797485351563, 0.13776885986328125, 0.1377230987548828, 0.13724716186523436, 0.13977389526367187, 0.141512451171875, 0.13894525146484374, 0.1385902099609375, 0.13811843872070312, 0.13659341430664063, 0.13769197082519533, 0.13931254577636717, 0.1402268829345703, 0.1392211151123047, 0.13873301696777343, 0.14028854370117189, 0.13801461791992187, 0.13813360595703125, 0.13815731811523438, 0.13846102905273439, 0.1393468475341797, 0.13810073852539062, 0.13896424865722656, 0.13951206970214844, 0.13929029846191407, 0.1380227813720703, 0.13908265686035157, 0.13947203063964844, 0.13830230712890626, 0.13902029418945314, 0.13830348205566406, 0.13832803344726563, 0.1387596435546875, 0.14009117126464843, 0.1403185272216797, 0.13964317321777345, 0.13933401489257813, 0.13930645751953125, 0.1388306884765625, 0.13873756408691407, 0.13862716674804687, 0.13906031799316407, 0.13892620849609374, 0.13891253662109376, 0.14088397216796875, 0.13924713134765626, 0.139315673828125, 0.13914111328125, 0.13984880065917968, 0.1389430694580078, 0.13897760009765625, 0.13959190368652344, 0.13835433959960938, 0.13846453857421875, 0.13958950805664064, 0.13993795776367188, 0.14025811767578125, 0.1406625213623047, 0.1402554931640625, 0.13878477478027343, 0.13864883422851562, 0.14896176147460938, 0.13857395935058595, 0.13935411071777343, 0.13782826232910156, 0.13786880493164064, 0.13809132385253906, 0.13859432983398437, 0.14042710876464845, 0.13831890869140626, 0.1384949188232422, 0.13937577819824218, 0.13921340942382812, 0.13786947631835939, 0.1385319366455078, 0.13984623718261718, 0.13807852172851562, 0.13856973266601563, 0.1382782745361328, 0.1390226593017578, 0.13936262512207032, 0.1387554931640625, 0.13962710571289064, 0.13877658081054686, 0.13883917236328125, 0.13843341064453124, 0.13857157897949218, 0.1385121307373047, 0.13961465454101563, 0.14063320922851563, 0.1406472930908203, 0.1391656036376953, 0.13873263549804687, 0.13880419921875, 0.138350341796875, 0.1389550476074219, 0.13914956665039063, 0.13845887756347655, 0.13978182983398438, 0.13972032165527343, 0.14025593566894531, 0.13927401733398437, 0.13884979248046875, 0.13930268859863282, 0.13903890991210938, 0.13957606506347656, 0.1389629364013672, 0.13823794555664062, 0.1382554931640625, 0.13938368225097655, 0.1402677764892578, 0.14112506103515626, 0.14023735046386718, 0.13919952392578125, 0.13974771118164062, 0.13877212524414062, 0.13879779052734376, 0.13934498596191405, 0.1388573760986328, 0.13968995666503906, 0.1384141082763672, 0.1400975341796875, 0.1404407958984375, 0.140370361328125, 0.1524573059082031, 0.13808639526367186, 0.13782540893554687, 0.13669638061523437, 0.13905337524414063, 0.13910838317871094, 0.13928445434570313, 0.141955078125, 0.13859373474121095, 0.13831178283691406, 0.13781455993652345, 0.13673175048828126, 0.1389433288574219, 0.14037632751464843, 
0.14149594116210937, 0.13998297119140626, 0.13821449279785156, 0.13810719299316407, 0.1384185028076172, 0.13819549560546876, 0.1387220458984375, 0.1399797821044922, 0.1395648956298828, 0.13848591613769531, 0.13951181030273438, 0.1395793914794922, 0.13806787109375, 0.13924566650390624, 0.1388748779296875, 0.14040885925292967, 0.13829347229003905, 0.13886370849609375, 0.13828160095214845, 0.13853904724121094, 0.1400647735595703, 0.1406402587890625, 0.14026751708984375, 0.13814694213867187, 0.13939596557617187, 0.13889878845214843, 0.13831642150878906, 0.13886671447753907, 0.1396553955078125, 0.13830490112304689, 0.13958186340332032, 0.13959971618652345, 0.14013250732421875, 0.139831298828125, 0.1390921630859375, 0.13993260192871093, 0.13977894592285156, 0.13903654479980468, 0.13957720947265626, 0.1384163818359375, 0.13856358337402344, 0.13860560607910155, 0.13984591674804686, 0.13961494445800782, 0.13979644775390626, 0.14036515808105468, 0.1405569610595703, 0.13913906860351563, 0.13890255737304688, 0.1515397186279297, 0.13810064697265625, 0.13786944580078125, 0.13787747192382813, 0.1365707550048828, 0.13868453979492187, 0.14083587646484375, 0.14222227478027344, 0.14046188354492187, 0.13867645263671874, 0.13841203308105468, 0.13794216918945312, 0.13776300048828125, 0.13867805480957032, 0.13964306640625, 0.14025593566894531, 0.13808221435546875, 0.13956048583984376, 0.13979656982421876, 0.1385141143798828, 0.13815660095214843, 0.13955708312988283, 0.14019715881347655, 0.13835324096679688, 0.13837673950195312, 0.1384003448486328, 0.13802680969238282, 0.13976800537109374, 0.14059519958496094, 0.14086553955078124, 0.14007501220703125, 0.13866188049316405, 0.138399169921875, 0.13862294006347656, 0.13966397094726563, 0.13888214111328126, 0.1395393524169922, 0.13814070129394532, 0.13944834899902345, 0.13893321228027344, 0.13979440307617189, 0.1403304901123047, 0.1400831298828125, 0.13969168090820314, 0.13872630310058592, 0.1395589141845703, 0.13878253173828126, 0.13876243591308593, 0.13889741516113283, 0.13958096313476562, 0.13843641662597655, 0.14009590148925782, 0.13988275146484375, 0.14018963623046876, 0.1404539489746094, 0.13986611938476562, 0.13907148742675782, 0.13915122985839845, 0.13967167663574218, 0.13916093444824218, 0.13879986572265626, 0.1397349395751953, 0.13852671813964842, 0.1488585968017578, 0.1393487091064453, 0.1387130889892578, 0.13855516052246095, 0.13679837036132814, 0.13812339782714844, 0.1394707794189453, 0.14082400512695312, 0.13853138732910156, 0.13918617248535156, 0.13952000427246095, 0.13939712524414063, 0.1381908416748047, 0.13831336975097655, 0.13958998107910156, 0.1392205810546875, 0.1386495361328125, 0.13789231872558594, 0.13864486694335937, 0.13873625183105467, 0.1400259552001953, 0.1405193328857422, 0.13917312622070313, 0.13856822204589844, 0.13873568725585939, 0.1388193664550781, 0.13870460510253907, 0.13805973815917968, 0.13994668579101563, 0.13854867553710937, 0.14013449096679687, 0.1398561553955078, 0.1401919403076172, 0.14005862426757812, 0.13885440063476562, 0.13939712524414063, 0.13944204711914063, 0.13890879821777344, 0.13903564453125, 0.1385791015625, 0.13882028198242188, 0.13877468872070312, 0.1389486083984375, 0.14069964599609375, 0.14071942138671875, 0.1404354248046875, 0.14032261657714845, 0.13882672119140624, 0.13941506958007813, 0.138621337890625, 0.13970538330078125, 0.13935081481933595, 0.13914682006835938, 0.13906565856933595, 0.13897145080566406, 0.1391060791015625, 0.14037014770507814, 0.14078976440429689, 0.14101298522949218, 
0.13994125366210938, 0.13901414489746095, 0.13950425720214843, 0.13939418029785156, 0.14991375732421874, 0.13771475219726562, 0.1379839630126953, 0.13849903869628907, 0.1392062683105469, 0.13787110900878907, 0.1396844787597656, 0.14060850524902344, 0.13845196533203125, 0.13787545776367188, 0.13787135314941407, 0.13780377197265625, 0.13870182800292968, 0.14143539428710938, 0.14059510803222655, 0.13994192504882813, 0.13852525329589843, 0.138994873046875, 0.1379431610107422, 0.13842044067382814, 0.13978466796875, 0.13946421813964843, 0.13830192565917968, 0.1393408660888672, 0.14010610961914063, 0.1396956787109375, 0.13961296081542968, 0.1392845458984375, 0.13931126403808594, 0.1387981719970703, 0.13863836669921875, 0.139229248046875, 0.13814151000976563, 0.13940531921386717, 0.139509765625, 0.13997177124023438, 0.13988873291015624, 0.139115234375, 0.1395072326660156, 0.13886451721191406, 0.13967379760742188, 0.13947724914550783, 0.13931741333007813, 0.1382010955810547, 0.13825843811035157, 0.13974322509765624, 0.13950944519042968, 0.1402674560546875, 0.1401696014404297, 0.14068658447265625, 0.14026205444335937, 0.13893836975097656, 0.13950985717773437, 0.13900563049316406, 0.13956536865234376, 0.139472900390625, 0.13944178771972657, 0.13941183471679688, 0.13818255615234376, 0.13871728515625, 0.14045561218261718, 0.14001756286621095, 0.14081884765625, 0.15143638610839844, 0.13762637329101562, 0.13774464416503907, 0.1380248565673828, 0.13806124877929687, 0.13823033142089844, 0.14042726135253905, 0.14280294799804688, 0.13912797546386718, 0.13946896362304687, 0.13836972045898438, 0.13777891540527343, 0.1380264892578125, 0.1389649658203125, 0.14060832214355468, 0.1390464324951172, 0.13840147399902344, 0.13897602844238283, 0.13922303771972655, 0.13892134094238281, 0.13914358520507814, 0.13980694580078126, 0.1393192901611328, 0.13859564208984376, 0.13863186645507813, 0.13836892700195313, 0.1386367645263672, 0.13990567016601563, 0.13995401000976562, 0.14053312683105468, 0.1407490234375, 0.1396837158203125, 0.13867196655273437, 0.1385705871582031, 0.13954252624511718, 0.13834597778320312, 0.1401308135986328, 0.13872889709472655, 0.13931167602539063, 0.138461181640625, 0.13946588134765625, 0.1410650634765625, 0.14023382568359374, 0.14072618103027343, 0.14018844604492187, 0.1400506591796875, 0.13893571472167968, 0.13873799133300782, 0.13969847106933594, 0.13843455505371094, 0.14024873352050782, 0.13867613220214844, 0.1397457275390625, 0.13850621032714844, 0.1393290557861328, 0.1410380859375, 0.14031622314453124, 0.14093913269042968, 0.140546630859375, 0.140328125, 0.13906207275390625, 0.13956300354003906, 0.1389563446044922]",tokens/s,7.182119583255458,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 68.12 MiB is free. Process 506607 has 14.67 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 293.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in 
set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 68.12 MiB is free. Process 510136 has 14.67 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 293.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,2581.417984,11834.097664,0.0,11431.575552,10953.091072,s,1,22.038072265625,22.038072265625,0.0,22.038072265625,22.038072265625,22.038072265625,22.038072265625,[22.038072265625],,kWh,0.00042748746734170407,4.7139385525164666e-05,0.00014435650437399916,0.0006189833572408679,,MB,1956.937728,12729.581568,0.0,12314.476544,11624.261632,s,10,17.879141845703124,1.7879141845703124,0.007203075157415105,1.7909700927734375,1.7946715698242188,1.7948674011230468,1.7950240661621093,"[1.7714844970703125, 1.7802391357421874, 1.7827747802734375, 1.7874039306640626, 1.7922506103515625, 1.7908424072265625, 1.7910977783203126, 1.795063232421875, 1.793357421875, 1.7946280517578126]",tokens/s,143.18360590753085,kWh,5.206608552249236e-05,5.742504420288218e-06,3.457108321239799e-05,9.237967315517856e-05,tokens/kWh,2771172.393844406,MB,1961.066496,12733.775872,0.0,12316.573696,11624.264192,s,10,88.398681640625,8.8398681640625,0.025004639656599704,8.84019140625,8.86935732421875,8.871352001953126,8.872947744140626,"[8.79255859375, 8.81256640625, 8.8213994140625, 8.8374541015625, 8.83241796875, 8.8429287109375, 8.84868359375, 8.8689140625, 8.868412109375, 8.8733466796875]",tokens/s,7.1268031186392,kWh,0.0002589350490825048,2.856165963790862e-05,0.00017160644284059943,0.00045910315156101286,tokens/kWh,137224.0634502104,,s,630,88.39461944580069,0.14030891975523935,0.001782069205585883,0.14012771606445312,0.14150240936279299,0.14180280532836914,0.15077528091430664,"[0.15087046813964844, 0.13861068725585937, 0.13889126586914063, 0.13882981872558595, 0.13805078125, 0.1374463348388672, 0.13832127380371093, 0.1399566650390625, 0.138418212890625, 0.1392025604248047, 0.1391545867919922, 0.1396580810546875, 0.1399352264404297, 0.1389913330078125, 0.1382223663330078, 0.13823385620117187, 0.1393270721435547, 0.13798851013183594, 0.13946873474121094, 0.138055419921875, 0.1402227783203125, 0.13989273071289063, 0.1399596405029297, 0.14004019165039064, 0.1378857879638672, 0.13781584167480468, 0.13883062744140626, 0.1390592041015625, 0.14004019165039064, 0.13837677001953125, 0.1383315887451172, 0.1403504638671875, 0.14015078735351563, 0.14026275634765625, 0.14039517211914063, 0.1396321258544922, 0.13918258666992187, 0.13901750183105469, 0.13922991943359375, 0.1390849609375, 0.1392955780029297, 0.13972889709472655, 0.13961187744140624, 0.1390676727294922, 0.1400252227783203, 0.13959231567382813, 0.14006271362304687, 0.14058905029296875, 0.14045709228515624, 0.14134361267089843, 0.14064559936523438, 0.1401741485595703, 0.13927334594726562, 0.13931318664550782, 0.1402596435546875, 0.1395758056640625, 0.13940531921386717, 0.1399910430908203, 0.1398497314453125, 0.13955686950683593, 0.13968112182617187, 0.13936412048339844, 0.13985008239746094, 0.15040921020507814, 
0.13888652038574217, 0.13870294189453125, 0.13811561584472656, 0.1373306884765625, 0.1378280029296875, 0.14056614685058594, 0.14200904846191406, 0.13918809509277344, 0.13870704650878907, 0.13998883056640626, 0.13987245178222657, 0.13908554077148438, 0.13836431884765624, 0.139989501953125, 0.13982144165039062, 0.1387704315185547, 0.13879910278320312, 0.139502685546875, 0.13836790466308593, 0.13968083190917968, 0.13983570861816405, 0.1399055633544922, 0.14034544372558594, 0.14036991882324218, 0.14030029296875, 0.139228515625, 0.13914793395996095, 0.13949542236328125, 0.13984153747558595, 0.1405849609375, 0.13873484802246094, 0.13905381774902345, 0.1392756805419922, 0.1392195587158203, 0.13925375366210938, 0.14046592712402345, 0.1404970245361328, 0.13958969116210937, 0.1402163848876953, 0.1405001983642578, 0.14023554992675782, 0.1400361022949219, 0.13994189453125, 0.13993778991699218, 0.13970223999023437, 0.1389911651611328, 0.13978671264648437, 0.13931663513183593, 0.13940386962890625, 0.13958694458007812, 0.139217529296875, 0.14038572692871093, 0.13940611267089845, 0.14050282287597657, 0.1396140441894531, 0.140094970703125, 0.14124099731445314, 0.14064183044433592, 0.14144688415527343, 0.14044441223144533, 0.14155491638183593, 0.14085200500488282, 0.150542236328125, 0.13778147888183595, 0.13799945068359376, 0.1383365478515625, 0.13813352966308592, 0.13817904663085936, 0.14098431396484376, 0.1428255615234375, 0.13850758361816407, 0.13997731018066406, 0.1400581512451172, 0.13993589782714844, 0.1389734344482422, 0.13985920715332031, 0.1413330535888672, 0.13899591064453126, 0.13886390686035155, 0.13936924743652343, 0.13915306091308594, 0.13894227600097656, 0.1394835205078125, 0.14012637329101563, 0.139325439453125, 0.13969398498535157, 0.14052313232421876, 0.14028233337402343, 0.14032815551757813, 0.14012905883789062, 0.1397760009765625, 0.1405254364013672, 0.13958114624023438, 0.13949378967285156, 0.13905436706542967, 0.13945417785644532, 0.13924864196777345, 0.13976976013183592, 0.13901551818847657, 0.13916851806640626, 0.14096333312988282, 0.1395362548828125, 0.13951388549804689, 0.14008380126953124, 0.14107443237304687, 0.14099046325683592, 0.1405460510253906, 0.14152662658691406, 0.1406591033935547, 0.14135501098632813, 0.14061363220214843, 0.14053375244140626, 0.14053520202636718, 0.14029884338378906, 0.13926748657226562, 0.13934652709960937, 0.14135501098632813, 0.13954867553710937, 0.13935411071777343, 0.13930848693847656, 0.14096978759765624, 0.13910870361328126, 0.1399685821533203, 0.14079830932617188, 0.14002496337890624, 0.1523811798095703, 0.13879936218261718, 0.13842227172851562, 0.1378383331298828, 0.13776716613769532, 0.1394298858642578, 0.14144000244140625, 0.1434337615966797, 0.1399869384765625, 0.1401636199951172, 0.14031507873535157, 0.1395281982421875, 0.13764300537109375, 0.1400432586669922, 0.14032077026367187, 0.14007635498046875, 0.13909625244140625, 0.139420166015625, 0.13913497924804688, 0.13841407775878906, 0.13951181030273438, 0.1406750793457031, 0.1406730194091797, 0.13938482666015625, 0.1397139892578125, 0.14054972839355467, 0.14046908569335936, 0.14048268127441407, 0.14176815795898437, 0.14116099548339844, 0.14143621826171876, 0.1410707550048828, 0.14002348327636718, 0.13925753784179687, 0.13917010498046875, 0.1392403259277344, 0.14034506225585938, 0.14016511535644532, 0.14043341064453124, 0.13945242309570313, 0.13920451354980468, 0.1396320037841797, 0.13982940673828126, 0.1399381103515625, 0.14092518615722657, 0.13959986877441405, 0.13986405944824218, 
0.13970822143554687, 0.14017555236816406, 0.1407344665527344, 0.1412157440185547, 0.14160076904296875, 0.14098751831054687, 0.14058790588378905, 0.14067453002929686, 0.1406992645263672, 0.1414393310546875, 0.140831298828125, 0.14083071899414062, 0.14027162170410157, 0.13994598388671875, 0.13993983459472656, 0.1398065948486328, 0.15112393188476564, 0.13979443359375, 0.13999494934082032, 0.1391351623535156, 0.13901542663574218, 0.1395224914550781, 0.13976608276367186, 0.14099635314941406, 0.13925820922851562, 0.13911581420898436, 0.1392659912109375, 0.13807232666015626, 0.13940780639648437, 0.1410694122314453, 0.14083573913574218, 0.1400094757080078, 0.13963673400878907, 0.14050656127929687, 0.14022099304199218, 0.1393561248779297, 0.1394173126220703, 0.14071391296386718, 0.1402136993408203, 0.13896751403808594, 0.13941194152832032, 0.13966744995117186, 0.13927439880371092, 0.13950352478027345, 0.14064437866210938, 0.13912669372558595, 0.13903666687011718, 0.1409220733642578, 0.13905386352539062, 0.13984153747558595, 0.14132415771484375, 0.14097946166992187, 0.14100364685058595, 0.14086349487304686, 0.14188893127441407, 0.1403948516845703, 0.14123382568359374, 0.1410108184814453, 0.14059983825683595, 0.14033331298828125, 0.13973635864257813, 0.14081295776367186, 0.13981695556640625, 0.13965516662597657, 0.13929676818847656, 0.13923532104492187, 0.13927218627929688, 0.14163352966308593, 0.13992140197753905, 0.13938893127441407, 0.13937049865722656, 0.14076518249511719, 0.13900595092773438, 0.14018559265136718, 0.14140211486816406, 0.13989424133300782, 0.13941404724121093, 0.1395916748046875, 0.14208963012695314, 0.15296456909179687, 0.1378389129638672, 0.13910044860839843, 0.13832992553710938, 0.1388522491455078, 0.1390594940185547, 0.14188143920898438, 0.14239289855957032, 0.13902847290039064, 0.14062173461914063, 0.14030892944335938, 0.13985792541503905, 0.13882156372070312, 0.13982687377929687, 0.14139840698242187, 0.1396879425048828, 0.1390795593261719, 0.13948243713378905, 0.13942025756835938, 0.13896316528320313, 0.1396183319091797, 0.14080828857421876, 0.1392720642089844, 0.14029209899902345, 0.13915913391113283, 0.13985218811035155, 0.14018559265136718, 0.14212095642089845, 0.14153424072265625, 0.14054066467285156, 0.1411168670654297, 0.14019459533691406, 0.1407130584716797, 0.1402541046142578, 0.13967564392089843, 0.14118911743164062, 0.13922303771972655, 0.14041497802734376, 0.1391730499267578, 0.139171875, 0.14053045654296875, 0.139374267578125, 0.1411300506591797, 0.13942579650878906, 0.14069119262695312, 0.1393740234375, 0.13978297424316405, 0.1397821502685547, 0.1398497314453125, 0.14013174438476564, 0.1404442596435547, 0.13993983459472656, 0.139885986328125, 0.14063206481933593, 0.1407484130859375, 0.14022959899902343, 0.14237440490722655, 0.14104339599609375, 0.14081455993652345, 0.14113034057617188, 0.14088108825683593, 0.1414742431640625, 0.1414633026123047, 0.15016131591796875, 0.1390224914550781, 0.1389160614013672, 0.13953030395507812, 0.14034498596191405, 0.14003427124023438, 0.14128463745117187, 0.1420173797607422, 0.14012582397460938, 0.13960572814941405, 0.13903654479980468, 0.13917190551757813, 0.1394958038330078, 0.14020028686523436, 0.14024697875976563, 0.1397104034423828, 0.13932505798339845, 0.13885081481933595, 0.13896041870117187, 0.13951618957519532, 0.14134690856933593, 0.14005433654785157, 0.14100889587402343, 0.13972714233398437, 0.14049894714355468, 0.1406440887451172, 0.14110540771484376, 0.14156297302246093, 0.14102006530761718, 0.1422274627685547, 
0.14062387084960937, 0.14091059875488282, 0.14118502807617186, 0.14048233032226562, 0.14096202087402343, 0.14005453491210937, 0.1411112976074219, 0.13930905151367187, 0.13971046447753907, 0.13944137573242188, 0.14013459777832032, 0.13968649291992188, 0.1408139190673828, 0.13959536743164064, 0.14071682739257813, 0.13981289672851563, 0.14004371643066407, 0.13917628479003907, 0.13939324951171875, 0.14105804443359374, 0.13976576232910157, 0.1408690185546875, 0.13976226806640624, 0.13982925415039063, 0.1398663635253906, 0.1408570861816406, 0.14143487548828124, 0.14085939025878907, 0.14090194702148437, 0.14130426025390624, 0.14108262634277344, 0.14101866149902345, 0.14176492309570313, 0.152748291015625, 0.13876634216308595, 0.13887283325195313, 0.13804135131835937, 0.13905509948730468, 0.13937869262695313, 0.14096588134765625, 0.14309896850585938, 0.1393079376220703, 0.1406054382324219, 0.14039039611816406, 0.14027769470214843, 0.14014041137695313, 0.13967289733886717, 0.1415701141357422, 0.139150146484375, 0.139177978515625, 0.13891789245605468, 0.1388687286376953, 0.13958047485351563, 0.13969891357421876, 0.1408555145263672, 0.13935186767578125, 0.13996226501464842, 0.13893177795410155, 0.1392073974609375, 0.14028125, 0.14046678161621093, 0.1409434509277344, 0.14051113891601563, 0.14108822631835938, 0.14064259338378907, 0.14044390869140624, 0.14115367126464845, 0.14152304077148437, 0.14162998962402343, 0.14126214599609374, 0.14148060607910157, 0.14073968505859374, 0.14141439819335938, 0.14078643798828125, 0.14152723693847657, 0.14137452697753905, 0.14128633117675782, 0.14147174072265625, 0.14081024169921874, 0.1413093719482422, 0.14101560974121094, 0.1416110076904297, 0.1416048583984375, 0.14071807861328126, 0.14179942321777345, 0.14150857543945314, 0.14065206909179687, 0.14140162658691408, 0.1402767028808594, 0.14168003845214844, 0.1410484161376953, 0.14191987609863282, 0.1408264923095703, 0.14166604614257813, 0.14095993041992189, 0.14102581787109375, 0.1532072296142578, 0.13788160705566407, 0.13892131042480468, 0.13778807067871093, 0.13923968505859374, 0.1396131134033203, 0.14271151733398438, 0.14278640747070312, 0.13934556579589844, 0.14071229553222656, 0.14024140930175782, 0.14066213989257811, 0.14023309326171876, 0.14023458862304689, 0.1414569854736328, 0.14080876159667968, 0.1394503631591797, 0.14017330932617186, 0.13906124877929688, 0.138987548828125, 0.13952838134765624, 0.14146333312988282, 0.1407344665527344, 0.13917942810058595, 0.13900364685058594, 0.1405095977783203, 0.138936767578125, 0.13958253479003907, 0.14046234130859375, 0.14074928283691407, 0.13938525390625, 0.13897244262695313, 0.14060188293457032, 0.1393474884033203, 0.14138726806640625, 0.14132118225097656, 0.14120755004882812, 0.14078697204589843, 0.14065122985839842, 0.14073968505859374, 0.14168515014648436, 0.14128524780273438, 0.1416171569824219, 0.14173878479003907, 0.14113075256347657, 0.14080873107910155, 0.1411095733642578, 0.14175436401367186, 0.14112358093261718, 0.14180557250976564, 0.14150172424316407, 0.14144735717773438, 0.14053596496582033, 0.141552001953125, 0.14067097473144533, 0.14140620422363281, 0.14142774963378907, 0.14141743469238283, 0.14080819702148437, 0.14095974731445313, 0.1416378936767578, 0.1414817352294922, 0.1410355224609375, 0.15311666870117188, 0.13907968139648438, 0.13908934020996094, 0.13766697692871094, 0.13812742614746093, 0.13887820434570314, 0.14217507934570311, 0.1428705291748047, 0.13996397399902344, 0.1395676727294922, 0.13890296936035157, 0.1400549774169922, 0.14024266052246093, 
0.14099459838867187, 0.1422461395263672, 0.14131007385253908, 0.14132620239257812, 0.14073397827148437, 0.13974937438964843, 0.13920509338378906, 0.14056959533691407, 0.1406652526855469, 0.14018528747558595, 0.1389700164794922, 0.14048367309570312, 0.13947177124023438, 0.13865731811523438, 0.1407996826171875, 0.14068815612792968, 0.140525146484375, 0.13977845764160157, 0.13957711791992186, 0.1397557373046875, 0.13946070861816406, 0.1409041290283203, 0.14106236267089844, 0.140555908203125, 0.14101744079589842, 0.14105807495117187, 0.14015283203125, 0.1414605712890625, 0.14075926208496095, 0.1423197784423828, 0.1414110412597656, 0.14074765014648438, 0.14126585388183593, 0.14130108642578126, 0.14167872619628907, 0.14158050537109376, 0.1415367431640625, 0.14123619079589844, 0.14163034057617188, 0.1411312255859375, 0.14158038330078124, 0.141623291015625, 0.14153750610351562, 0.14147401428222656, 0.14126054382324219, 0.1410970916748047, 0.14206605529785157, 0.14146124267578125, 0.14191206359863281, 0.1411788787841797]",tokens/s,7.127130632496074,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 565446 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 571961 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1879.048192,2906.5216,0.0,2503.999488,2349.010944,s,1,9.9702880859375,9.9702880859375,0.0,9.9702880859375,9.9702880859375,9.9702880859375,9.9702880859375,[9.9702880859375],,kWh,8.182413306662965e-05,9.018321933508537e-06,2.7256132916014808e-05,0.000118098587916153,,MB,1972.150272,3317.563392,0.0,2902.458368,2642.300928,s,10,1.940512512207031,0.19405125122070313,0.0009623634848450119,0.19400440216064452,0.19519012908935549,0.19550483932495116,0.19575660751342772,"[0.19239990234375, 0.1930158386230469, 0.1936974334716797, 0.19379478454589844, 0.19581954956054687, 0.19335833740234376, 0.1945977020263672, 0.19512019348144533, 0.19421401977539063, 0.1944947509765625]",tokens/s,1319.2391102329962,kWh,5.870382953165971e-06,6.473922958897195e-07,3.876803101440229e-06,1.0394578350495922e-05,tokens/kWh,24628223.615033537,MB,1992.00768,3317.563392,0.0,2902.458368,2642.303488,s,10,26.716282470703124,2.6716282470703123,0.008611752011998841,2.6693057861328127,2.6814554443359375,2.6838455200195312,2.6857575805664062,"[2.665792236328125, 2.686235595703125, 2.665062255859375, 2.6555732421875, 2.67845556640625, 2.68092431640625, 2.6691767578125, 2.667984130859375, 2.669434814453125, 2.6776435546875]",tokens/s,23.581125131868678,kWh,7.791380677266655e-05,8.59386178885768e-06,3.7849841390958374e-05,0.0001243575099524826,tokens/kWh,506603.90372943704,,s,630,26.71401330947876,0.042403195729331364,0.0006809825549955676,0.042255729675292966,0.042754551696777346,0.043198600196838376,0.045182664184570315,"[0.043084449768066406, 0.04283596801757812, 0.04228710556030273, 0.04220678329467773, 0.04233875274658203, 0.04280319976806641, 0.042342113494873046, 0.04257392120361328, 0.04231158447265625, 0.042277153015136716, 0.042161247253417966, 0.042068897247314455, 0.042090496063232424, 0.04208585739135742, 0.04195782470703125, 0.042258529663085936, 0.04221353530883789, 0.042192737579345704, 0.04222272109985352, 0.04213849639892578, 0.04207788848876953, 0.04216864013671875, 0.042057727813720705, 0.04203900909423828, 0.04220953750610352, 0.04253606414794922, 0.04251740646362305, 0.04207535934448242, 0.04229814529418945, 0.04233011245727539, 0.042633087158203124, 0.04213708877563477, 0.0421864013671875, 0.042363582611083986, 0.04233654403686524, 0.04212736129760742, 0.04219903945922852, 0.042035358428955075, 0.042038719177246095, 0.04234406280517578, 0.042094657897949216, 0.04204393768310547, 0.04215561676025391, 0.0419323844909668, 0.042551551818847656, 0.043031230926513675, 0.04204729461669922, 0.04210099029541016, 0.04253696060180664, 0.04215311813354492, 0.04223062515258789, 0.04212736129760742, 0.04218675231933594, 0.04211206436157226, 
0.04338140869140625, 0.04258025741577148, 0.04243865585327149, 0.04245913696289062, 0.04267212677001953, 0.04239769744873047, 0.04248108673095703, 0.04219929504394531, 0.042496318817138674, 0.043202560424804685, 0.0425164794921875, 0.042338623046875, 0.04252809524536133, 0.04228364944458008, 0.04215526580810547, 0.042090976715087894, 0.0421305923461914, 0.04201968002319336, 0.04217440032958984, 0.04230355072021484, 0.04216134262084961, 0.041981952667236325, 0.04230227279663086, 0.04193894577026367, 0.04233216094970703, 0.04214080047607422, 0.04199718475341797, 0.04218044662475586, 0.04198758316040039, 0.04204816055297852, 0.042132736206054684, 0.042050239562988284, 0.04209875106811523, 0.04213145446777344, 0.042280960083007815, 0.04254627227783203, 0.04252355194091797, 0.042770111083984375, 0.04256966400146484, 0.042627681732177736, 0.04255926513671875, 0.042403839111328126, 0.04409292984008789, 0.05022297668457031, 0.04257036972045898, 0.04234239959716797, 0.042626815795898436, 0.04241843032836914, 0.04356710433959961, 0.04241372680664063, 0.04241443252563477, 0.042393600463867184, 0.042531841278076174, 0.04235776138305664, 0.04234169769287109, 0.0422279052734375, 0.042438751220703126, 0.042375328063964844, 0.04238713455200195, 0.04238348770141601, 0.04272991943359375, 0.042264575958251956, 0.04250419235229492, 0.04235673522949219, 0.042534782409667966, 0.042987648010253905, 0.04546899032592774, 0.04503340911865234, 0.04289632034301758, 0.04243145751953125, 0.04264432144165039, 0.04253696060180664, 0.04291177749633789, 0.04236223983764648, 0.04213734436035156, 0.042464126586914064, 0.04217446517944336, 0.04222969436645508, 0.04216223907470703, 0.042482879638671874, 0.042181438446044925, 0.04242335891723633, 0.042122177124023434, 0.04230704116821289, 0.04217628860473633, 0.04234112167358398, 0.04269055938720703, 0.042395648956298826, 0.04223497772216797, 0.04233308792114258, 0.042039295196533204, 0.04200457763671875, 0.04224169540405273, 0.04251206588745117, 0.04220755386352539, 0.0420552978515625, 0.0419027214050293, 0.04189155197143555, 0.04191875076293945, 0.04209868621826172, 0.0420904655456543, 0.04213967895507813, 0.042237632751464846, 0.0423570556640625, 0.04213555145263672, 0.04221091079711914, 0.04209091186523437, 0.042156032562255856, 0.042336254119873046, 0.042065921783447265, 0.042022560119628904, 0.04216582489013672, 0.04201724624633789, 0.04217206573486328, 0.04258588790893555, 0.0442479362487793, 0.04252057647705078, 0.04301567840576172, 0.0447044792175293, 0.04238025665283203, 0.042175296783447266, 0.042145790100097655, 0.04220108795166016, 0.04220927810668945, 0.04199568176269531, 0.04221980667114258, 0.04185443115234375, 0.042064735412597656, 0.04217446517944336, 0.04198604965209961, 0.04212275314331055, 0.04224460983276367, 0.04202678298950195, 0.042489566802978516, 0.042267135620117184, 0.04274761581420899, 0.042626911163330075, 0.042765857696533204, 0.04221625518798828, 0.04204483032226562, 0.04209916687011719, 0.04202707290649414, 0.04198329544067383, 0.042067840576171876, 0.04205807876586914, 0.04193539047241211, 0.04202668762207031, 0.04203376007080078, 0.04203084945678711, 0.04238735961914063, 0.04336032104492187, 0.042584064483642575, 0.042164222717285156, 0.04196278381347656, 0.04201071929931641, 0.04214863967895508, 0.0421899528503418, 0.04195199966430664, 0.042067710876464846, 0.04188387298583984, 0.04211865615844727, 0.04199628829956055, 0.0420766716003418, 0.04196966552734375, 0.041985023498535154, 0.04205619049072266, 0.04204800033569336, 
0.04190345764160156, 0.0420107536315918, 0.04205007934570312, 0.04210451126098633, 0.04202070236206055, 0.04200086212158203, 0.0422806396484375, 0.04256595230102539, 0.042073760986328125, 0.04243865585327149, 0.042045505523681644, 0.042089855194091794, 0.04196239852905274, 0.042063232421875, 0.04192681503295898, 0.04212508773803711, 0.04218540954589844, 0.04233830261230469, 0.042022369384765626, 0.042207199096679686, 0.042144382476806644, 0.042114078521728514, 0.04213190460205078, 0.04204547119140625, 0.042118942260742184, 0.04223398590087891, 0.04225209426879883, 0.04206051254272461, 0.04207545471191406, 0.0420563850402832, 0.042074111938476565, 0.042748416900634766, 0.04215568161010742, 0.042248287200927735, 0.042493694305419924, 0.04234051132202148, 0.04270694351196289, 0.042401344299316406, 0.04229369735717774, 0.0424356803894043, 0.042159008026123046, 0.0426412467956543, 0.042219680786132814, 0.04223721694946289, 0.042698974609375, 0.04233881759643555, 0.042188800811767575, 0.04231577682495117, 0.04224361419677734, 0.04230377578735352, 0.04348320007324219, 0.044034175872802735, 0.04250124740600586, 0.04224291229248047, 0.042506271362304685, 0.04222771072387695, 0.04213718414306641, 0.04234428787231445, 0.042170047760009766, 0.04280745697021485, 0.042255073547363284, 0.04234854507446289, 0.04319375991821289, 0.042492511749267575, 0.04249507141113281, 0.04236175918579101, 0.0424787826538086, 0.04261356735229492, 0.04261273574829102, 0.042805023193359375, 0.04249212646484375, 0.04266972732543945, 0.04259443283081055, 0.04263711929321289, 0.04365337753295898, 0.04233232116699219, 0.04237292861938476, 0.0423873291015625, 0.04243283081054688, 0.04235580825805664, 0.04249283218383789, 0.042962944030761716, 0.042947967529296874, 0.0425682258605957, 0.04217865753173828, 0.04234454345703125, 0.042302913665771484, 0.04243657684326172, 0.04241254425048828, 0.04239974212646484, 0.042282016754150394, 0.04277142333984375, 0.04250534439086914, 0.04241049575805664, 0.04373932647705078, 0.04264726257324219, 0.042627391815185545, 0.042396255493164066, 0.04234239959716797, 0.0425860481262207, 0.04247353744506836, 0.04234444808959961, 0.04248166275024414, 0.04242233657836914, 0.04239487838745117, 0.04249055862426758, 0.0425164794921875, 0.04271926498413086, 0.04240790557861328, 0.042382816314697265, 0.04239414215087891, 0.04336825561523437, 0.04235084915161133, 0.042450496673583984, 0.042485279083251955, 0.04242076873779297, 0.04897324752807617, 0.04514828872680664, 0.04477849578857422, 0.042455806732177734, 0.0421420783996582, 0.04221152114868164, 0.04195372772216797, 0.04223590469360351, 0.04203641510009765, 0.04235260772705078, 0.04200534439086914, 0.04210671997070312, 0.041912479400634764, 0.04201612854003906, 0.04207475280761719, 0.04231577682495117, 0.0420494384765625, 0.04222166442871094, 0.04377119827270508, 0.04368454360961914, 0.042245311737060545, 0.042371902465820316, 0.042049537658691405, 0.04206387329101562, 0.04196895980834961, 0.04277113723754883, 0.04214169692993164, 0.042149185180664066, 0.042005119323730467, 0.0421253776550293, 0.04212736129760742, 0.04221699142456055, 0.04205206298828125, 0.0425799674987793, 0.04206396865844726, 0.04211088180541992, 0.041981952667236325, 0.0419736328125, 0.04221369552612304, 0.0421715202331543, 0.04242675018310547, 0.042815486907958986, 0.042138111114501955, 0.04237622451782227, 0.04258316802978516, 0.042141441345214845, 0.042090591430664064, 0.042159454345703125, 0.04203177642822266, 0.04485526275634766, 0.042342689514160155, 0.042180351257324215, 
0.04211020660400391, 0.04680976104736328, 0.04209852981567383, 0.042070270538330075, 0.041918464660644535, 0.04224367904663086, 0.04202537536621094, 0.04229119873046875, 0.042205184936523435, 0.04210454559326172, 0.04217270278930664, 0.0421130256652832, 0.042074111938476565, 0.04235619354248047, 0.041978305816650394, 0.04241827011108398, 0.04207535934448242, 0.042646305084228515, 0.04190995025634765, 0.04207046508789063, 0.04184009552001953, 0.04189664077758789, 0.042127071380615236, 0.04210710525512695, 0.043595550537109375, 0.042074111938476565, 0.04203724670410156, 0.04221263885498047, 0.0420841293334961, 0.04239664077758789, 0.0419081916809082, 0.04230499267578125, 0.04227872085571289, 0.04214963150024414, 0.04220207977294922, 0.04223590469360351, 0.04219830322265625, 0.04211529541015625, 0.042592769622802736, 0.04232716751098633, 0.04593753433227539, 0.041924606323242186, 0.04192233657836914, 0.04193507385253906, 0.0417973747253418, 0.042020801544189454, 0.041904449462890625, 0.04255744171142578, 0.04198767852783203, 0.041949089050292966, 0.04256310272216797, 0.04236310577392578, 0.0433608627319336, 0.042471424102783206, 0.04210822296142578, 0.042354560852050784, 0.0423164176940918, 0.04210707092285156, 0.042584064483642575, 0.04224204635620117, 0.04207628631591797, 0.04209884643554687, 0.0421864013671875, 0.04222777557373047, 0.04221683120727539, 0.04230822372436523, 0.04239120101928711, 0.042110816955566406, 0.041935295104980466, 0.042018878936767576, 0.04181155014038086, 0.042086814880371096, 0.04194508743286133, 0.04197123336791992, 0.04197795104980469, 0.04207193756103516, 0.0424411506652832, 0.042530654907226566, 0.04207132720947265, 0.04222457504272461, 0.042075294494628906, 0.042320735931396486, 0.042223617553710936, 0.04233385467529297, 0.04223846435546875, 0.04218566513061523, 0.041980415344238284, 0.04210432052612305, 0.042074558258056644, 0.04222819137573242, 0.042405727386474606, 0.04219295883178711, 0.04191651153564453, 0.04220908737182617, 0.041940158843994144, 0.04235961532592773, 0.04240812683105469, 0.04230310440063476, 0.04236531066894531, 0.04271484756469727, 0.042754337310791014, 0.04295270538330078, 0.042444801330566405, 0.04252252960205078, 0.04215407943725586, 0.043861793518066405, 0.04244911956787109, 0.04296492767333984, 0.04233567810058594, 0.04368633651733399, 0.04262521743774414, 0.042487777709960935, 0.042506271362304685, 0.042635520935058596, 0.042530624389648435, 0.04291836929321289, 0.04234035110473633, 0.04302758407592774, 0.042369632720947265, 0.042369312286376956, 0.042417919158935544, 0.04238156890869141, 0.042256385803222656, 0.04250620651245117, 0.042227745056152344, 0.04226662445068359, 0.04213139343261719, 0.04236089706420899, 0.0421743049621582, 0.042390750885009765, 0.04256380844116211, 0.042380001068115236, 0.04249599838256836, 0.04268815994262695, 0.042289505004882814, 0.04229529571533203, 0.04230473709106446, 0.042275615692138675, 0.043031841278076174, 0.04258233642578125, 0.0420948486328125, 0.042633377075195315, 0.042083744049072266, 0.04225289535522461, 0.042065921783447265, 0.042608638763427735, 0.04217401504516602, 0.042535358428955075, 0.04231987380981445, 0.04219609451293945, 0.042087295532226565, 0.04212940979003906, 0.04196895980834961, 0.042033855438232424, 0.04187039947509766, 0.04202169418334961, 0.042385631561279294, 0.04213676834106445, 0.04192943954467773, 0.04202102279663086, 0.04199148941040039, 0.04207465744018555, 0.04194271850585937, 0.042041664123535154, 0.04194675064086914, 0.042756481170654295, 0.04188774490356445, 
0.0420362548828125, 0.04181705474853516, 0.04219903945922852, 0.043937793731689455, 0.04323328018188476, 0.044609535217285154, 0.04255692672729492, 0.0431148796081543, 0.0423136978149414, 0.04199030303955078, 0.04216831970214844, 0.04279516983032226, 0.042213409423828126, 0.04211257553100586, 0.04255750274658203, 0.04223158264160156, 0.04242428970336914, 0.04231232070922852, 0.042442176818847654, 0.04283660888671875, 0.04245222473144531, 0.04234310531616211, 0.043096065521240234, 0.04281932830810547, 0.042299232482910155, 0.042154590606689454, 0.04225004959106445, 0.04212940979003906, 0.04241360092163086, 0.04215411376953125, 0.04234223937988281, 0.04244326400756836, 0.04265321731567383, 0.042229503631591794, 0.04250316619873047, 0.042092254638671875, 0.04221887969970703, 0.042449535369873045, 0.04246633529663086, 0.04225145721435547, 0.0424857292175293, 0.04228883361816406, 0.04234620666503906, 0.042146495819091793, 0.0422845458984375, 0.04208809661865234, 0.042617664337158204, 0.042288894653320315, 0.04232147216796875, 0.042129150390625, 0.04221817779541016, 0.04204076766967774, 0.04292256164550781, 0.04519670486450195, 0.04245155334472656, 0.04261254501342773, 0.04257369613647461, 0.04749372863769531, 0.04240969467163086, 0.04220927810668945, 0.042332000732421875, 0.042274974822998045, 0.042261505126953126, 0.04219910430908203, 0.04267718505859375, 0.04205158233642578, 0.042253726959228514, 0.04216649627685547, 0.042293792724609376, 0.042288928985595706, 0.04216636657714844, 0.042528736114501954, 0.042782718658447266, 0.042329761505126955]",tokens/s,23.58312817701791,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, 
in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,2168.786944,11834.097664,0.0,11431.575552,10953.091072,s,1,21.6584453125,21.6584453125,0.0,21.6584453125,21.6584453125,21.6584453125,21.6584453125,[21.6584453125],,kWh,0.0004168932013166795,4.5979002514183226e-05,0.0001433762258120641,0.0006062484296429269,,MB,1647.276032,12729.581568,0.0,12314.476544,11624.13056,s,10,17.762977172851564,1.7762977172851564,0.007085061050107341,1.7785372314453125,1.7830848510742188,1.7835726989746092,1.7839629772949217,"[1.758892578125, 1.7706796875, 1.7723310546875, 1.7796905517578125, 1.7761463623046876, 1.779974853515625, 1.7773839111328125, 1.7829764404296875, 1.7808411865234375, 1.784060546875]",tokens/s,144.11998479132384,kWh,5.168837491999512e-05,5.700847123941952e-06,3.434333303020121e-05,9.173255507413829e-05,tokens/kWh,2790721.350703692,MB,1651.5072,12731.67872,0.0,12316.573696,11624.13312,s,10,87.70973144531249,8.770973144531249,0.013041638055390142,8.77374462890625,8.7848142578125,8.78533193359375,8.78574607421875,"[8.745994140625, 8.7529951171875, 8.76157421875, 8.768685546875, 8.7710009765625, 8.778998046875, 8.77648828125, 8.78469921875, 8.7834462890625, 8.785849609375]",tokens/s,7.182783365296341,kWh,0.0002566570354687549,2.8309506766306757e-05,0.00017032146959040096,0.0004552880118254626,tokens/kWh,138373.9487174361,,s,630,87.7056473083496,0.13921531318785652,0.001656478559790766,0.13893014526367187,0.1404453826904297,0.1407845603942871,0.14912909576416017,"[0.1503163146972656, 0.1368173828125, 0.1369456024169922, 0.1371890869140625, 0.13747395324707032, 0.13724205017089844, 0.13702656555175782, 0.14176255798339843, 0.1395302429199219, 0.13900186157226563, 0.13805078125, 0.1378148193359375, 0.137312255859375, 0.13733212280273438, 0.1385452423095703, 0.1399171142578125, 0.13934159851074218, 0.13928700256347656, 0.13801312255859374, 0.13719676208496093, 0.13746800231933592, 0.13821817016601562, 0.1394535369873047, 0.1387529296875, 0.14012416076660156, 0.14018150329589843, 0.13805158996582031, 0.137955322265625, 0.13732044982910158, 0.13848985290527344, 0.13833010864257814, 0.13944831848144532, 0.14004620361328124, 0.1395344696044922, 0.13839260864257813, 0.13753033447265625, 0.13823385620117187, 0.13821253967285158, 0.13881782531738282, 0.13890371704101562, 0.13886224365234376, 0.139448486328125, 0.1392391052246094, 0.13829942321777344, 0.13777186584472656, 0.13885848999023437, 0.13907904052734374, 0.13831805419921875, 0.13825680541992189, 0.1390377960205078, 0.1395127410888672, 0.13971372985839844, 0.13929075622558593, 0.13861549377441407, 0.1386024932861328, 0.1388023681640625, 0.13835757446289063, 0.138712890625, 0.13893037414550782, 0.139095458984375, 0.13916426086425782, 0.1404703369140625, 0.13960595703125, 
0.14877398681640625, 0.13847439575195314, 0.13790821838378906, 0.13751705932617186, 0.1370767364501953, 0.1371844482421875, 0.13839996337890625, 0.1416300506591797, 0.13906895446777343, 0.13921533203125, 0.13791232299804687, 0.13785906982421875, 0.1365278778076172, 0.13822306823730468, 0.13993014526367187, 0.13873948669433595, 0.14014486694335937, 0.13934597778320312, 0.1387919616699219, 0.1367327423095703, 0.13783238220214844, 0.13954713439941407, 0.1382301788330078, 0.13861241149902342, 0.1386682586669922, 0.14016522216796876, 0.13944012451171875, 0.1380592041015625, 0.13762413024902342, 0.13879446411132812, 0.13872796630859374, 0.13842227172851562, 0.1388006134033203, 0.13968438720703125, 0.1394851837158203, 0.13948722839355468, 0.13815516662597657, 0.13835964965820313, 0.13824409484863281, 0.13880320739746094, 0.13884825134277343, 0.13897917175292968, 0.13912693786621094, 0.13936962890625, 0.13963555908203126, 0.13907305908203124, 0.13896546936035156, 0.13896287536621094, 0.1389630126953125, 0.138682373046875, 0.13862911987304688, 0.13869491577148438, 0.13868826293945313, 0.13974220275878907, 0.14048294067382813, 0.13917391967773438, 0.13844947814941405, 0.13910791015625, 0.13905967712402345, 0.13881753540039063, 0.13862083435058595, 0.13885066223144532, 0.13904666137695312, 0.15160946655273438, 0.13686441040039063, 0.13717240905761718, 0.1384228515625, 0.13840383911132811, 0.1377829132080078, 0.13876876831054688, 0.1405224914550781, 0.138074462890625, 0.13791299438476562, 0.13793894958496095, 0.1387704315185547, 0.13914108276367188, 0.13833619689941407, 0.14002790832519532, 0.13807830810546876, 0.1382620849609375, 0.13795587158203124, 0.13802691650390625, 0.13879052734375, 0.13999250793457033, 0.139636962890625, 0.13935685729980468, 0.13737075805664062, 0.13860751342773436, 0.1382092742919922, 0.1384015655517578, 0.13879420471191406, 0.13963507080078125, 0.14001420593261718, 0.13957862854003905, 0.13867904663085937, 0.13834803771972656, 0.1381790771484375, 0.1388969268798828, 0.13915504455566408, 0.13828594970703126, 0.13880848693847656, 0.13954060363769533, 0.14026133728027343, 0.13864122009277344, 0.13828807067871093, 0.13953433227539064, 0.13912255859375, 0.138595458984375, 0.13872377014160156, 0.1383485107421875, 0.13885420227050782, 0.13914300537109375, 0.14038278198242188, 0.14008358764648438, 0.13909400939941408, 0.13864857482910156, 0.13902336120605469, 0.13878271484375, 0.13891293334960939, 0.13876039123535155, 0.13939517211914063, 0.13873823547363281, 0.139791748046875, 0.13993382263183593, 0.14054861450195313, 0.13923455810546875, 0.15185186767578124, 0.1369531555175781, 0.13769183349609376, 0.13784841918945312, 0.13863772583007813, 0.13841203308105468, 0.1387376708984375, 0.140889892578125, 0.13797517395019532, 0.13779849243164063, 0.13797357177734376, 0.13792063903808593, 0.13945408630371095, 0.13936061096191407, 0.14067100524902343, 0.13813970947265625, 0.1379983367919922, 0.1380063018798828, 0.13806410217285156, 0.13879263305664064, 0.13882194519042967, 0.14089523315429686, 0.13950857543945314, 0.1400342102050781, 0.13937049865722656, 0.13801266479492189, 0.1381908416748047, 0.13871839904785158, 0.13954083251953125, 0.1384432373046875, 0.13916773986816405, 0.13804460144042968, 0.13955564880371094, 0.1393623046875, 0.1387642822265625, 0.13913087463378906, 0.13970431518554688, 0.13887464904785157, 0.13818707275390624, 0.13829315185546875, 0.13827180480957033, 0.1389004211425781, 0.13973033142089844, 0.14021693420410156, 0.14090762329101564, 0.14023977661132814, 
0.13872947692871093, 0.1382461395263672, 0.13902554321289062, 0.13893516540527343, 0.13943327331542968, 0.13946246337890625, 0.13835311889648438, 0.13907600402832032, 0.139570556640625, 0.1403152618408203, 0.14046617126464844, 0.13896476745605468, 0.1398335723876953, 0.13861273193359375, 0.13873356628417968, 0.13911996459960937, 0.13926780700683594, 0.14930140686035157, 0.13854071044921876, 0.13665110778808592, 0.1377665557861328, 0.13674024963378906, 0.1380966033935547, 0.1405524139404297, 0.14218470764160157, 0.1398047332763672, 0.13880569458007813, 0.13851033020019532, 0.13785087585449218, 0.13792066955566407, 0.1378978271484375, 0.1402034912109375, 0.13930140686035156, 0.13851628112792969, 0.13988233947753906, 0.13941897583007812, 0.13819740295410157, 0.13773385620117187, 0.13981170654296876, 0.13841714477539063, 0.13961871337890625, 0.13837718200683594, 0.13819558715820313, 0.13957734680175782, 0.13902972412109374, 0.1392911376953125, 0.13925001525878905, 0.13936837768554688, 0.13877650451660156, 0.1385001525878906, 0.13866802978515624, 0.13815708923339845, 0.13913331604003906, 0.14020985412597656, 0.1406903381347656, 0.14032867431640625, 0.1387809600830078, 0.13874102783203124, 0.13885308837890625, 0.13832191467285157, 0.13938633728027344, 0.1393116455078125, 0.13893734741210936, 0.13890643310546874, 0.1394478759765625, 0.14062042236328126, 0.1385533447265625, 0.139399169921875, 0.1390223388671875, 0.13943939208984374, 0.13873225402832032, 0.13910639953613282, 0.13833775329589842, 0.13841583251953124, 0.14005116271972656, 0.14059315490722657, 0.1405111083984375, 0.13982278442382812, 0.13896134948730468, 0.1390919647216797, 0.14889573669433595, 0.13805952453613282, 0.13871273803710937, 0.13928623962402345, 0.1366455078125, 0.13776185607910157, 0.13937461853027344, 0.14081730651855467, 0.13844070434570313, 0.13827603149414064, 0.1385889892578125, 0.13948095703125, 0.1386865234375, 0.13818067932128905, 0.14053170776367188, 0.13849993896484375, 0.1387460174560547, 0.1375272979736328, 0.1386577911376953, 0.13886813354492186, 0.13965373229980468, 0.14063615417480468, 0.1387868194580078, 0.13860786437988282, 0.13836723327636719, 0.13862553405761718, 0.13836697387695313, 0.138745849609375, 0.14005043029785155, 0.1390358428955078, 0.1397379150390625, 0.13991116333007814, 0.13916140747070313, 0.13858767700195312, 0.13891856384277343, 0.13977107238769532, 0.13870883178710938, 0.1393750762939453, 0.13821299743652343, 0.13833920288085938, 0.13942544555664063, 0.14026786804199218, 0.1407283172607422, 0.14038543701171874, 0.13994479370117188, 0.13847325134277344, 0.13878445434570313, 0.1388303680419922, 0.13974021911621093, 0.13872598266601563, 0.13968214416503907, 0.1385533447265625, 0.1392126007080078, 0.13915155029296875, 0.1405358123779297, 0.1410662384033203, 0.1404620819091797, 0.1410556182861328, 0.14070970153808593, 0.13891641235351562, 0.13897727966308593, 0.13974322509765624, 0.13960806274414062, 0.14908248901367188, 0.13909400939941408, 0.13853286743164062, 0.1379430389404297, 0.1380966339111328, 0.1364336700439453, 0.13992140197753905, 0.1409290313720703, 0.1389916229248047, 0.13864300537109375, 0.13933612060546874, 0.1388605499267578, 0.13748838806152344, 0.1383055419921875, 0.13966744995117186, 0.13968118286132813, 0.13819541931152343, 0.13814735412597656, 0.13988925170898436, 0.13929472351074218, 0.1389384002685547, 0.13989849853515626, 0.13980677795410157, 0.1381030731201172, 0.1384443817138672, 0.138174560546875, 0.1387011260986328, 0.13854266357421874, 0.14083526611328126, 
0.14071589660644532, 0.14021247863769531, 0.13892185974121093, 0.13872947692871093, 0.13861477661132812, 0.13884754943847658, 0.13963743591308594, 0.13832191467285157, 0.13962428283691405, 0.13870506286621093, 0.138745849609375, 0.14022767639160155, 0.13996124267578125, 0.13974528503417968, 0.13958064270019532, 0.13897193908691408, 0.13936845397949219, 0.1384734344482422, 0.13904611206054687, 0.1387542724609375, 0.13914378356933593, 0.13955686950683593, 0.14059417724609374, 0.13978726196289062, 0.14042828369140625, 0.14048764038085937, 0.13900965881347657, 0.1396771240234375, 0.1388410186767578, 0.13981027221679687, 0.13851705932617187, 0.13899757385253905, 0.13889555358886718, 0.13918016052246093, 0.14914813232421875, 0.13922312927246094, 0.13794905090332032, 0.13805520629882811, 0.13795330810546874, 0.13828973388671875, 0.13906243896484374, 0.141183837890625, 0.1385676727294922, 0.1394974365234375, 0.13928041076660155, 0.1382010955810547, 0.1380917053222656, 0.1385142364501953, 0.1402071075439453, 0.13847142028808593, 0.1386455078125, 0.1384734649658203, 0.13865577697753906, 0.13950973510742187, 0.13943399047851562, 0.13949337768554687, 0.1390592041015625, 0.13978195190429688, 0.138704833984375, 0.1383795166015625, 0.1382993927001953, 0.138819580078125, 0.13991708374023437, 0.1395296630859375, 0.13970921325683594, 0.14012530517578126, 0.13910310363769532, 0.13835877990722656, 0.13860195922851562, 0.14029673767089842, 0.13879295349121093, 0.13956866455078126, 0.13834902954101563, 0.13806524658203126, 0.13867420959472657, 0.140513916015625, 0.1411461181640625, 0.14015692138671876, 0.14066073608398438, 0.13935821533203124, 0.13910832214355467, 0.138548828125, 0.13902262878417967, 0.14023490905761718, 0.1391288299560547, 0.13875190734863282, 0.13908796691894532, 0.13901123046875, 0.13931820678710938, 0.14064157104492186, 0.1415882873535156, 0.1407189178466797, 0.1405788116455078, 0.14055628967285155, 0.139472900390625, 0.1386468811035156, 0.1400265350341797, 0.15159091186523438, 0.13703578186035156, 0.13773414611816406, 0.138387451171875, 0.13926194763183594, 0.13868777465820312, 0.13995004272460937, 0.14125286865234374, 0.13793887329101562, 0.13823033142089844, 0.1380572204589844, 0.13797946166992187, 0.13875704956054688, 0.14116455078125, 0.1410600891113281, 0.13934956359863282, 0.138863037109375, 0.13863320922851563, 0.1382092742919922, 0.1379792938232422, 0.13969879150390624, 0.14008058166503906, 0.1385721893310547, 0.13878288269042968, 0.13868646240234375, 0.14010989379882813, 0.13932496643066405, 0.13865382385253905, 0.14063848876953125, 0.1392762908935547, 0.13821951293945312, 0.13892991638183594, 0.13875564575195312, 0.13856338500976562, 0.13876518249511718, 0.14082867431640625, 0.1400299530029297, 0.13944422912597657, 0.13959577941894533, 0.13947084045410157, 0.13836595153808592, 0.13899395751953125, 0.14022927856445314, 0.13878279113769532, 0.13830543518066407, 0.13943545532226562, 0.13834921264648437, 0.13993983459472656, 0.13976307678222658, 0.1421477508544922, 0.14023114013671875, 0.14032691955566406, 0.13987429809570312, 0.13941075134277345, 0.13873333740234375, 0.13869354248046875, 0.1404435272216797, 0.13884620666503905, 0.13967578125, 0.1384342041015625, 0.13979241943359375, 0.13860652160644532, 0.1390924530029297, 0.14966252136230468, 0.1393544921875, 0.13838482666015625, 0.13807373046875, 0.13785098266601561, 0.13799078369140624, 0.1388623046875, 0.14127952575683594, 0.1391104278564453, 0.1385328369140625, 0.139232666015625, 0.1378555908203125, 0.13858749389648437, 
0.1385284423828125, 0.14050547790527343, 0.13865971374511718, 0.13888522338867187, 0.13855984497070312, 0.1384983367919922, 0.13962445068359375, 0.1401157684326172, 0.139736572265625, 0.13843731689453126, 0.13943193054199218, 0.13852262878417967, 0.13822157287597656, 0.1383893737792969, 0.13846745300292967, 0.14028594970703126, 0.1395252227783203, 0.14003292846679688, 0.13988453674316406, 0.14023802185058593, 0.1382711944580078, 0.1386577911376953, 0.13971200561523436, 0.13995404052734375, 0.13869769287109374, 0.13917945861816405, 0.13862290954589843, 0.1385353546142578, 0.1396531219482422, 0.14074453735351564, 0.14092527770996094, 0.14039602661132813, 0.1403151397705078, 0.13887078857421875, 0.1384814453125, 0.1395447998046875, 0.13907763671875, 0.13951385498046875, 0.13949562072753907, 0.1384159393310547, 0.13829058837890626, 0.1398269805908203, 0.140114013671875, 0.14116323852539062, 0.14042521667480468, 0.140621826171875, 0.14052946472167968, 0.13939479064941407, 0.13876272583007812, 0.1398190155029297]",tokens/s,7.183117841717632,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,10038.423552,6201.147392,0.0,5798.62528,5404.427264,s,1,20.89815625,20.89815625,0.0,20.89815625,20.89815625,20.89815625,20.89815625,[20.89815625],,kWh,0.00040024678044160277,4.414279819011307e-05,0.00013691788731201582,0.0005813074659437316,,MB,5797.425152,6501.040128,0.0,6077.546496,5755.126784,s,10,1.5271648864746092,0.15271648864746093,0.0010916964101460994,0.15233518981933591,0.15430630798339845,0.15469798431396484,0.15501132537841797,"[0.15156031799316405, 0.15223187255859374, 0.15421926879882814, 0.15232298278808593, 0.15258055114746094, 0.15164454650878906, 0.15508966064453125, 0.15189042663574218, 0.15327786254882814, 0.15234739685058593]",tokens/s,1676.3088404354578,kWh,4.46883315366123e-06,4.928302049867665e-07,2.421542341272827e-06,7.383205699920823e-06,tokens/kWh,34673285.61667262,MB,5797.425152,6503.13728,0.0,6079.643648,5755.129344,s,10,93.24941699218748,9.32494169921875,0.009716649467416157,9.327667480468751,9.33487021484375,9.335433154296874,9.335883505859375,"[9.3347451171875, 9.3253798828125, 9.3232470703125, 9.329955078125, 9.31920703125, 9.3101689453125, 9.3313193359375, 9.3064541015625, 9.3329443359375, 9.33599609375]",tokens/s,6.756074411197464,kWh,0.0002715263411613404,2.9950804222524113e-05,0.00010775507357772425,0.0004092322189615888,tokens/kWh,153946.82305283807,,s,630,93.24638781738277,0.14801013939267113,0.001136996834721555,0.1477868194580078,0.14893152160644532,0.14994395751953124,0.15228412734985353,"[0.14822604370117187, 0.14845132446289064, 0.14732038879394532, 0.14766947937011718, 0.14751852416992187, 0.14834678649902344, 0.14749139404296874, 0.14758717346191405, 0.14788966369628906, 0.14725205993652343, 0.14748057556152344, 0.14785699462890625, 0.14785519409179687, 0.1472926025390625, 0.14694607543945312, 0.15223139953613282, 0.14753814697265624, 
0.14719427490234374, 0.14809088134765624, 0.1476604461669922, 0.14799293518066406, 0.14789337158203125, 0.14782144165039063, 0.1478656005859375, 0.14734051513671875, 0.15003318786621095, 0.14755430603027345, 0.14785055541992187, 0.14901875305175782, 0.1481815643310547, 0.1476259765625, 0.14760675048828126, 0.1473380126953125, 0.1476684112548828, 0.14779222106933593, 0.14894102478027343, 0.14742854309082032, 0.14742816162109376, 0.1516093444824219, 0.14813331604003907, 0.14841651916503906, 0.14779417419433594, 0.1485270690917969, 0.14793724060058594, 0.1483493194580078, 0.15421197509765625, 0.14807405090332032, 0.14790328979492187, 0.1475932159423828, 0.1482701416015625, 0.14885392761230468, 0.14771542358398437, 0.148106689453125, 0.14710476684570312, 0.1476993865966797, 0.14795571899414062, 0.1478248291015625, 0.14723289489746094, 0.14793228149414062, 0.14796070861816407, 0.1523056640625, 0.14738432312011718, 0.14828150939941406, 0.1473555908203125, 0.14798355102539062, 0.14756748962402344, 0.14774681091308595, 0.14753587341308594, 0.14759280395507812, 0.14751312255859375, 0.14724365234375, 0.1476722869873047, 0.14732185363769532, 0.14703750610351562, 0.14734999084472655, 0.14822808837890625, 0.14709555053710938, 0.14780825805664063, 0.1478588409423828, 0.14706953430175781, 0.14831814575195312, 0.15124691772460938, 0.14873568725585937, 0.1476181182861328, 0.14818917846679688, 0.14765174865722655, 0.14752175903320314, 0.14767698669433593, 0.14748141479492188, 0.1481195526123047, 0.14787286376953124, 0.14848255920410156, 0.1492545623779297, 0.14761276245117189, 0.1476637725830078, 0.1484759063720703, 0.14944601440429686, 0.14782733154296876, 0.1481168975830078, 0.14800650024414064, 0.14851174926757812, 0.14794248962402343, 0.14929193115234374, 0.14805606079101563, 0.14873599243164062, 0.14861106872558594, 0.14715493774414062, 0.14847999572753906, 0.14707302856445312, 0.1486366729736328, 0.14780450439453124, 0.15115254211425783, 0.14934707641601563, 0.14762115478515625, 0.14770454406738281, 0.14747853088378907, 0.15162144470214844, 0.14764175415039063, 0.1477742462158203, 0.14731263732910158, 0.14721229553222656, 0.14677197265625, 0.1474822998046875, 0.14777725219726562, 0.14735830688476562, 0.14723446655273437, 0.14743142700195314, 0.14722402954101563, 0.14724876403808593, 0.14806028747558594, 0.14716598510742188, 0.14809292602539062, 0.14666879272460936, 0.1468934326171875, 0.14711616516113282, 0.14706008911132812, 0.14727369689941405, 0.15079696655273436, 0.14703616333007813, 0.14698291015625, 0.146650146484375, 0.14707720947265626, 0.14704473876953125, 0.14790876770019531, 0.14700361633300782, 0.1471543731689453, 0.14658425903320313, 0.14688864135742188, 0.15163334655761718, 0.14857688903808594, 0.14836674499511718, 0.14811177062988282, 0.14697651672363282, 0.1470240020751953, 0.14778338623046874, 0.1473819885253906, 0.14869961547851562, 0.1479493408203125, 0.14912985229492187, 0.14771778869628907, 0.1472327423095703, 0.14745639038085936, 0.147593505859375, 0.14867141723632812, 0.14950694274902343, 0.1489151611328125, 0.14895404052734376, 0.14812774658203126, 0.14807859802246093, 0.14857830810546874, 0.14888755798339845, 0.14978871154785156, 0.1480908508300781, 0.14809088134765624, 0.14783680725097656, 0.1486378173828125, 0.1481810302734375, 0.14882608032226563, 0.1518981170654297, 0.1482915802001953, 0.14908006286621095, 0.14964137268066408, 0.14804328918457033, 0.14771612548828125, 0.1474644775390625, 0.14772019958496094, 0.1470545654296875, 0.14693174743652343, 0.14693113708496094, 
0.14788819885253907, 0.1487441864013672, 0.14781849670410158, 0.14918031311035157, 0.147265625, 0.14733885192871093, 0.14694236755371093, 0.147291748046875, 0.14721192932128907, 0.14791552734375, 0.14736384582519532, 0.1472791748046875, 0.1473256378173828, 0.14710400390625, 0.14763180541992188, 0.14695578002929688, 0.1470726776123047, 0.1474198760986328, 0.1472447052001953, 0.14708784484863283, 0.14709747314453125, 0.14758111572265625, 0.14789222717285155, 0.1476300811767578, 0.14706278991699218, 0.14719778442382814, 0.14930245971679687, 0.14838217163085937, 0.1477391357421875, 0.14765574645996093, 0.1478194580078125, 0.14736297607421875, 0.14760841369628908, 0.14761167907714845, 0.14780003356933594, 0.14774185180664062, 0.1494015350341797, 0.1479336700439453, 0.14769151306152345, 0.14832054138183592, 0.14774278259277343, 0.1481988830566406, 0.14809071350097655, 0.14743222045898438, 0.14802073669433594, 0.14795135498046874, 0.14778451538085938, 0.14815744018554688, 0.1482653503417969, 0.15206008911132812, 0.14791632080078124, 0.14839280700683594, 0.1478811492919922, 0.1485545654296875, 0.14835711669921875, 0.15876914978027343, 0.1493086395263672, 0.148036376953125, 0.14764796447753906, 0.1482235870361328, 0.14790956115722656, 0.14894898986816407, 0.14908332824707032, 0.14794122314453126, 0.14828338623046874, 0.14769561767578124, 0.14812364196777345, 0.1482747802734375, 0.14794154357910155, 0.15018009948730468, 0.14735565185546876, 0.14763212585449217, 0.14754365539550782, 0.14715330505371094, 0.147738525390625, 0.14755609130859376, 0.14733302307128907, 0.14736837768554686, 0.14817280578613282, 0.1479882507324219, 0.14822178649902343, 0.14798681640625, 0.14795980834960937, 0.1474429473876953, 0.1475345001220703, 0.14756991577148437, 0.14745071411132812, 0.14749676513671875, 0.14842813110351563, 0.14804031372070312, 0.147091552734375, 0.1467571258544922, 0.14739695739746095, 0.15317634582519532, 0.14735728454589844, 0.14797251892089844, 0.14911651611328125, 0.1477042236328125, 0.14709146118164063, 0.1472225341796875, 0.14821376037597656, 0.14796800231933593, 0.1485773162841797, 0.14765769958496094, 0.14741885375976563, 0.14741941833496094, 0.1482928009033203, 0.14783570861816406, 0.1488501739501953, 0.14718325805664062, 0.14816326904296875, 0.14748185729980468, 0.1475040283203125, 0.14838169860839845, 0.147662841796875, 0.14812979125976564, 0.14729420471191407, 0.1466759033203125, 0.1468302764892578, 0.14707101440429687, 0.1483044738769531, 0.14708682250976562, 0.15314614868164061, 0.14751852416992187, 0.1476982421875, 0.14724954223632813, 0.1477591094970703, 0.14729136657714845, 0.14780458068847657, 0.14735810852050782, 0.14739453125, 0.14686151123046876, 0.14653018188476563, 0.14715565490722657, 0.14701091003417968, 0.14699081420898438, 0.1492747802734375, 0.1474624938964844, 0.147210693359375, 0.14686834716796876, 0.14856179809570313, 0.14756997680664063, 0.14736575317382813, 0.14769261169433595, 0.14793504333496094, 0.1477836151123047, 0.1476300506591797, 0.14842807006835937, 0.1503690185546875, 0.14747427368164062, 0.14702403259277344, 0.14733106994628906, 0.14688050842285155, 0.14730015563964843, 0.14797433471679688, 0.14743113708496094, 0.14689920043945312, 0.1478615417480469, 0.1469890594482422, 0.14805810546875, 0.14979209899902343, 0.147806884765625, 0.1473469696044922, 0.14824476623535157, 0.1470749053955078, 0.14685423278808593, 0.14682041931152343, 0.1467739562988281, 0.14791769409179686, 0.14821104431152343, 0.14777987670898438, 0.14714247131347657, 0.14686627197265625, 
0.1474358673095703, 0.14751129150390624, 0.14810255432128908, 0.14832208251953125, 0.14810102844238282, 0.14753884887695312, 0.14778912353515625, 0.14815711975097656, 0.14801673889160155, 0.14913987731933595, 0.14773861694335938, 0.14812594604492188, 0.1497249298095703, 0.1480806427001953, 0.1489304656982422, 0.1509722900390625, 0.14853558349609375, 0.14880972290039063, 0.14761920166015624, 0.14836175537109375, 0.1482296600341797, 0.14897415161132813, 0.14841651916503906, 0.14851277160644533, 0.14874552917480469, 0.14784156799316406, 0.14775849914550782, 0.14768191528320312, 0.14758729553222658, 0.14850390625, 0.1510154266357422, 0.14762200927734376, 0.1471303405761719, 0.14782429504394531, 0.14696263122558595, 0.14791539001464843, 0.14836332702636718, 0.1485760040283203, 0.14715516662597655, 0.14724298095703126, 0.1472668762207031, 0.14715341186523437, 0.14722218322753905, 0.14789482116699218, 0.14756863403320314, 0.14710169982910157, 0.14733721923828125, 0.14719078063964844, 0.1478338623046875, 0.14702120971679689, 0.14835157775878907, 0.1498410186767578, 0.14730032348632813, 0.15188674926757811, 0.1486069793701172, 0.14925619506835938, 0.1478410186767578, 0.14886093139648438, 0.14757273864746093, 0.14784512329101562, 0.14777520751953124, 0.1480840606689453, 0.1480359649658203, 0.14867922973632813, 0.14862336730957032, 0.1476090545654297, 0.14794741821289062, 0.1481920928955078, 0.1479288635253906, 0.14722047424316406, 0.1475583953857422, 0.14854879760742187, 0.14785824584960938, 0.14773481750488282, 0.14728717041015624, 0.15053404235839843, 0.14861587524414063, 0.14767222595214843, 0.14877532958984374, 0.14804377746582031, 0.14765895080566407, 0.15354786682128907, 0.14792778015136718, 0.14817446899414063, 0.14833670043945313, 0.1477205047607422, 0.1482096710205078, 0.147662841796875, 0.1483851776123047, 0.1473460235595703, 0.14805401611328126, 0.14810121154785155, 0.1472563781738281, 0.14742311096191407, 0.14873414611816407, 0.1475919952392578, 0.15076512145996093, 0.14846377563476562, 0.1481095733642578, 0.14731983947753907, 0.14741110229492188, 0.1475524444580078, 0.1472989501953125, 0.14677810668945312, 0.148279296875, 0.14717529296875, 0.14684378051757813, 0.14703330993652344, 0.1474498291015625, 0.14727635192871094, 0.147177734375, 0.14766230773925781, 0.14748931884765626, 0.14708256530761718, 0.14701199340820312, 0.14730268859863282, 0.14710588073730468, 0.14736373901367186, 0.14790838623046876, 0.1481566467285156, 0.15130003356933594, 0.14735324096679686, 0.14632095336914064, 0.14689744567871094, 0.14764215087890625, 0.14719235229492186, 0.14737767028808593, 0.14875672912597657, 0.14809318542480468, 0.14753500366210937, 0.1473278350830078, 0.14733030700683594, 0.14682298278808595, 0.14761430358886718, 0.14714682006835939, 0.14690742492675782, 0.14648240661621093, 0.14667610168457032, 0.14724342346191407, 0.1469992980957031, 0.14791194152832032, 0.14702259826660155, 0.1473900146484375, 0.14792054748535155, 0.14836032104492186, 0.14777027893066405, 0.14820243835449218, 0.1508856964111328, 0.15063229370117187, 0.14740567016601563, 0.14764784240722656, 0.14811407470703125, 0.14728134155273437, 0.14864019775390624, 0.14828518676757813, 0.14759564208984374, 0.1475440673828125, 0.14797209167480468, 0.148636962890625, 0.14782742309570313, 0.14772735595703124, 0.1488414764404297, 0.147338623046875, 0.14994700622558593, 0.14748672485351563, 0.14786991882324219, 0.14764614868164064, 0.14779193115234374, 0.14760867309570314, 0.14792495727539062, 0.14715798950195313, 0.14770927429199218, 
0.1474443817138672, 0.14719378662109375, 0.14790383911132812, 0.15090928649902344, 0.1478170166015625, 0.147822265625, 0.14834906005859375, 0.14736384582519532, 0.14803117370605468, 0.14862765502929687, 0.14711964416503906, 0.14748045349121094, 0.14716143798828124, 0.14808294677734374, 0.1481544952392578, 0.1512342071533203, 0.1471696319580078, 0.14738021850585936, 0.14712626647949217, 0.14731878662109374, 0.14862294006347657, 0.14782899475097655, 0.14731893920898437, 0.149392578125, 0.14835746765136718, 0.14759574890136717, 0.14852265930175781, 0.14883670043945313, 0.1484451904296875, 0.1499402313232422, 0.14837554931640626, 0.14871551513671874, 0.1478238983154297, 0.1483297576904297, 0.14830397033691406, 0.14861328125, 0.14823036193847655, 0.1475852813720703, 0.14750732421875, 0.1479105224609375, 0.14790655517578125, 0.14780825805664063, 0.15580160522460937, 0.14781234741210938, 0.14780390930175782, 0.14731494140625, 0.14768515014648437, 0.14750332641601563, 0.147957763671875, 0.147810302734375, 0.14824240112304687, 0.14838787841796874, 0.14766656494140626, 0.148097412109375, 0.1475207977294922, 0.14760829162597655, 0.14750924682617186, 0.14961459350585937, 0.14770521545410156, 0.1476693115234375, 0.14839407348632813, 0.14774089050292968, 0.14780210876464844, 0.14761695861816407, 0.1476033935546875, 0.14830268859863283, 0.14744374084472656, 0.14726307678222655, 0.14740086364746094, 0.14741734313964844, 0.147418212890625, 0.14880450439453125, 0.14706207275390626, 0.14714540100097656, 0.1473966064453125, 0.14862130737304688, 0.14784626770019532, 0.1480078125, 0.14828323364257812, 0.14786688232421874, 0.1476451873779297, 0.15150032043457032, 0.14946357727050782, 0.14797219848632812, 0.1486065216064453, 0.14896786499023437, 0.1482926788330078, 0.1488555908203125, 0.1481356201171875, 0.14804412841796874, 0.1507615966796875, 0.14800653076171874, 0.14798477172851562, 0.14765213012695313, 0.14846121215820313, 0.147993408203125]",tokens/s,6.756293887049173,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,2570.887168,4626.18624,0.0,4223.664128,4030.321664,s,1,12.5597109375,12.5597109375,0.0,12.5597109375,12.5597109375,12.5597109375,12.5597109375,[12.5597109375],,kWh,0.00015487026126247654,1.7072443621940532e-05,5.116420759798679e-05,0.00022310691248240386,,MB,2361.970688,5028.839424,0.0,4613.7344,4385.215488,s,10,4.5540531005859375,0.45540531005859375,0.0005919219635178218,0.45543330383300784,0.4561183135986328,0.4562247299194336,0.4563098629760742,"[0.4558331604003906, 0.4550220031738281, 0.45609466552734373, 0.4542966613769531, 0.4552023010253906, 0.4555552978515625, 0.4553113098144531, 0.45568185424804686, 0.45633114624023435, 0.45472470092773437]",tokens/s,562.1366162091134,kWh,1.3552129847348074e-05,1.4942126184826282e-06,8.969577882726606e-06,2.401592034855731e-05,tokens/kWh,10659595.64674266,MB,2375.24992,5030.936576,0.0,4615.831552,4385.218048,s,10,43.012804687499994,4.301280468749999,0.019296070386907244,4.3017294921875,4.323596044921875,4.325184008789062,4.326454379882812,"[4.3232431640625, 4.32112744140625, 4.31060595703125, 4.3003759765625, 4.3030830078125, 4.28396923828125, 4.279109375, 4.26506298828125, 4.29945556640625, 4.32677197265625]",tokens/s,14.646801215989646,kWh,0.00012529448688723636,1.382049187245163e-05,6.89622066848737e-05,0.00020807718544456168,tokens/kWh,302772.26148267556,,s,630,43.01005075073242,0.06826992182655939,0.00084908261976647,0.0681517448425293,0.06884217987060547,0.06924348373413086,0.07237933837890625,"[0.06934566497802734, 0.06874870300292969, 0.07229654693603516, 0.06864335632324219, 0.06853632354736328, 0.06856025695800781, 0.06834867095947265, 0.06866316986083984, 0.068474365234375, 0.06839961242675781, 0.06864617919921875, 0.0685964126586914, 0.06829647827148437, 0.06886630249023437, 0.06832947540283203, 0.06853993225097656, 0.06825833892822265, 0.06879571533203124, 0.0683644790649414, 0.06921654510498047, 0.06876972961425781, 0.06891474914550781, 0.06847756958007813, 0.06862652587890625, 0.06851302337646484, 0.068729248046875, 0.06872911834716797, 0.06987267303466797, 0.06852400207519531, 0.06859407806396485, 0.06861196899414063, 0.06847293090820313, 0.06846733093261718, 0.06872438049316407, 0.06826019287109375, 0.0686346206665039, 0.0686223373413086, 0.06846463775634766, 0.06854656219482422, 0.0691273956298828, 0.0682831039428711, 0.06859136199951171, 0.07325933074951171, 0.06837042999267579, 0.06830038452148438, 0.06810460662841797, 0.06897049713134766, 0.06822707366943359, 0.06788880157470703, 0.06792636871337891, 0.068063232421875, 0.06824870300292969, 0.0682033920288086, 0.0684458236694336, 0.06826838684082032, 0.068083740234375, 0.06819225311279296, 0.06817190551757812, 0.06782704162597657, 0.06784009552001953, 0.06790534210205078, 
0.068098876953125, 0.06807942199707032, 0.06918962860107422, 0.06816349029541016, 0.06850774383544922, 0.06994739532470703, 0.0678268814086914, 0.0681248016357422, 0.06825846099853515, 0.0678809585571289, 0.06794412994384766, 0.06792841339111329, 0.06796614074707032, 0.06872761535644531, 0.06854246520996093, 0.06864486694335938, 0.06822233581542969, 0.06821542358398437, 0.0685137939453125, 0.0683108139038086, 0.06808972930908203, 0.06820899200439454, 0.06893465423583985, 0.06820262145996094, 0.0683400650024414, 0.0682913589477539, 0.06822380828857422, 0.06837753295898437, 0.06804710388183594, 0.06853810882568359, 0.0683663330078125, 0.06815641784667968, 0.06803968048095703, 0.07241315460205078, 0.06856690979003906, 0.06841974639892578, 0.06853782653808593, 0.06845085144042969, 0.06859776306152343, 0.06854803466796874, 0.06849798583984375, 0.06893158721923828, 0.06859571075439454, 0.06897203063964844, 0.06923929595947266, 0.06886553955078124, 0.06838118743896485, 0.07135596466064453, 0.07269996643066406, 0.06848140716552735, 0.06842572784423828, 0.06842543792724609, 0.06813724517822266, 0.0685730209350586, 0.06811663818359374, 0.06828646087646484, 0.06833148956298828, 0.06818739318847657, 0.0682844467163086, 0.06867420959472656, 0.06813491058349609, 0.0684749755859375, 0.06861004638671875, 0.0681779556274414, 0.06773932647705078, 0.06882099151611328, 0.06816767883300781, 0.06791986846923828, 0.06806524658203125, 0.06813494110107422, 0.06816563415527344, 0.06777446746826171, 0.06784614562988281, 0.06965030670166016, 0.06947599792480469, 0.06788313293457031, 0.06784422302246093, 0.06785871887207032, 0.06800335693359374, 0.06820291137695313, 0.06805503845214844, 0.06803273773193359, 0.06833744049072266, 0.06797920227050781, 0.06789891052246094, 0.06835408020019532, 0.06863894653320313, 0.06791414642333984, 0.06823622131347656, 0.06791420745849609, 0.06809561920166016, 0.0678092803955078, 0.0679552001953125, 0.06800828552246094, 0.06814105224609375, 0.068982177734375, 0.06831107330322266, 0.06831775665283203, 0.07250918579101563, 0.06945391845703125, 0.06847824096679687, 0.06824639892578124, 0.06811641693115235, 0.06775552368164063, 0.067916259765625, 0.0677940444946289, 0.06809420776367188, 0.06794290924072266, 0.06800201416015625, 0.0682946548461914, 0.068021728515625, 0.06794844818115234, 0.0679974365234375, 0.06795763397216797, 0.07808390045166015, 0.06816499328613282, 0.06856377410888671, 0.06770687866210938, 0.06827827453613282, 0.06796435546875, 0.06795231628417969, 0.06794902038574219, 0.06828278350830078, 0.06830694580078125, 0.06861174774169922, 0.06854441833496094, 0.06843231964111328, 0.06816767883300781, 0.06895526123046875, 0.0680079345703125, 0.06790029144287109, 0.06808274841308594, 0.06861849975585937, 0.0682400665283203, 0.06814076995849609, 0.06789087677001954, 0.0689607696533203, 0.06810329437255859, 0.06944457244873047, 0.06805299377441407, 0.06803046417236328, 0.06796083068847657, 0.06791372680664062, 0.0682455062866211, 0.06783558654785156, 0.06788742065429687, 0.06862847900390626, 0.06833097839355469, 0.06890060424804688, 0.06832208251953124, 0.0681119384765625, 0.06792031860351562, 0.0678318099975586, 0.06783385467529297, 0.06789324951171875, 0.06862582397460938, 0.06796272277832031, 0.06773554992675782, 0.06787673950195312, 0.06772006225585937, 0.06789260864257812, 0.06803520202636719, 0.06785228729248047, 0.06832742309570312, 0.06790553283691406, 0.06815542602539063, 0.06840054321289063, 0.06791622161865235, 0.06800828552246094, 0.0678766098022461, 
0.06804054260253906, 0.06803033447265625, 0.0679222412109375, 0.06784780883789063, 0.0677543716430664, 0.06780313873291016, 0.06798540496826172, 0.06814310455322266, 0.06832659149169922, 0.06791455841064453, 0.07129087829589843, 0.07260966491699218, 0.06841907501220704, 0.06816345977783203, 0.06815821075439453, 0.06805673980712891, 0.06793225860595703, 0.06811264038085937, 0.06828147125244141, 0.06849356842041016, 0.0684796142578125, 0.06955107116699219, 0.06882918548583984, 0.06843801879882813, 0.06855680084228516, 0.06827008056640625, 0.06816973114013672, 0.06834547424316406, 0.06849533081054687, 0.06872515106201171, 0.06964224243164062, 0.06852198028564453, 0.06867298889160156, 0.06857782745361328, 0.06805814361572265, 0.06800892639160157, 0.07125305938720704, 0.06904518127441406, 0.0681429443359375, 0.06835584259033203, 0.0682204818725586, 0.06828108978271484, 0.06837177276611328, 0.06853302764892578, 0.06835814666748047, 0.06823136138916015, 0.06806320190429688, 0.068189697265625, 0.06899308776855469, 0.0686185302734375, 0.06814915466308594, 0.06781660461425781, 0.06814816284179688, 0.06841129302978516, 0.06822220611572266, 0.06804531097412109, 0.06870630645751953, 0.06792431640625, 0.06784137725830078, 0.06790185546875, 0.06820009613037109, 0.06776815795898437, 0.06871532440185547, 0.07195343780517578, 0.06766889953613281, 0.06782361602783203, 0.06763724517822266, 0.06750985717773438, 0.06734384155273437, 0.06739561462402344, 0.06748182678222656, 0.06777635192871094, 0.06748860931396485, 0.06764118194580078, 0.06732118225097657, 0.0675643539428711, 0.0680072021484375, 0.06821920013427735, 0.07113359832763672, 0.06742192077636719, 0.06755123138427735, 0.06764399719238282, 0.06737888336181641, 0.06748560333251953, 0.0682136001586914, 0.06767561340332032, 0.06766236877441406, 0.06738054656982422, 0.06718243408203126, 0.06725472259521484, 0.06713180541992188, 0.06774569702148438, 0.06707743835449219, 0.06747830200195312, 0.06723526763916016, 0.06723190307617187, 0.06735504150390625, 0.06772492980957032, 0.06742198181152344, 0.06723040008544921, 0.06760649871826171, 0.06924691009521484, 0.06742572784423828, 0.06738387298583984, 0.06765900421142577, 0.06744550323486329, 0.06813075256347656, 0.068121826171875, 0.06805590057373047, 0.06838066864013671, 0.06778675079345703, 0.06792806243896485, 0.06782771301269531, 0.06797926330566406, 0.06831922912597656, 0.06772121429443359, 0.06794992065429688, 0.06801065826416015, 0.06858963012695313, 0.06801197052001953, 0.06775545501708985, 0.06800032043457031, 0.06794233703613281, 0.06838483428955078, 0.06789286041259765, 0.06811459350585937, 0.06840480041503906, 0.06909990692138672, 0.06838854217529297, 0.06880118560791015, 0.06817798614501953, 0.06814041900634765, 0.06821318054199219, 0.06790509033203125, 0.06818457794189453, 0.06761225891113282, 0.06773391723632813, 0.06795059204101563, 0.06779849243164063, 0.06798777770996094, 0.06834812927246094, 0.06827830505371094, 0.06803657531738282, 0.07080528259277344, 0.07146870422363281, 0.06808019256591796, 0.06760447692871094, 0.06859366607666016, 0.06865430450439453, 0.06796572875976563, 0.06798687744140625, 0.06862016296386719, 0.06832403564453125, 0.06782876586914062, 0.06809241485595703, 0.06840982055664062, 0.06878355407714844, 0.06872096252441406, 0.06787506866455079, 0.06789900970458984, 0.0687754898071289, 0.06744735717773437, 0.0679731216430664, 0.06761702728271485, 0.06810380554199219, 0.06815987396240235, 0.06806697845458984, 0.06791817474365235, 0.06788301086425781, 0.06797065734863281, 
0.06954338836669922, 0.06840777587890624, 0.06771708679199219, 0.06774015808105469, 0.06750768280029297, 0.06795932769775391, 0.06723583984375, 0.0675426254272461, 0.06735814666748047, 0.0680782699584961, 0.06764972686767579, 0.0680093765258789, 0.06771753692626953, 0.06755766296386718, 0.06734966278076172, 0.06788697814941407, 0.0684144287109375, 0.06771075439453125, 0.0674566421508789, 0.06765235137939453, 0.06775587463378906, 0.06773881530761719, 0.06765017700195312, 0.0676824951171875, 0.06774937438964844, 0.06779917144775391, 0.06759257507324219, 0.06779618835449219, 0.06777024078369141, 0.06749890899658204, 0.06751232147216797, 0.06745420837402344, 0.06769491577148437, 0.06782160186767579, 0.06805955505371093, 0.06760243225097656, 0.06762905883789062, 0.06778205108642578, 0.06813533020019531, 0.06792211151123047, 0.06848659515380859, 0.06799427032470703, 0.06749014282226562, 0.06777996826171875, 0.0675528335571289, 0.06789398193359375, 0.06802655792236328, 0.06772306823730469, 0.06779641723632812, 0.06785036468505859, 0.06778514862060547, 0.06775305938720703, 0.06753517150878906, 0.06768521881103516, 0.06750982666015624, 0.06829676818847656, 0.0678231658935547, 0.06801261138916016, 0.0676778564453125, 0.06786438751220703, 0.06748828887939454, 0.06763132476806641, 0.06765955352783203, 0.06736809539794922, 0.06789785766601562, 0.0685013427734375, 0.06732032012939453, 0.06829625701904297, 0.06728508758544922, 0.06741232299804688, 0.06737232208251953, 0.06732054138183594, 0.06754918670654297, 0.06751026916503906, 0.06753651428222657, 0.06758847808837891, 0.0676659164428711, 0.06764339447021485, 0.06811033630371094, 0.06719254302978515, 0.06737715148925781, 0.06734982299804687, 0.06739247894287109, 0.06728089904785156, 0.06723343658447266, 0.067529052734375, 0.06791913604736328, 0.06738428497314453, 0.0673749771118164, 0.06728688049316406, 0.06746268463134765, 0.06802268981933594, 0.06736064147949218, 0.07091222381591797, 0.06724537658691407, 0.06733004760742188, 0.06868000030517578, 0.06744483184814454, 0.0672747802734375, 0.06738150024414062, 0.06761199951171876, 0.06782428741455078, 0.06723529815673829, 0.06909846496582031, 0.06835507202148437, 0.06809468841552735, 0.06817411041259766, 0.06795263671875, 0.06862655639648438, 0.06825353240966797, 0.06836841583251953, 0.0683663330078125, 0.0691568603515625, 0.06860765075683593, 0.06786185455322266, 0.06813085174560547, 0.06842467498779296, 0.06787481689453125, 0.06793215942382813, 0.06783948516845703, 0.06789376068115234, 0.06812601470947266, 0.06855305480957032, 0.0681961898803711, 0.06821939086914063, 0.06771302032470704, 0.06819126129150391, 0.06815433502197266, 0.0683371810913086, 0.06844022369384765, 0.06835753631591797, 0.06803753662109376, 0.06783190155029296, 0.06777641296386719, 0.07040419006347656, 0.06790902709960937, 0.06822105407714844, 0.06869868469238281, 0.06794425964355469, 0.06805027008056641, 0.06848915100097656, 0.06801097869873046, 0.06802764892578125, 0.06804531097412109, 0.06842982482910157, 0.06871392059326172, 0.06797721862792969, 0.06810854339599609, 0.06822537231445312, 0.06806047821044922, 0.06810873413085937, 0.06883958435058593, 0.06895340728759766, 0.06808998107910157, 0.06823958587646485, 0.06805728149414063, 0.06798115539550781, 0.06803314971923828, 0.06763085174560547, 0.06840918731689453, 0.06812627410888672, 0.06790569305419922, 0.06816947174072266, 0.06822370910644532, 0.06804271697998047, 0.06811033630371094, 0.06894041442871093, 0.06875341033935548, 0.06900326538085938, 0.07260163116455078, 
0.06875132751464844, 0.0682577896118164, 0.06836803436279297, 0.06846876525878906, 0.06833388519287109, 0.06840882873535156, 0.06861443328857422, 0.06852207946777343, 0.06875788879394532, 0.06874700927734376, 0.06838601684570313, 0.06861692810058594, 0.06895136260986329, 0.0685575714111328, 0.06838214111328125, 0.06840582275390625, 0.0682220458984375, 0.06831552124023438, 0.06873894500732422, 0.06848566436767578, 0.06855078125, 0.06832259368896484, 0.06850633239746094, 0.06896009826660156, 0.0693018569946289, 0.0687432632446289, 0.06910569763183594, 0.06856159973144531, 0.0694883804321289, 0.06842777252197266, 0.06905852508544921, 0.06844547271728516, 0.06817779541015626, 0.06822592163085937, 0.06833356475830078, 0.06825596618652344, 0.068497314453125, 0.06835391998291016, 0.06860140991210938, 0.0686434555053711, 0.0684071044921875, 0.06838665771484374, 0.06855632019042969, 0.06802848052978516, 0.06830137634277343, 0.06877337646484374, 0.06850214385986328, 0.06846451568603516, 0.06843801879882813, 0.06836803436279297, 0.06825814056396484, 0.0684791030883789, 0.06898390197753906, 0.06855343627929687, 0.06890694427490235, 0.06900278472900391, 0.0694974365234375, 0.0691568603515625, 0.0692630386352539]",tokens/s,14.647739051767378,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,2226.610176,7362.9696,0.0,6960.447488,6722.822144,s,1,15.5586279296875,15.5586279296875,0.0,15.5586279296875,15.5586279296875,15.5586279296875,15.5586279296875,[15.5586279296875],,kWh,0.00024386599877502705,2.688329447947475e-05,8.306978867794745e-05,0.0003538190819324492,,MB,1776.050176,7929.20064,0.0,7514.095616,7161.40544,s,10,9.931001525878905,0.9931001525878905,0.005119391927292511,0.9944275512695313,0.9973939392089843,0.9980315032958984,0.9985415545654297,"[0.9795491943359375, 0.9907459716796875, 0.99339013671875, 0.9909696655273438, 0.9954287109375, 0.9934263916015625, 0.995573974609375, 0.9972522583007812, 0.9959961547851562, 0.9986690673828125]",tokens/s,257.7786332354266,kWh,2.882967302726601e-05,3.1785697068974195e-06,1.913473752999813e-05,5.114298026416156e-05,tokens/kWh,5005574.541759585,MB,1776.050176,7931.297792,0.0,7516.192768,7161.408,s,10,48.2109404296875,4.82109404296875,0.010479613122296498,4.8224638671874995,4.830592822265625,4.832172875976562,4.833436918945313,"[4.80013525390625, 4.80412451171875, 4.81830224609375, 4.82221923828125, 4.82218896484375, 4.82270849609375, 4.82813037109375, 4.83024169921875, 4.82913671875, 4.8337529296875]",tokens/s,13.067573343001134,kWh,0.00014136180406357278,1.5594097937506917e-05,9.36259637896049e-05,0.0002505818657906846,tokens/kWh,251414.84121849824,,s,630,48.20776026916506,0.07652025439550006,0.0015963603981832533,0.07635540771484375,0.0774349624633789,0.07782741546630859,0.08724164604187012,"[0.08868131256103516, 0.07672217559814454, 0.07593778991699218, 0.0753070068359375, 0.07484210968017578, 0.07487693023681641, 0.07618345642089844, 0.07571612548828124, 0.07525603485107422, 0.07491187286376953, 0.07613648223876954, 0.07616121673583984, 0.07580985260009766, 0.07581587219238281, 0.075611328125, 0.07678034973144532, 0.07605964660644532, 0.07566438293457031, 0.07523532867431641, 0.07511571502685546, 0.07640761566162109, 0.07582310485839844, 0.0755384292602539, 
0.0751103973388672, 0.0776866226196289, 0.07626153564453125, 0.07565216064453124, 0.0752239990234375, 0.07701660919189453, 0.07652825927734375, 0.07576898956298828, 0.07532784271240234, 0.07555101013183593, 0.07770732879638671, 0.07647232055664062, 0.07578009796142578, 0.07535820770263672, 0.07534182739257812, 0.07740761566162109, 0.07653190612792969, 0.07592182159423828, 0.07565315246582031, 0.07645798492431641, 0.07657408142089844, 0.07603660583496094, 0.07579801940917968, 0.075328125, 0.07752294158935547, 0.07643337249755859, 0.07576544189453124, 0.07551830291748046, 0.075287841796875, 0.0771099853515625, 0.0763586883544922, 0.07581180572509766, 0.07520384216308594, 0.0778862075805664, 0.07665849304199218, 0.07616531372070312, 0.07575766754150391, 0.0752923812866211, 0.07574332427978515, 0.0762041244506836, 0.08694652557373046, 0.07653523254394531, 0.0759031982421875, 0.07536576080322266, 0.074904541015625, 0.07644364929199218, 0.0760074234008789, 0.0754585952758789, 0.07496278381347657, 0.07675302124023438, 0.07621631622314454, 0.07563673400878906, 0.07501618957519532, 0.07694687652587891, 0.07649542236328125, 0.07582720184326172, 0.07559542083740234, 0.0751578598022461, 0.07770726776123046, 0.07640268707275391, 0.07569554901123046, 0.07534611511230468, 0.07645852661132813, 0.07606159973144531, 0.07575430297851563, 0.07531657409667969, 0.07527123260498046, 0.07798518371582032, 0.07636576080322266, 0.0759135971069336, 0.07527750396728515, 0.07632364654541016, 0.07652761840820313, 0.07603196716308594, 0.07564701080322266, 0.07516912078857421, 0.077578369140625, 0.07632876586914063, 0.0756860122680664, 0.07524781036376953, 0.07610614776611328, 0.07718025970458985, 0.07633168029785156, 0.07570614624023438, 0.07517001342773437, 0.07703721618652344, 0.07648086547851562, 0.07596620941162109, 0.07563715362548828, 0.07611785888671875, 0.07715328216552735, 0.0768174057006836, 0.07597171020507812, 0.07633395385742188, 0.07603609466552734, 0.07704370880126953, 0.07627120208740235, 0.07602764892578125, 0.07529948425292969, 0.07528425598144531, 0.07698249816894531, 0.07653177642822266, 0.07606060791015624, 0.08759439849853516, 0.07650595092773438, 0.07586544036865234, 0.07566143798828125, 0.07609986877441406, 0.0757218246459961, 0.07579535675048828, 0.07545378875732422, 0.07517183685302735, 0.07628998565673828, 0.07575411224365235, 0.07566671752929688, 0.0760975341796875, 0.07814636993408203, 0.07661571502685546, 0.07609907531738282, 0.0756204833984375, 0.07570854187011719, 0.0774758071899414, 0.07636198425292968, 0.07579033660888672, 0.07535014343261719, 0.07506253051757812, 0.07672857666015626, 0.07631501007080078, 0.07571878051757812, 0.07664937591552734, 0.07661190032958984, 0.07653782653808594, 0.07632921600341797, 0.0764767074584961, 0.07607107543945313, 0.07676723480224609, 0.07608726501464844, 0.07580006408691406, 0.07598115539550782, 0.07554886627197266, 0.07795916748046874, 0.07637811279296874, 0.07568704223632812, 0.07659369659423829, 0.07654844665527344, 0.07782556915283204, 0.07646051025390625, 0.07584294128417969, 0.07533631896972656, 0.07674470520019532, 0.0772747802734375, 0.07652210998535157, 0.0758348159790039, 0.07528681945800782, 0.07664364624023437, 0.07666963195800781, 0.07644159698486327, 0.07688604736328125, 0.07656553649902344, 0.07711126708984375, 0.07720035552978516, 0.07651058959960938, 0.07596819305419922, 0.07659529876708984, 0.0760898895263672, 0.07749382019042969, 0.08792678070068359, 0.07646752166748047, 0.07671673583984374, 0.07608246612548829, 0.0757045440673828, 
0.07495526123046875, 0.0763125762939453, 0.07588864135742188, 0.07578787231445312, 0.07559824371337891, 0.0754298858642578, 0.07699456024169922, 0.07676927947998047, 0.0764559326171875, 0.0761401596069336, 0.07664873504638672, 0.07770649719238282, 0.07623152160644531, 0.07571024322509766, 0.0751569595336914, 0.07635193634033204, 0.07602528381347656, 0.07574617767333984, 0.07566336059570312, 0.07538483428955078, 0.0777072982788086, 0.07640787506103516, 0.07581788635253907, 0.07670579528808594, 0.07651673889160156, 0.07776284790039062, 0.07627606201171876, 0.07591657257080078, 0.07549411010742188, 0.07639360046386719, 0.07719615936279296, 0.07624454498291015, 0.07574489593505859, 0.07527091217041015, 0.07687583923339844, 0.07683817291259766, 0.07648751831054687, 0.07672614288330078, 0.07672608184814453, 0.07612783813476562, 0.07844131469726562, 0.07652121734619141, 0.07593164825439454, 0.07533977508544921, 0.0764067840576172, 0.0778076171875, 0.07656204986572265, 0.07602352142333985, 0.07561897277832032, 0.07641478729248047, 0.07790406036376953, 0.07654338836669922, 0.07689625549316406, 0.07645980834960937, 0.07591404724121094, 0.0776146240234375, 0.07643116760253907, 0.07598966217041016, 0.08729804992675781, 0.07684867095947266, 0.07605846405029297, 0.07564364624023437, 0.07485939025878906, 0.0762603530883789, 0.07643276977539062, 0.07581116485595703, 0.07553443145751954, 0.07497090911865234, 0.07688365173339844, 0.0759405746459961, 0.07682048034667968, 0.07668431854248046, 0.07646466827392578, 0.07796781158447266, 0.07617565155029297, 0.07564669036865235, 0.07567974090576172, 0.07628594970703124, 0.07746969604492188, 0.0762081298828125, 0.07557734680175782, 0.07524063873291016, 0.07651766204833985, 0.07681283569335938, 0.07619564819335937, 0.07658515167236328, 0.07617235565185547, 0.07687673950195313, 0.07640473937988282, 0.07581491088867187, 0.07629424285888672, 0.07587625885009766, 0.07781558227539062, 0.07632268524169922, 0.07585417938232422, 0.07540924835205078, 0.07606902313232422, 0.07732137298583984, 0.07640483093261718, 0.0759283218383789, 0.07637606048583985, 0.07683404541015625, 0.07767702484130859, 0.07643965148925781, 0.07628988647460938, 0.07588297271728515, 0.07651315307617187, 0.07672748565673829, 0.07639942169189454, 0.07576780700683594, 0.07617945861816407, 0.07706524658203125, 0.07710205078125, 0.07659232330322266, 0.07609632110595703, 0.07689356994628906, 0.07655677032470704, 0.07786921691894531, 0.07660883331298828, 0.07604294586181641, 0.07648870086669922, 0.08710355377197265, 0.07666764831542969, 0.07579449462890625, 0.07543801879882812, 0.0764211196899414, 0.07583539581298829, 0.07547084808349609, 0.0749095687866211, 0.07635161590576171, 0.07640409851074219, 0.07608179473876953, 0.07562035369873046, 0.07676927947998047, 0.07826809692382812, 0.07646176147460937, 0.07627635192871093, 0.0756365737915039, 0.0756962890625, 0.07694745635986328, 0.07619789123535156, 0.07564083099365235, 0.07573731231689453, 0.07552182769775391, 0.07732864379882813, 0.07632838439941406, 0.076648193359375, 0.0763070068359375, 0.07668498992919921, 0.07712185668945312, 0.0762060775756836, 0.07630438232421875, 0.07583055877685548, 0.07638931274414062, 0.07652738952636719, 0.07621427154541016, 0.07577766418457031, 0.07623513793945312, 0.07743711853027344, 0.07649628448486329, 0.07615846252441406, 0.07670467376708984, 0.07635295867919922, 0.07701152038574219, 0.07654393768310547, 0.0760033950805664, 0.07618537902832032, 0.07594825744628907, 0.07780044555664062, 0.0765182113647461, 
0.0758458251953125, 0.07648255920410156, 0.07655564880371094, 0.07759318542480469, 0.07651856231689454, 0.07589913940429688, 0.07681292724609375, 0.07640422058105469, 0.0771478042602539, 0.07674684906005859, 0.07683968353271485, 0.07663327789306641, 0.07593651580810547, 0.07666899108886718, 0.08856982421875, 0.07659264373779297, 0.07601001739501953, 0.07618669128417968, 0.07637702178955078, 0.07643545532226563, 0.075482177734375, 0.07581136322021484, 0.07666636657714844, 0.07595919799804687, 0.07615827178955079, 0.07563142395019531, 0.07660940551757812, 0.0786145248413086, 0.07651123046875, 0.07606646728515624, 0.0763369598388672, 0.0759260482788086, 0.0772833251953125, 0.07618707275390625, 0.07574339294433594, 0.075274658203125, 0.07678771209716796, 0.07644569396972656, 0.0758733139038086, 0.07669427490234375, 0.07635785675048828, 0.07691795349121094, 0.07691756439208984, 0.07651942443847656, 0.07615497589111328, 0.07640054321289062, 0.07661516571044921, 0.07639500427246093, 0.07580467224121094, 0.07571849822998047, 0.07571062469482422, 0.07822541046142578, 0.0766402587890625, 0.07629618835449219, 0.07673241424560547, 0.07640064239501954, 0.07737139129638672, 0.07658659362792969, 0.07613075256347657, 0.07634483337402344, 0.07618198394775391, 0.0765296630859375, 0.07658892822265626, 0.07620211029052734, 0.07655769348144531, 0.07620198059082031, 0.07721430206298828, 0.07688217926025391, 0.07681001281738281, 0.07634944152832031, 0.07594188690185547, 0.07754752349853515, 0.07713177490234376, 0.07646412658691407, 0.07611289978027344, 0.0755077133178711, 0.07711382293701172, 0.08824515533447265, 0.07660374450683594, 0.07593228912353515, 0.07544831848144531, 0.07665049743652344, 0.07610163116455078, 0.07569328308105469, 0.07554873657226563, 0.07641980743408203, 0.07648265838623047, 0.07592924499511719, 0.07546867370605469, 0.0774618911743164, 0.0780800018310547, 0.07801548767089844, 0.07637913513183593, 0.07565286254882812, 0.07601996612548828, 0.07584153747558593, 0.07654310607910156, 0.07590592193603515, 0.07557526397705078, 0.07588662719726562, 0.07788748931884766, 0.07645388793945312, 0.07691059112548829, 0.07681635284423828, 0.07649897766113281, 0.07642217254638672, 0.07740310668945312, 0.07631462097167968, 0.07590911865234375, 0.07584767913818359, 0.07630438232421875, 0.07589622497558594, 0.0764012451171875, 0.07608294677734374, 0.07669376373291016, 0.07777254486083984, 0.07737983703613281, 0.07645929718017579, 0.07671046447753906, 0.07650227355957032, 0.07584246063232422, 0.07840767669677734, 0.07641423797607422, 0.07594601440429688, 0.07578860473632812, 0.07547936248779297, 0.07793682861328124, 0.07657049560546875, 0.0767586898803711, 0.07646991729736329, 0.07674336242675782, 0.07754742431640625, 0.07653376007080079, 0.0759747543334961, 0.07638822174072266, 0.07607923126220703, 0.0775946273803711, 0.07649807739257812, 0.07587107086181641, 0.07651123046875, 0.08692963409423828, 0.07681011199951172, 0.07612348937988281, 0.0759856948852539, 0.07562172698974609, 0.07671212768554687, 0.0761983642578125, 0.07567059326171875, 0.07562509155273438, 0.07553446197509765, 0.07672838592529296, 0.07603807830810547, 0.07667731475830078, 0.07671520233154297, 0.07721580505371094, 0.07764153289794921, 0.07619219207763672, 0.07562290954589844, 0.07526982116699218, 0.07682080078125, 0.07629843139648437, 0.07560934448242188, 0.07532991790771484, 0.0764306869506836, 0.07785148620605468, 0.0765807647705078, 0.07651686096191407, 0.07674896240234375, 0.07629657745361328, 0.07766022491455078, 0.07654323577880859, 
0.07616162872314453, 0.07585193634033204, 0.07568179321289062, 0.07686675262451172, 0.07654892730712891, 0.07634320068359375, 0.07669065856933593, 0.07643631744384766, 0.07754550170898437, 0.07653897857666016, 0.07684780883789062, 0.07684732818603515, 0.07636780548095704, 0.07667884826660157, 0.07658512115478516, 0.07605270385742187, 0.0762669448852539, 0.0761800308227539, 0.07714406585693359, 0.07656393432617188, 0.07674470520019532, 0.07642166137695312, 0.07687987518310548, 0.07699046325683594, 0.07830323028564454, 0.07665254211425782, 0.07588992309570312, 0.07629286193847656, 0.07602537536621094, 0.07782892608642578, 0.07657170867919921, 0.07600367736816406, 0.08913116455078125, 0.0776437759399414, 0.07650726318359374, 0.07685311889648437, 0.07596620941162109, 0.07558579254150391, 0.0750018539428711, 0.0767938232421875, 0.07602178955078125, 0.07560594940185547, 0.07493023681640625, 0.07657266998291015, 0.07922412872314454, 0.07743472290039062, 0.07656944274902344, 0.07661734771728515, 0.07608480072021484, 0.07668000030517579, 0.0766088638305664, 0.07609638214111328, 0.0756794204711914, 0.07588054656982422, 0.07705804443359375, 0.07608451080322266, 0.07572767639160156, 0.07642060852050782, 0.07670416259765625, 0.07808573150634765, 0.07646249389648438, 0.0762386245727539, 0.07662351989746094, 0.07654252624511719, 0.0764067840576172, 0.07576579284667968, 0.07631254577636719, 0.07622451019287109, 0.07582297515869141, 0.07784659576416016, 0.07667696380615234, 0.07687599945068359, 0.07686348724365234, 0.07653753662109375, 0.07692320251464843, 0.0776349105834961, 0.07656825256347656, 0.07578518676757813, 0.07529004669189453, 0.07677177429199218, 0.07684288024902344, 0.07620633697509765, 0.07630028533935547, 0.07706409454345703, 0.07689430236816407, 0.07777279663085937, 0.07652352142333985, 0.07599110412597657, 0.07682656097412109, 0.07648665618896484, 0.07756185913085938, 0.07642726135253906, 0.07594393920898437, 0.07663804626464844, 0.07622239685058593]",tokens/s,13.068435382237924,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1902.579712,7362.9696,0.0,6960.447488,6722.822144,s,1,16.066189453125,16.066189453125,0.0,16.066189453125,16.066189453125,16.066189453125,16.066189453125,[16.066189453125],,kWh,0.0002406327451207744,2.653632064823482e-05,8.177812097800996e-05,0.00034894718674701917,,MB,1527.054336,7929.20064,0.0,7514.095616,7161.536512,s,10,9.987707153320311,0.9987707153320311,0.005996558988098421,1.0004908447265626,1.0036164978027344,1.0050781463623046,1.006247465209961,"[0.9849230346679687, 0.991913330078125, 0.999428466796875, 0.9971673583984375, 0.9978992309570313, 1.00155322265625, 1.0031094970703125, 1.0032916870117188, 1.006539794921875, 1.0018815307617188]",tokens/s,256.3150842031801,kWh,2.900218172992826e-05,3.1984743765268106e-06,1.9200545663453335e-05,5.140120176990841e-05,tokens/kWh,4980428.3009948805,MB,1559.969792,7933.394944,0.0,7516.192768,7161.539072,s,10,48.511664062499996,4.85116640625,0.012437812618754778,4.855600830078124,4.86567041015625,4.866942626953125,4.867960400390625,"[4.83149267578125, 4.83238525390625, 4.8376416015625, 4.84983984375, 4.85432177734375, 4.8568798828125, 4.85734033203125, 4.85816015625, 4.86821484375, 
4.8653876953125]",tokens/s,12.986567502370967,kWh,0.00014245484337715842,1.5713788079402335e-05,9.453412865754992e-05,0.00025270276011411065,tokens/kWh,249304.75619479452,,s,630,48.50866378021242,0.07699787901621016,0.0015863415168456893,0.07678633499145507,0.07799099731445314,0.07865159873962402,0.08724041122436524,"[0.0884225311279297, 0.07730995178222656, 0.0763449935913086, 0.07599549102783203, 0.07677776336669923, 0.0761374740600586, 0.07585782623291015, 0.07561708831787109, 0.0754892807006836, 0.07755264282226562, 0.07677970886230469, 0.07617314910888671, 0.07626783752441406, 0.07572342681884765, 0.07741990661621094, 0.07663196563720703, 0.07629692840576172, 0.07577347564697266, 0.07572322845458984, 0.07747910308837891, 0.07660832214355469, 0.07615283203125, 0.07562239837646484, 0.07567504119873047, 0.07774678039550781, 0.07681024169921875, 0.07625462341308593, 0.07574137878417969, 0.07569039916992187, 0.07766220855712891, 0.07687168121337891, 0.07640175628662109, 0.07568067169189453, 0.07571651458740235, 0.07747312164306641, 0.07779555511474609, 0.07667139434814453, 0.07633926391601563, 0.07603311920166016, 0.07705257415771484, 0.07684883117675781, 0.07648944091796875, 0.07619983673095704, 0.07589888000488282, 0.07583539581298829, 0.07754956817626953, 0.07688092803955078, 0.07653587341308593, 0.07591574096679687, 0.07725279998779297, 0.07683302307128906, 0.07660476684570312, 0.07642384338378906, 0.07618764495849609, 0.07601356506347656, 0.07729663848876953, 0.07691379547119141, 0.07649238586425781, 0.0761747817993164, 0.07589974212646484, 0.07746089935302734, 0.0770709457397461, 0.07665654754638672, 0.0870830078125, 0.07697612762451173, 0.07624861145019532, 0.075855712890625, 0.07554112243652343, 0.07566146850585938, 0.07691452789306641, 0.07644732666015625, 0.07591088104248046, 0.07550422668457031, 0.0769291229248047, 0.07658828735351562, 0.07627442932128906, 0.07616457366943359, 0.07587689971923828, 0.07710514831542968, 0.07658700561523438, 0.07626137542724609, 0.07583948516845704, 0.07562035369873046, 0.07705123138427734, 0.07678966522216797, 0.07630899047851562, 0.07613056182861327, 0.07568915557861328, 0.07710189056396484, 0.07712092590332031, 0.07669734191894531, 0.0762663345336914, 0.07584563446044922, 0.0769269790649414, 0.07757727813720704, 0.07678662109375, 0.07659043121337891, 0.07595279693603516, 0.07593574523925781, 0.07759053039550781, 0.07841382598876953, 0.07635968017578125, 0.07594303894042968, 0.07585459136962891, 0.07720134735107421, 0.07787474822998047, 0.07679974365234375, 0.07639129638671875, 0.0758292465209961, 0.0759705581665039, 0.07788953399658204, 0.07694960021972656, 0.07649270629882812, 0.07585587310791016, 0.07581696319580078, 0.07735657501220704, 0.07702985382080078, 0.07647545623779296, 0.07704262542724609, 0.07661363220214844, 0.07626751708984375, 0.07804083251953126, 0.07704342651367188, 0.0768067169189453, 0.07602754974365235, 0.07599136352539063, 0.08730470275878906, 0.07690854644775391, 0.07625727844238281, 0.07600454711914062, 0.07551625823974609, 0.079636962890625, 0.07640870666503906, 0.07620182037353515, 0.07608505249023438, 0.07606114959716796, 0.07571459197998047, 0.07706777954101562, 0.07668169403076172, 0.07619766235351562, 0.07582508850097656, 0.07548777770996094, 0.07773702239990235, 0.07679865264892578, 0.07618128204345703, 0.07555644989013671, 0.07593638610839844, 0.07870256042480468, 0.07706342315673828, 0.07634409332275391, 0.07579622650146485, 0.07565974426269531, 0.07708191680908204, 0.07682915496826172, 0.07644086456298828, 
0.07615151977539063, 0.07576576232910157, 0.07684300994873047, 0.07657062530517578, 0.076328125, 0.07608812713623046, 0.0770927963256836, 0.0765440673828125, 0.07781990051269531, 0.07684710693359376, 0.07635939025878906, 0.0758786849975586, 0.07587747192382813, 0.07903324890136719, 0.07728153228759765, 0.07633049774169921, 0.07592889404296875, 0.07587462615966797, 0.0785551986694336, 0.07713849639892578, 0.07659315490722657, 0.07596851348876953, 0.07681433868408204, 0.07656578826904296, 0.07800905609130859, 0.0770723876953125, 0.07661772918701172, 0.07585382080078125, 0.07585804748535156, 0.07768857574462891, 0.07735334777832031, 0.07678540802001953, 0.07639180755615234, 0.07595878601074219, 0.08681472015380859, 0.07695929718017579, 0.07632121276855469, 0.07590294647216797, 0.07558147430419922, 0.0770355224609375, 0.07664844512939453, 0.076025634765625, 0.07576137542724609, 0.07554303741455078, 0.07745897674560547, 0.07674723052978516, 0.07614179229736329, 0.07671024322509766, 0.07678406524658203, 0.07816000366210937, 0.07772045135498047, 0.07659616088867187, 0.07611746978759766, 0.07558000183105469, 0.07718431854248047, 0.07758509063720703, 0.07679151916503907, 0.07639068603515625, 0.07576774597167969, 0.07589008331298829, 0.07829161834716797, 0.07693318176269531, 0.07647225952148437, 0.07739801788330078, 0.07697756958007812, 0.07626541137695313, 0.07768460845947266, 0.07682537841796876, 0.07629618835449219, 0.07582710266113281, 0.0770499496459961, 0.07762902069091797, 0.07720703887939454, 0.07661660766601562, 0.0762060775756836, 0.0768511962890625, 0.07747379302978516, 0.0784910430908203, 0.07702121734619141, 0.07651561737060547, 0.07599443054199219, 0.07583197021484375, 0.07790326690673828, 0.07718800354003906, 0.07669145965576171, 0.07708265686035157, 0.07661769866943359, 0.07623474884033203, 0.07860633850097656, 0.07745536041259765, 0.07690396881103516, 0.07711698913574219, 0.07666371154785157, 0.07630233764648438, 0.07862831878662109, 0.07726886749267578, 0.07677808380126953, 0.08860489654541015, 0.07716579437255859, 0.07632323455810547, 0.07718310546875, 0.07648461151123047, 0.07606259155273437, 0.07555280303955078, 0.07676322937011719, 0.07757766723632813, 0.07652816009521485, 0.07609104156494141, 0.07551529693603516, 0.07688438415527343, 0.0785306854248047, 0.07689017486572265, 0.07616681671142578, 0.07602249908447266, 0.07648051452636719, 0.0770723876953125, 0.07708207702636719, 0.07661811065673828, 0.07607516479492188, 0.0764927978515625, 0.07652137756347656, 0.07743004608154297, 0.07679264068603515, 0.07702342224121093, 0.07709062194824219, 0.07603609466552734, 0.07806156921386719, 0.07690377807617188, 0.07631529235839844, 0.07597465515136718, 0.0759928970336914, 0.07745555114746094, 0.07686547088623047, 0.07707654571533203, 0.07692031860351563, 0.07643142700195313, 0.07658950042724609, 0.07879235076904297, 0.07723811340332032, 0.07687324523925781, 0.07660643005371094, 0.07650669097900391, 0.07665299224853515, 0.07846502685546874, 0.0770389404296875, 0.0764463653564453, 0.07693516540527344, 0.07664435577392578, 0.07724237060546875, 0.0790296630859375, 0.07724838256835938, 0.07650521850585937, 0.07697004699707032, 0.07693949127197265, 0.076429443359375, 0.07894643402099609, 0.077242431640625, 0.07666022491455078, 0.07607961273193359, 0.07688150024414063, 0.08782438659667968, 0.07761859130859375, 0.07732899475097656, 0.0766578598022461, 0.07632978820800781, 0.07566751861572266, 0.07550355529785156, 0.07683277130126953, 0.07638015747070312, 0.07604185485839844, 
0.07571433258056641, 0.07714262390136718, 0.07914701080322266, 0.07704950714111328, 0.07716899108886718, 0.07689539337158204, 0.0763031997680664, 0.0772669448852539, 0.07762242889404297, 0.07656944274902344, 0.07617536163330078, 0.07560765075683594, 0.07569190216064453, 0.07746819305419922, 0.0769454116821289, 0.07719017791748047, 0.07730480194091797, 0.07680818939208985, 0.07734627532958985, 0.07723001861572265, 0.07723683166503906, 0.07684915161132813, 0.07630242919921874, 0.07604214477539062, 0.07784563446044922, 0.07682310485839844, 0.07645011138916015, 0.07640882873535157, 0.07695539093017578, 0.07750272369384766, 0.0786534423828125, 0.07723190307617188, 0.07651862335205079, 0.07649718475341796, 0.07706419372558594, 0.07730249786376953, 0.07832563018798828, 0.07706208038330079, 0.07656034851074218, 0.07613871765136719, 0.07603734588623047, 0.07855628967285157, 0.07855068969726563, 0.07703072357177734, 0.07649702453613282, 0.07607353973388672, 0.07661158752441406, 0.07813120269775391, 0.07837081909179687, 0.07712358093261719, 0.07653517150878907, 0.07630006408691406, 0.07614502716064453, 0.0876445083618164, 0.07744921875, 0.07702294158935546, 0.07635382080078125, 0.07599129486083984, 0.07655801391601562, 0.07688521575927734, 0.07704239654541016, 0.07647456359863282, 0.07607315063476562, 0.07551155090332032, 0.07675289916992188, 0.07798899078369141, 0.07740268707275391, 0.07683103942871093, 0.07635558319091797, 0.07659910583496093, 0.07761023712158203, 0.0770241928100586, 0.07665634918212891, 0.0763680648803711, 0.07603968048095704, 0.07708029174804687, 0.07649983978271484, 0.07630403137207031, 0.07771376037597656, 0.07715020751953125, 0.076621826171875, 0.07762124633789062, 0.07710514831542968, 0.07664844512939453, 0.07610572814941406, 0.07667097473144531, 0.077412353515625, 0.0783946533203125, 0.07699971008300781, 0.07629901123046876, 0.07586720275878907, 0.07698419189453125, 0.07790982055664063, 0.0776909408569336, 0.07687181091308594, 0.07641043090820313, 0.07601139068603516, 0.0769454116821289, 0.07841273498535156, 0.07718643188476562, 0.07670604705810546, 0.07701913452148437, 0.07657437133789062, 0.07704201507568359, 0.07858585357666016, 0.0773359375, 0.07663270568847656, 0.07672329711914062, 0.07652793884277344, 0.07651286315917968, 0.07934639739990235, 0.07734054565429688, 0.07689398193359374, 0.07652585601806641, 0.07700310516357421, 0.07672978973388672, 0.08698470306396484, 0.0774103012084961, 0.07649075317382813, 0.07603711700439453, 0.07545049285888672, 0.07683360290527344, 0.0775285415649414, 0.07652413177490235, 0.07606665802001954, 0.07551606750488281, 0.07559986877441406, 0.07736118316650391, 0.07853052520751953, 0.07754946899414063, 0.07714530944824219, 0.07671427154541016, 0.07617151641845703, 0.07762262725830078, 0.07700377655029297, 0.07627142333984376, 0.07591919708251953, 0.07558937835693359, 0.07819644927978515, 0.0769578857421875, 0.07636038208007813, 0.07756390380859375, 0.0770334701538086, 0.07721756744384765, 0.07820438385009766, 0.07690930938720703, 0.07648255920410156, 0.07607465362548828, 0.0757476806640625, 0.07766204833984375, 0.07684317016601562, 0.07640268707275391, 0.0764211196899414, 0.07754342651367188, 0.0772894744873047, 0.07864934539794922, 0.07737689971923828, 0.07679859161376953, 0.07686508941650391, 0.07681664276123047, 0.07635334777832031, 0.07896511840820312, 0.07704713439941406, 0.07637586975097656, 0.07600418853759766, 0.07678675079345704, 0.0774701156616211, 0.07952438354492188, 0.07784150695800782, 0.07691702270507812, 
0.07652210998535157, 0.07609910583496093, 0.07700054168701172, 0.07884015655517577, 0.0778584976196289, 0.0772655029296875, 0.07657791900634765, 0.07592623901367188, 0.0767484130859375, 0.088609375, 0.07684124755859376, 0.07692694091796876, 0.07628803253173828, 0.07604608154296875, 0.07655449676513672, 0.07636093139648438, 0.07707430267333984, 0.07664937591552734, 0.07635670471191407, 0.07588444519042968, 0.07583795166015625, 0.07933561706542969, 0.07913299560546876, 0.07682048034667968, 0.07634944152832031, 0.07686573028564453, 0.07669229125976562, 0.07708735656738282, 0.07824422454833985, 0.07672739410400391, 0.076223388671875, 0.07600128173828125, 0.07563878631591797, 0.07875993347167969, 0.0775167999267578, 0.0768532485961914, 0.07719936370849609, 0.07684703826904297, 0.07638224029541016, 0.0794974365234375, 0.07777852630615234, 0.07685507202148438, 0.07633689880371093, 0.07585353851318359, 0.07581999969482422, 0.0786903076171875, 0.07701238250732421, 0.07678604888916016, 0.07759808349609375, 0.07691897583007813, 0.07640335845947266, 0.07884185791015624, 0.07714985656738281, 0.07665708923339844, 0.07752870178222657, 0.07695516967773437, 0.07644351959228515, 0.07783309173583984, 0.07729081726074219, 0.0767227554321289, 0.07647245025634766, 0.0770703353881836, 0.07667916870117188, 0.07942963409423828, 0.0773240966796875, 0.07702127838134766, 0.07668547058105468, 0.07685472106933594, 0.07667913818359375, 0.07882339477539063, 0.07868431854248047, 0.07712400054931641, 0.08898012542724609, 0.07724556732177734, 0.07629872131347656, 0.07613868713378906, 0.077082275390625, 0.07686589050292969, 0.07648198699951173, 0.07606739044189453, 0.0763125762939453, 0.07710307312011719, 0.07702256011962891, 0.07647711944580078, 0.07704370880126953, 0.07732633972167968, 0.07682252502441406, 0.077412353515625, 0.07670374298095703, 0.07628950500488281, 0.07657881927490234, 0.07630902099609375, 0.07734188842773437, 0.07704045104980468, 0.07690239715576172, 0.07632646179199219, 0.07692127990722657, 0.0771624984741211, 0.07830118560791016, 0.07692082977294921, 0.0764559326171875, 0.07622860717773437, 0.07674838256835938, 0.0770604476928711, 0.07801222229003907, 0.07686579132080078, 0.07632691192626953, 0.0760442886352539, 0.07704370880126953, 0.07878575897216797, 0.0785927963256836, 0.07682844543457032, 0.07684323120117187, 0.07652732849121094, 0.07649842834472656, 0.07820390319824219, 0.07765379333496093, 0.07698432159423828, 0.07712153625488281, 0.07669462585449219, 0.07665961456298828, 0.07834009552001953, 0.07777629089355469, 0.07709552001953125, 0.07735091400146485, 0.07707965087890625, 0.07648963165283203, 0.07781558227539062, 0.0784754867553711, 0.07729561614990234, 0.07654370880126952, 0.07694537353515625, 0.07664672088623047, 0.07765196990966797, 0.07791603088378907]",tokens/s,12.9873707273089,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback 
(most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 567417 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 68.12 MiB is free. Process 508337 has 14.67 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 293.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,2582.818816,4626.18624,0.0,4223.664128,4030.321664,s,1,12.5106005859375,12.5106005859375,0.0,12.5106005859375,12.5106005859375,12.5106005859375,12.5106005859375,[12.5106005859375],,kWh,0.00015521636474171221,1.711438013303066e-05,5.171115248001068e-05,0.00022404189735475357,,MB,2354.212864,5028.839424,0.0,4613.7344,4385.215488,s,10,4.5934455261230465,0.45934455261230467,0.0008305202456782502,0.4592939453125,0.460198779296875,0.46042984924316405,0.4606147052001953,"[0.45979345703125, 0.45859808349609377, 0.459170166015625, 0.45749017333984376, 0.4593348388671875, 0.4591549377441406, 0.4592530517578125, 0.4601474304199219, 0.4606609191894531, 0.45984246826171876]",tokens/s,557.3158504745974,kWh,1.3671175511742398e-05,1.5076822423530126e-06,9.044047639273558e-06,2.422290539336897e-05,tokens/kWh,10568509.26190217,MB,2387.12832,5033.033728,0.0,4615.831552,4385.218048,s,10,46.93081201171874,4.693081201171875,0.01984113759550168,4.685891357421875,4.7142654785156255,4.723721850585938,4.731286948242188,"[4.7121640625, 4.71120458984375, 4.70286962890625, 4.73317822265625, 4.671486328125, 4.6679462890625, 4.68562060546875, 4.6809072265625, 4.686162109375, 4.67927294921875]",tokens/s,13.42401661072234,kWh,0.00013743562804283504,1.5159832540801457e-05,7.346279614392693e-05,0.00022605825672756348,tokens/kWh,278689.22335327533,,s,630,46.92798857116696,0.07448887074788411,0.000949850986280024,0.07431006622314454,0.07516897506713867,0.0757241886138916,0.07897708831787112,"[0.0762449951171875, 0.07549465942382813, 0.07464832305908203, 0.0746250228881836, 0.07463324737548828, 0.07499884796142578, 0.0748554916381836, 0.07441391754150391, 0.07525965118408204, 0.07435628509521484, 0.07478521728515625, 0.0746846694946289, 0.07477814483642578, 0.07448793792724609, 0.07455289459228516, 0.07442256164550781, 0.07435724639892578, 0.07426048278808593, 0.07416384124755859, 0.07484454345703125, 0.07414169311523437, 0.07439155578613281, 0.07514454650878906, 0.07418537902832031, 0.07435596466064454, 0.07452134704589844, 0.07478681945800782, 0.07462854766845703, 0.07682038116455078, 0.07413833618164062, 0.0750219497680664, 0.07477279663085938, 0.0751308822631836, 0.07497452545166015, 0.0745305633544922, 0.0746666259765625, 0.07473535919189453, 0.075102783203125, 0.07469261169433594, 0.07419084930419922, 0.07460166168212891, 0.07477945709228516, 0.07495065307617188, 0.0747496337890625, 0.07536048126220703, 0.0748109130859375, 0.07516374206542968, 0.07484169769287109, 0.07494541168212891, 0.0748031997680664, 0.07488102722167969, 0.0752266845703125, 0.07556095886230468, 0.07485814666748047, 0.07561500549316406, 0.07500508880615234, 0.07468025970458984, 
0.0746589126586914, 0.07456956481933594, 0.07481954956054687, 0.07453660583496094, 0.07433792114257813, 0.07431053161621094, 0.07696189117431641, 0.07457494354248047, 0.07440608215332031, 0.07448649597167968, 0.07548662567138673, 0.07425440216064454, 0.07438384246826171, 0.07423011016845703, 0.07550924682617187, 0.07473379516601562, 0.07441613006591796, 0.07514112091064454, 0.07521804809570312, 0.07425318145751954, 0.07433401489257813, 0.07421279907226562, 0.07455001831054688, 0.07483731079101562, 0.07462351989746094, 0.07452044677734375, 0.07464070129394532, 0.07457686614990235, 0.07531724548339844, 0.07437497711181641, 0.07452281951904297, 0.07483392333984375, 0.0743403549194336, 0.07439462280273437, 0.07555174255371094, 0.07548931121826172, 0.07456764984130859, 0.076042236328125, 0.07455280303955078, 0.0745927963256836, 0.0743232650756836, 0.07492668914794921, 0.07487619018554688, 0.07451321411132812, 0.074519775390625, 0.074982177734375, 0.07444480133056641, 0.07427251434326172, 0.07430169677734375, 0.07434444427490235, 0.07460470581054687, 0.07461856079101563, 0.07812828826904297, 0.07477532958984374, 0.07489353942871094, 0.07493401336669922, 0.07511196899414062, 0.0747547836303711, 0.07465142059326171, 0.07471945953369141, 0.07444070434570313, 0.07445708465576172, 0.07441788482666016, 0.07461917114257813, 0.07470626831054687, 0.07450691223144532, 0.074676513671875, 0.07443609619140625, 0.0750409927368164, 0.07488102722167969, 0.07442227172851562, 0.07456368255615234, 0.07445891571044921, 0.07443878173828125, 0.07426457977294922, 0.07510345458984374, 0.07477721405029297, 0.07499177551269531, 0.07504067230224609, 0.07475990295410156, 0.07481996917724609, 0.0745656967163086, 0.07447273254394532, 0.07443510437011719, 0.07442240142822265, 0.07469007873535156, 0.07439817810058594, 0.07460969543457031, 0.07440819549560547, 0.07502413177490234, 0.07436592102050782, 0.07493212890625, 0.0748661117553711, 0.07537542724609375, 0.0741456298828125, 0.07423900604248047, 0.07437206268310546, 0.07430454254150391, 0.07480973052978515, 0.07465225219726562, 0.07461827087402344, 0.07469340515136719, 0.07516550445556641, 0.07437926483154297, 0.0744120330810547, 0.07500800323486329, 0.07458211517333985, 0.07435254669189453, 0.07421721649169923, 0.07483158111572266, 0.07422825622558593, 0.07409868621826173, 0.07453900909423829, 0.07422566223144532, 0.07423932647705078, 0.07499635314941407, 0.07420880126953125, 0.07413510131835938, 0.07468275451660156, 0.08027398681640625, 0.07456732940673828, 0.07416867065429687, 0.07406358337402344, 0.07424224090576172, 0.0739922866821289, 0.07444601440429688, 0.07430329895019532, 0.07407289886474609, 0.07429872131347656, 0.07510514831542969, 0.07432128143310547, 0.0755074234008789, 0.07534320068359375, 0.07511865234375, 0.07719586944580079, 0.07460044860839844, 0.07559919738769531, 0.07490582275390625, 0.07488585662841797, 0.07503024291992187, 0.0759903335571289, 0.07493702697753907, 0.07497856140136719, 0.0750456314086914, 0.07531110382080078, 0.07472937774658203, 0.0750340805053711, 0.07525196838378906, 0.07485382080078125, 0.0749372787475586, 0.07565516662597656, 0.07500521850585938, 0.07515824127197265, 0.07536582183837891, 0.07496527862548828, 0.07495286560058594, 0.07696806335449219, 0.07487798309326171, 0.07465673828125, 0.07458611297607422, 0.07611392211914063, 0.07469004821777343, 0.07448220825195312, 0.07517398071289062, 0.07467167663574219, 0.07476461029052735, 0.07540531158447265, 0.07492556762695313, 0.07497315216064453, 0.0750980453491211, 
0.07460924530029298, 0.07468166351318359, 0.075257568359375, 0.07552105712890625, 0.07968966674804688, 0.07477862548828125, 0.0752315216064453, 0.07484124755859375, 0.07451209259033204, 0.07579529571533203, 0.07535411071777344, 0.07428857421875, 0.0748283233642578, 0.0749090576171875, 0.07524976348876954, 0.07452934265136718, 0.07473133087158203, 0.07464569854736328, 0.0745344009399414, 0.0744372787475586, 0.07510368347167969, 0.07482144165039062, 0.07444159698486329, 0.07526793670654297, 0.07460441589355468, 0.0751033935546875, 0.07446144104003906, 0.07417689514160156, 0.07440201568603516, 0.07432630157470703, 0.07415321350097656, 0.07427645111083984, 0.07402912139892578, 0.0745951385498047, 0.07444457244873047, 0.0736954574584961, 0.07353343963623046, 0.07363744354248047, 0.07363200378417968, 0.07358902740478515, 0.07424809265136718, 0.0736312026977539, 0.07369305419921875, 0.07516841888427735, 0.07575552368164062, 0.07418902587890625, 0.07333660888671875, 0.07855104064941407, 0.07376902770996094, 0.07371539306640625, 0.07329312133789062, 0.07667750549316406, 0.07407878112792969, 0.07362681579589844, 0.07357113647460937, 0.07376611328125, 0.07398300933837891, 0.0740943374633789, 0.07380989074707031, 0.07363372802734375, 0.07421056365966797, 0.07389910125732421, 0.07361714935302735, 0.07381606292724609, 0.07387308502197265, 0.0742669448852539, 0.0738590087890625, 0.07332633972167969, 0.07350685119628907, 0.07410857391357421, 0.07415360260009765, 0.07404441833496093, 0.07341260528564453, 0.0734411849975586, 0.07322742462158204, 0.07358048248291016, 0.0732968978881836, 0.07407615661621093, 0.07412940979003907, 0.07363961791992188, 0.07400685119628907, 0.07359410858154297, 0.07349263763427734, 0.07686726379394532, 0.0784925765991211, 0.07383586883544922, 0.07343170928955078, 0.0733655014038086, 0.07492876434326172, 0.07399337768554687, 0.07387532806396484, 0.07415312194824218, 0.07437497711181641, 0.07376399993896485, 0.07369216156005859, 0.07368688201904297, 0.07407548522949219, 0.07380854034423828, 0.07364224243164062, 0.07453670501708984, 0.07408172607421876, 0.07367324829101562, 0.07377645111083984, 0.07448649597167968, 0.07412249755859375, 0.07411334228515624, 0.07356371307373047, 0.07352790069580079, 0.07696985626220704, 0.07394656372070313, 0.073681884765625, 0.07384678649902343, 0.07368646240234375, 0.07441056060791015, 0.07385292816162109, 0.07384268951416016, 0.07389389038085938, 0.07381600189208984, 0.07402828979492188, 0.07472803497314454, 0.07368931579589844, 0.07374982452392578, 0.0735239028930664, 0.07356620788574218, 0.07363913726806641, 0.073934814453125, 0.0739254379272461, 0.07776461029052735, 0.07381977844238281, 0.07362175750732422, 0.07365225219726562, 0.07796745300292969, 0.07394713592529296, 0.07418019104003906, 0.07383462524414063, 0.0736702117919922, 0.07365500640869141, 0.07359101104736328, 0.07374636840820313, 0.07411043548583984, 0.07463922882080078, 0.07382022094726562, 0.07419334411621094, 0.07369436645507813, 0.07410160064697266, 0.07390966033935546, 0.07413410949707032, 0.07382128143310547, 0.07360809326171874, 0.07378125, 0.0738015365600586, 0.07492607879638671, 0.0742318115234375, 0.07456358337402344, 0.07422361755371094, 0.07450418853759766, 0.07388326263427734, 0.07413184356689453, 0.07433773040771484, 0.07382691192626953, 0.07441382598876953, 0.07467849731445313, 0.07404541015625, 0.07464969635009766, 0.07368694305419922, 0.07391439819335938, 0.07472742462158204, 0.07513497924804688, 0.07404147338867187, 0.07403711700439453, 0.07398579406738282, 
0.07414419555664062, 0.07413081359863281, 0.07402508544921875, 0.07429971313476562, 0.07482787322998047, 0.0741817626953125, 0.0742633285522461, 0.07426780700683594, 0.07541814422607422, 0.07400611114501954, 0.07384502410888671, 0.07367276763916016, 0.0743509750366211, 0.07389727783203125, 0.07388025665283203, 0.07374147033691406, 0.07408726501464843, 0.0742047348022461, 0.07491248321533203, 0.07439718627929688, 0.07416035461425781, 0.07497872161865235, 0.07425494384765625, 0.0748031997680664, 0.0739144287109375, 0.07377913665771485, 0.07382550048828125, 0.07381065368652344, 0.07373139190673828, 0.07390489959716796, 0.07398194885253906, 0.07434393310546875, 0.07435110473632812, 0.07342694091796875, 0.0736522216796875, 0.07915110778808594, 0.07773184204101563, 0.07351091003417969, 0.07345680236816406, 0.07323487854003906, 0.07378371429443359, 0.07370082855224609, 0.0793477783203125, 0.0756858901977539, 0.07396761322021485, 0.07409868621826173, 0.07407615661621093, 0.0741905288696289, 0.0739865951538086, 0.07390332794189453, 0.07449068450927734, 0.07398374176025391, 0.07455926513671875, 0.07431926727294921, 0.07403791809082032, 0.07434255981445312, 0.07400764465332031, 0.07398492431640626, 0.07475730895996094, 0.07787398529052734, 0.07419036865234375, 0.07396739196777344, 0.07401542663574219, 0.07385088348388671, 0.07418899536132813, 0.07392237091064453, 0.07479705810546874, 0.07416425323486328, 0.07390409851074219, 0.07395532989501953, 0.07476019287109376, 0.07389532470703125, 0.07362416076660157, 0.07386112213134766, 0.07501824188232421, 0.07383586883544922, 0.07384130859375, 0.0740772476196289, 0.08226620483398438, 0.0767025604248047, 0.07429497528076172, 0.07400224304199218, 0.07373056030273438, 0.07401062774658203, 0.07386112213134766, 0.0735880355834961, 0.07350345611572266, 0.07385494232177735, 0.07377046203613281, 0.07357839965820312, 0.0737635498046875, 0.07382582092285156, 0.07366694641113282, 0.07437506866455078, 0.07393920135498047, 0.07412057495117187, 0.07461321258544922, 0.07416627502441406, 0.07403929901123046, 0.07357151794433593, 0.07416700744628907, 0.07403488159179687, 0.07434896087646484, 0.07341875457763672, 0.07363744354248047, 0.07363833618164063, 0.07461065673828125, 0.07384422302246094, 0.0736526107788086, 0.07418879699707032, 0.07364559936523438, 0.07432630157470703, 0.07445113372802735, 0.07396556854248047, 0.07439993286132812, 0.07418630218505859, 0.07400678253173829, 0.0741928939819336, 0.07542355346679687, 0.07381011199951172, 0.07366646575927735, 0.073668701171875, 0.07354386901855468, 0.0740137939453125, 0.07380652618408202, 0.07399222564697265, 0.07393689727783204, 0.07413065338134765, 0.07458076477050782, 0.07389183807373047, 0.07425129699707031, 0.07398294067382813, 0.074136962890625, 0.07388838195800781, 0.07425638580322266, 0.07368399810791015, 0.0736993637084961, 0.07392726135253906, 0.07404377746582032, 0.07430960083007812, 0.07410591888427734, 0.07453196716308594, 0.07438224029541016, 0.07420317077636719, 0.07597724914550781, 0.07953587341308593, 0.0744161605834961, 0.07436678314208985, 0.07420800018310547, 0.07396947479248046, 0.07405996704101563, 0.07426662445068359, 0.07440528106689454, 0.07405423736572266, 0.07470230102539062, 0.07478326416015625, 0.07450326538085937, 0.0778920669555664, 0.07787974548339843, 0.07415193939208985, 0.07409458923339844, 0.07422156524658204, 0.0741030044555664, 0.07411711883544922, 0.07401187133789063, 0.07423571014404297, 0.0741421127319336, 0.07388511657714844, 0.0745337905883789, 0.07471564483642579, 
0.07416156768798828, 0.07383721923828125, 0.07389904022216796, 0.07400672149658204, 0.07412754821777344, 0.07388979339599609, 0.07447142028808594, 0.07375933074951171, 0.07387526702880859, 0.07407225799560548, 0.07394438171386719, 0.07917033386230468, 0.07389759826660157, 0.07407030487060547, 0.07388972473144531, 0.07357881927490234, 0.07388102722167969, 0.07367302703857422, 0.07397990417480468, 0.07435043334960938, 0.0741640625, 0.07379385375976563, 0.07427891540527344, 0.07458815765380859, 0.07470694732666015, 0.07373824310302735, 0.07367884826660157, 0.07390207672119141, 0.07382777404785157, 0.0739203872680664, 0.07397650909423828, 0.07808345794677735, 0.07418511962890625, 0.07474752044677735, 0.07402761840820313, 0.07434444427490235, 0.07437651062011719, 0.07439635467529297, 0.07424409484863281, 0.07402496337890625, 0.0740282211303711, 0.07392339324951172, 0.0741049575805664, 0.07437606048583985, 0.07408332824707031, 0.07404297637939453, 0.07402742767333985, 0.07496812438964844, 0.07407302093505859, 0.07433561706542968, 0.07427340698242188, 0.07424205017089844, 0.07411241912841797, 0.07451503753662109, 0.07483503723144531, 0.07408914947509766, 0.0739268798828125, 0.0741580810546875, 0.07413145446777344, 0.07407206726074218, 0.07439177703857422, 0.07401654052734374]",tokens/s,13.424824271864873,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,14764.527616,10150.084608,0.0,9747.562496,9611.730944,s,1,33.00772265625,33.00772265625,0.0,33.00772265625,33.00772265625,33.00772265625,33.00772265625,[33.00772265625],,kWh,0.0007506120465666451,8.278632638392777e-05,0.00025792909523197904,0.0010913274681825518,,MB,5056.622592,10535.960576,0.0,10112.466944,9989.955584,s,10,1.3040770111083981,0.13040770111083982,0.0007863629769333038,0.1306118850708008,0.13111258239746093,0.13113518676757813,0.13115327026367188,"[0.13017738342285157, 0.13060476684570313, 0.13110755920410155, 0.13095603942871092, 0.13115779113769532, 0.13097958374023438, 0.12936029052734374, 0.13061900329589843, 0.12860243225097656, 0.13051216125488282]",tokens/s,1963.0742495982902,kWh,3.82578182802993e-06,4.2191391339585085e-07,2.5282018782603386e-06,6.775897619686119e-06,tokens/kWh,37780972.25912022,MB,5060.886528,10540.15488,0.0,10114.564096,9989.958144,s,10,78.35289404296874,7.835289404296875,0.0429330536046556,7.846109863281249,7.874216650390625,7.883861987304687,7.891578256835937,"[7.8644130859375, 7.89350732421875, 7.8425986328125, 7.84962109375, 7.8720732421875, 7.80279541015625, 7.7684638671875, 7.7576123046875, 7.83447021484375, 7.8673388671875]",tokens/s,8.04054537736038,kWh,0.00022912569818030253,2.527366168440728e-05,0.0001079054420241364,0.00036230480188884616,tokens/kWh,173886.73755234462,,s,630,78.35011090087886,0.12436525539822048,0.0013079940196362436,0.12438577651977539,0.12536883850097658,0.12621311111450195,0.12855764724731447,"[0.12502079772949218, 0.12456947326660156, 0.12466300964355469, 0.12464800262451171, 0.12456790161132812, 0.12517574310302734, 0.12477241516113281, 0.1248727035522461, 0.12447138977050781, 0.12328950500488281, 0.12377497863769531, 0.12453273773193359, 0.12476416015625, 0.12460403442382813, 0.12420464324951172, 0.12501683044433592, 0.12453890991210938, 
0.12476825714111328, 0.12458930969238281, 0.1249178237915039, 0.12465631866455078, 0.12500582122802734, 0.12507750701904297, 0.12500291442871095, 0.12683164978027345, 0.12731168365478515, 0.12433833312988281, 0.12493193817138672, 0.12489673614501953, 0.12505142211914064, 0.1248358383178711, 0.12432383728027344, 0.12433817291259766, 0.1244056625366211, 0.1247927017211914, 0.12492412567138672, 0.12419071960449218, 0.124999267578125, 0.12547650909423827, 0.12460467529296874, 0.12430777740478516, 0.1247418212890625, 0.12432160186767578, 0.12457593536376953, 0.12419087982177734, 0.1246431655883789, 0.1294285430908203, 0.12531721496582032, 0.12453727722167969, 0.12462525177001953, 0.1268060836791992, 0.12434226989746094, 0.12446310424804688, 0.12451360321044921, 0.12508338928222656, 0.1255516128540039, 0.12445484924316406, 0.12453638458251953, 0.1243465576171875, 0.12417462158203126, 0.12464739227294921, 0.12427587127685547, 0.1245059814453125, 0.127859619140625, 0.12504064178466798, 0.12456345367431641, 0.12438227081298828, 0.12468482971191407, 0.12433859252929688, 0.12486166381835938, 0.1254101791381836, 0.12452365112304688, 0.12692355346679687, 0.12536831665039064, 0.12446514892578125, 0.12461011505126954, 0.12489513397216796, 0.12532323455810546, 0.1255818862915039, 0.12471705627441407, 0.12447331237792969, 0.12441190338134765, 0.12481455993652343, 0.1250414733886719, 0.12540108489990234, 0.12537353515625, 0.12495059204101562, 0.12444464111328125, 0.1245921630859375, 0.12454998779296875, 0.12482559967041015, 0.12540108489990234, 0.1252616958618164, 0.12515955352783203, 0.1252147216796875, 0.12548483276367187, 0.12618978881835938, 0.12549244689941405, 0.12501481628417968, 0.12489933013916016, 0.1250273895263672, 0.12508870697021485, 0.12631651306152344, 0.12508982086181641, 0.1285877685546875, 0.1245462417602539, 0.1250742416381836, 0.12468006134033204, 0.12502438354492187, 0.12531712341308593, 0.12529254150390626, 0.12489321899414063, 0.12505455780029298, 0.12455683135986328, 0.12469110107421875, 0.12482694244384765, 0.12457254028320312, 0.12811453247070312, 0.12526403045654297, 0.12480921936035157, 0.12484754943847656, 0.1250655975341797, 0.12472265625, 0.1250535659790039, 0.12497232055664062, 0.1332027587890625, 0.12497917175292969, 0.12425148773193359, 0.1242581787109375, 0.12390393829345703, 0.1241809310913086, 0.12416639709472656, 0.1249936981201172, 0.12454707336425781, 0.12420703887939454, 0.12429459381103515, 0.12402889251708984, 0.12425078582763673, 0.12417542266845703, 0.12409292602539063, 0.12408415985107422, 0.12469229125976562, 0.12761158752441407, 0.12472115325927734, 0.12411494445800782, 0.12415289306640626, 0.1240987548828125, 0.12403174591064453, 0.12428060913085938, 0.12468780517578125, 0.12366633605957031, 0.12473638153076172, 0.12368281555175781, 0.12392652893066407, 0.12380678558349609, 0.12419084930419921, 0.12382291412353516, 0.12445081329345703, 0.12380569458007812, 0.12395315551757813, 0.12373401641845704, 0.12375682830810547, 0.12369257354736328, 0.12397996520996094, 0.1243156509399414, 0.12503040313720704, 0.12511436462402345, 0.12414157104492188, 0.123991455078125, 0.12438604736328125, 0.12384435272216797, 0.12421539306640625, 0.12451372528076173, 0.12494496154785156, 0.12405939483642578, 0.12447068786621093, 0.12437999725341797, 0.12434579467773438, 0.124652099609375, 0.12588963317871094, 0.1252545623779297, 0.12514918518066406, 0.12601139068603515, 0.1293695068359375, 0.12390573120117188, 0.12433500671386719, 0.12421324920654297, 0.12491728210449218, 
0.12485577392578125, 0.1254578857421875, 0.12443907165527343, 0.12378675079345704, 0.12387737274169922, 0.12435507202148438, 0.12425625610351562, 0.12478585815429688, 0.12486275482177735, 0.12446112060546875, 0.1256530227661133, 0.12457619476318359, 0.12444630432128906, 0.124279296875, 0.12458553314208984, 0.13551036071777345, 0.12522418975830077, 0.12434304046630859, 0.1242767333984375, 0.12414771270751954, 0.1240821762084961, 0.1238814697265625, 0.12384255981445312, 0.1251205139160156, 0.1260212173461914, 0.12437328338623047, 0.12454720306396484, 0.12397264099121094, 0.12435964965820312, 0.12408831787109376, 0.12417183685302734, 0.12411949157714844, 0.12451158142089844, 0.12848390197753906, 0.1238521957397461, 0.12373004913330078, 0.12354412841796875, 0.12473139190673828, 0.12394806671142578, 0.12431459045410156, 0.12395315551757813, 0.12392038726806641, 0.12485951995849609, 0.12455731201171875, 0.12463398742675781, 0.12406774139404297, 0.12430710601806641, 0.1266336669921875, 0.12455398559570313, 0.12410880279541016, 0.12419379425048828, 0.12357734680175782, 0.12409037017822265, 0.12374614715576172, 0.12435008239746094, 0.1238369598388672, 0.12425775909423828, 0.124799072265625, 0.12433657836914062, 0.1236644515991211, 0.12429631805419922, 0.12367935943603516, 0.12395539093017578, 0.12395110321044922, 0.12506028747558592, 0.12381062316894531, 0.123608642578125, 0.12395359802246093, 0.12400755310058593, 0.12387213134765625, 0.12412313842773437, 0.12393062591552734, 0.1254993896484375, 0.12553932952880859, 0.1263439712524414, 0.12458415985107422, 0.12443852996826171, 0.12463308715820312, 0.12471398162841797, 0.12448249816894531, 0.12455328369140625, 0.12462454223632813, 0.12429507446289062, 0.12430790710449219, 0.12472525024414062, 0.1247984619140625, 0.12471552276611328, 0.12565503692626953, 0.12451634979248047, 0.12471481323242188, 0.1240855712890625, 0.12461350250244141, 0.12466381072998046, 0.12458745574951172, 0.13558840942382813, 0.12511811065673828, 0.12454249572753906, 0.12520291137695314, 0.12428905487060547, 0.12492422485351562, 0.12606854248046875, 0.12483324432373047, 0.12497574615478516, 0.12456124877929688, 0.12470297241210937, 0.12508342742919923, 0.12413974761962891, 0.12447449493408203, 0.12462911987304688, 0.12433888244628906, 0.12451545715332031, 0.12454598236083984, 0.12687769317626954, 0.12505088043212892, 0.12452998352050781, 0.1248795166015625, 0.1255564193725586, 0.12493859100341796, 0.1252103042602539, 0.1252007064819336, 0.12554048156738282, 0.12497398376464844, 0.12476905822753906, 0.1250750045776367, 0.12535443115234374, 0.12509756469726563, 0.12475148773193359, 0.12547891235351563, 0.12519423675537109, 0.1251102752685547, 0.1253191680908203, 0.1253006057739258, 0.12562854766845702, 0.12562201690673827, 0.1252209243774414, 0.12443852996826171, 0.12513836669921874, 0.12523702239990234, 0.12489212799072266, 0.12510617828369142, 0.12487010955810547, 0.12525161743164062, 0.12355570983886718, 0.12303218841552735, 0.12313600158691407, 0.12283058929443359, 0.12296198272705078, 0.12309481811523437, 0.12274524688720703, 0.12286566162109375, 0.12382617950439453, 0.12391375732421875, 0.1227432632446289, 0.12673664093017578, 0.12312754821777344, 0.1233317108154297, 0.12272025299072266, 0.12330278778076172, 0.12360089874267578, 0.1235775375366211, 0.123744384765625, 0.12314214324951171, 0.12330818939208985, 0.12301366424560548, 0.12333798217773438, 0.12650764465332032, 0.12382425689697266, 0.12376268768310547, 0.12302492523193359, 0.1228210906982422, 0.12290662384033203, 
0.12281855773925782, 0.12265676879882813, 0.12472319793701171, 0.12687913513183594, 0.12308541107177734, 0.12302950286865234, 0.12405350494384766, 0.12359270477294922, 0.12313906860351563, 0.12330290985107421, 0.12300492858886719, 0.12389702606201172, 0.12308563232421875, 0.12275507354736329, 0.12311122894287109, 0.12302355194091796, 0.12282675170898437, 0.122611328125, 0.12261743927001953, 0.12450796508789062, 0.12420841979980468, 0.12272322845458984, 0.12239644622802734, 0.12243762969970703, 0.12270409393310547, 0.1226743392944336, 0.12282966613769532, 0.12293119812011719, 0.12274073791503906, 0.12362681579589843, 0.12249362945556641, 0.12286975860595703, 0.12309056091308594, 0.1268637466430664, 0.12319744110107422, 0.12359490966796875, 0.1236744613647461, 0.12304998779296875, 0.12288204956054688, 0.1228448028564453, 0.12339647674560547, 0.12284873962402344, 0.12377289581298828, 0.12389177703857422, 0.12310530853271484, 0.12300077056884766, 0.12372022247314453, 0.12429312133789062, 0.12336678314208985, 0.12266547393798828, 0.1236911392211914, 0.12405718231201172, 0.12364022064208985, 0.12410800170898438, 0.12396797180175781, 0.1236824951171875, 0.12320012664794922, 0.12325590515136718, 0.12414617919921875, 0.12373206329345703, 0.12452076721191406, 0.12292038726806641, 0.12795279693603515, 0.12256294250488281, 0.12271849822998047, 0.12267520141601562, 0.12314828491210937, 0.12332032012939453, 0.12261376190185547, 0.12260147094726563, 0.12293257904052735, 0.12265673828125, 0.12279264068603515, 0.12274278259277344, 0.12314947509765625, 0.12317782592773438, 0.12239008331298828, 0.12273478698730468, 0.12266316986083985, 0.122650146484375, 0.1229909439086914, 0.1223904037475586, 0.12644297790527342, 0.12235667419433593, 0.1224785919189453, 0.12284912109375, 0.12287709045410156, 0.12364288330078126, 0.12303327941894532, 0.12238060760498047, 0.12295168304443359, 0.12272844696044923, 0.1226783676147461, 0.12272322845458984, 0.12336236572265626, 0.12270687866210937, 0.12248818969726563, 0.12277619171142579, 0.12316409301757812, 0.12326969909667969, 0.12428854370117187, 0.12292070770263672, 0.12332889556884766, 0.12296556854248047, 0.12325494384765626, 0.12290643310546875, 0.12387820434570312, 0.12300902557373047, 0.12240652465820312, 0.12308723449707032, 0.12324396514892579, 0.1230335693359375, 0.12307107543945313, 0.12252774047851563, 0.12294969940185548, 0.12669872283935546, 0.12226351928710938, 0.12304665374755859, 0.12257215881347656, 0.12281648254394531, 0.12264412689208984, 0.12296089935302734, 0.1231994857788086, 0.12295126342773438, 0.12332054138183594, 0.12271635437011719, 0.12229222106933593, 0.12363951873779297, 0.12299849700927734, 0.12283347320556641, 0.12332563018798828, 0.12224979400634765, 0.12363766479492187, 0.12257520294189453, 0.12321353912353515, 0.12321772766113281, 0.12297468566894532, 0.12292505645751953, 0.12298649597167968, 0.12280124664306641, 0.12412611389160157, 0.12289555358886718, 0.12520326232910156, 0.12318924713134766, 0.12326486206054688, 0.1234329605102539, 0.12252365112304688, 0.12385420989990234, 0.1230035171508789, 0.12352291107177735, 0.12315564727783203, 0.12333360290527344, 0.1237834243774414, 0.1237870101928711, 0.12371456146240234, 0.12295680236816406, 0.12267667388916016, 0.1232099838256836, 0.12360940551757812, 0.1237176284790039, 0.12318515014648437, 0.1276269760131836, 0.12582736206054687, 0.12350425720214844, 0.12333299255371094, 0.12361254119873047, 0.12438182067871094, 0.12323206329345703, 0.12299040222167969, 0.12332479858398437, 
0.12440774536132812, 0.12487407684326172, 0.12483577728271485, 0.12451100921630859, 0.12448684692382812, 0.12468921661376953, 0.12443427276611328, 0.1244051513671875, 0.12443520355224609, 0.12494825744628907, 0.12623219299316407, 0.12450057220458985, 0.1244610595703125, 0.12459417724609376, 0.12411494445800782, 0.12468224334716797, 0.12490547180175782, 0.12483174133300781, 0.12496896362304688, 0.12603187561035156, 0.12487474822998047, 0.12501811218261719, 0.12463104248046875, 0.1254905242919922, 0.12484060668945313, 0.12511968231201173, 0.12438550567626953, 0.12472102355957031, 0.12462921905517578, 0.1251677474975586, 0.12503062438964843, 0.12516754913330078, 0.12461456298828125, 0.1251487045288086, 0.1241126708984375, 0.12539971160888672, 0.12459574127197266, 0.12457849884033204, 0.12628585815429688, 0.12460073852539062, 0.12483174133300781, 0.1248290557861328, 0.12506380462646485, 0.12454707336425781, 0.12520652770996094, 0.12471910095214844, 0.12461001586914063, 0.12506716918945313, 0.12596083068847655, 0.12454710388183594, 0.12468803405761719, 0.12810595703125, 0.1253302688598633, 0.1245709457397461, 0.12424006652832031, 0.12449795532226562, 0.12444652557373047, 0.12403718566894531, 0.12418236541748047, 0.1244793930053711, 0.12468899536132813, 0.12408979034423828, 0.12448880004882812, 0.1288858184814453, 0.12497897338867188, 0.12450905609130859, 0.12428697967529297, 0.12430531311035156, 0.12477654266357421, 0.12420416259765625, 0.12739017486572266, 0.12833628845214845, 0.12486450958251953, 0.12440902709960938, 0.12474147033691406, 0.12562940979003906, 0.1248473892211914, 0.12455599975585938, 0.1243457260131836, 0.12461734771728515, 0.12512655639648437, 0.12432598114013672, 0.12498262023925781, 0.12452275085449219, 0.12469475555419922, 0.12423590087890625, 0.12456550598144531, 0.12423379516601563, 0.12453683471679687, 0.12586313629150392, 0.12458882904052734, 0.12413951873779297, 0.12492185974121094, 0.12460237121582031, 0.12416802978515624, 0.12513910675048828, 0.12448767852783203, 0.12389778900146485, 0.12428601837158203, 0.12408729553222657, 0.12383846282958984]",tokens/s,8.040830992530642,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,14764.07296,10150.084608,0.0,9747.562496,9611.730944,s,1,33.15367578125,33.15367578125,0.0,33.15367578125,33.15367578125,33.15367578125,33.15367578125,[33.15367578125],,kWh,0.0007568983909082968,8.348434744159867e-05,0.00026100854214000657,0.001101391280489902,,MB,3915.055104,10514.989056,0.0,10091.495424,9989.955584,s,10,1.245292938232422,0.12452929382324218,0.0012170905094748274,0.12440388870239258,0.1251871162414551,0.1264851749420166,0.12752362190246583,"[0.12439920043945313, 0.12444028472900391, 0.12438365173339844, 0.12454233551025391, 0.12440857696533203, 0.12489865875244141, 0.12412953948974609, 0.12294931030273437, 0.12778323364257813, 
0.12335814666748048]",tokens/s,2055.7412006476834,kWh,3.6459944179525947e-06,4.019716594609737e-07,2.4074367681976217e-06,6.45540284561119e-06,tokens/kWh,39656704.02336637,MB,3915.055104,10517.086208,0.0,10093.592576,9989.958144,s,10,75.30547900390624,7.530547900390625,0.03694728337539499,7.540749755859375,7.563253271484375,7.579426684570313,7.592365415039063,"[7.59560009765625, 7.5596591796875, 7.53667138671875, 7.5565517578125, 7.544828125, 7.55194140625, 7.5084033203125, 7.4729228515625, 7.4875234375, 7.49137744140625]",tokens/s,8.365925140285219,kWh,0.00021797785446787325,2.4044112212092088e-05,0.00010387834270480004,0.00034590030938476537,tokens/kWh,182133.40170771972,,s,630,75.30266870117181,0.11952804555741568,0.0012873347932934084,0.11934614562988281,0.12068046951293945,0.12131256561279297,0.12491760063171388,"[0.12112989044189452, 0.12136863708496094, 0.12097334289550782, 0.12020931243896485, 0.11989142608642578, 0.12003699493408203, 0.12011408233642579, 0.12008448028564453, 0.12500991821289062, 0.12086067199707032, 0.12154812622070313, 0.11984284973144531, 0.11993260955810547, 0.11999311828613281, 0.12013510131835937, 0.11975548553466797, 0.11983446502685546, 0.12111670684814453, 0.12115980529785156, 0.1270305633544922, 0.12007689666748046, 0.12007135772705078, 0.1197352294921875, 0.1229332504272461, 0.11997388458251954, 0.12113919830322266, 0.12087500762939453, 0.12153395080566406, 0.11980582427978516, 0.11991219329833984, 0.11974642944335938, 0.1194582748413086, 0.11936822509765625, 0.11974246215820313, 0.11989606475830078, 0.12079488372802734, 0.11972383880615234, 0.12304185485839844, 0.12124403381347656, 0.12051046752929688, 0.12026025390625, 0.1205098876953125, 0.12040499114990234, 0.12101417541503906, 0.12068045043945312, 0.12010454559326172, 0.12016403198242187, 0.12012413024902344, 0.12072297668457031, 0.12037100982666016, 0.1221368637084961, 0.12027532958984374, 0.12053497314453125, 0.11946604919433594, 0.11969126129150391, 0.12021965026855469, 0.11959081268310547, 0.11984905242919922, 0.1202640609741211, 0.11988956451416016, 0.12040281677246094, 0.1197323226928711, 0.1192959976196289, 0.11934076690673828, 0.11919945526123046, 0.11942707061767578, 0.11927597045898437, 0.11986511993408203, 0.11971663665771484, 0.11903778839111329, 0.11920396423339844, 0.11927977752685547, 0.11914141082763671, 0.11928352355957031, 0.11943369293212891, 0.11986752319335937, 0.11977059173583984, 0.11911264038085938, 0.12028108978271485, 0.11944140625, 0.1192438735961914, 0.11923548889160156, 0.11952326202392578, 0.11974435424804687, 0.12121315002441406, 0.12024012756347656, 0.12009085083007813, 0.11986934661865234, 0.11974028778076172, 0.11948441314697265, 0.12067145538330078, 0.1269542694091797, 0.12255590057373048, 0.12087276458740234, 0.11968895721435546, 0.11915792083740234, 0.11956816101074219, 0.11896012878417969, 0.119144287109375, 0.11967689514160157, 0.11979206085205078, 0.1295721893310547, 0.12012924957275391, 0.1201789779663086, 0.11976131439208984, 0.11922637176513672, 0.11941273498535156, 0.11982185363769532, 0.11934467315673829, 0.12006813049316406, 0.12000758361816406, 0.1196292495727539, 0.1198331527709961, 0.11915583801269532, 0.11925740814208985, 0.11975676727294922, 0.11976150512695312, 0.11993497467041016, 0.11951718139648437, 0.11928550720214844, 0.12283519744873046, 0.11959500885009766, 0.11924889373779297, 0.11917660522460938, 0.11937792205810546, 0.11938262176513671, 0.11993084716796874, 0.11991030120849609, 0.1198679656982422, 0.12020098876953125, 0.11936396789550781, 
0.11941238403320313, 0.11920416259765625, 0.12091600036621093, 0.11981120300292969, 0.11925373077392579, 0.12031606292724609, 0.11917926025390625, 0.1187265625, 0.11928543853759765, 0.11898003387451171, 0.1191388168334961, 0.11963868713378906, 0.1191401596069336, 0.11925708770751953, 0.11968511962890625, 0.12074310302734376, 0.1198656005859375, 0.11979203033447265, 0.11910160064697266, 0.11939215850830077, 0.119370849609375, 0.11934207916259766, 0.11960934448242187, 0.1184869155883789, 0.11877798461914063, 0.11905580902099609, 0.11868358612060546, 0.12033235168457031, 0.11946451568603515, 0.12005078125, 0.11892582702636718, 0.11954377746582032, 0.11879203033447265, 0.11895849609375, 0.1190704345703125, 0.11926563262939453, 0.11887619018554688, 0.11955824279785156, 0.11899852752685547, 0.11897277069091797, 0.11921833801269531, 0.11910553741455078, 0.12469158172607422, 0.11890777587890625, 0.1204112319946289, 0.11991337585449219, 0.11906195068359375, 0.11865555572509766, 0.11935743713378906, 0.11907276916503906, 0.11998822021484375, 0.11891506958007812, 0.12446924591064454, 0.12446883392333985, 0.11924041748046875, 0.11871711730957031, 0.1187154541015625, 0.1192407989501953, 0.12394003295898437, 0.1191739501953125, 0.11964620971679688, 0.11958271789550781, 0.12034864044189453, 0.11946339416503907, 0.12012947082519532, 0.11947891235351563, 0.11954537963867187, 0.11961808013916016, 0.11940812683105469, 0.11901996612548828, 0.12502559661865234, 0.1210532455444336, 0.1199151382446289, 0.12117810821533204, 0.11941232299804687, 0.11961740875244141, 0.11966038513183594, 0.11956294250488281, 0.11996160125732422, 0.11964415740966797, 0.11954752349853516, 0.1204532470703125, 0.11956483459472657, 0.11943907165527344, 0.11948204803466797, 0.1218663330078125, 0.11956243133544922, 0.11956227111816406, 0.11948265838623047, 0.11944866943359375, 0.12009331512451171, 0.11952668762207032, 0.11954659271240234, 0.12104672241210937, 0.11919734191894531, 0.12036918640136719, 0.11933097839355469, 0.1192760009765625, 0.11926732635498047, 0.12084611511230468, 0.11949430084228516, 0.11978809356689453, 0.11933900451660157, 0.119609375, 0.11979769897460937, 0.11952649688720703, 0.11940959930419921, 0.11970127868652344, 0.12007193756103515, 0.1195810546875, 0.11982857513427735, 0.11937792205810546, 0.12119039916992187, 0.11925872039794921, 0.11934761810302734, 0.1193524169921875, 0.12015100860595704, 0.11955181121826172, 0.11987161254882812, 0.11997532653808594, 0.12074454498291015, 0.1200479965209961, 0.11989810943603516, 0.11980332946777343, 0.12413996887207031, 0.12120195007324219, 0.120432861328125, 0.12017862701416016, 0.11979436492919922, 0.11980745697021485, 0.11967526245117187, 0.11985049438476562, 0.12112694549560547, 0.12195244598388671, 0.12357868957519531, 0.11947840118408203, 0.11981414031982422, 0.11933062744140625, 0.11939155578613281, 0.11918630218505859, 0.11986886596679687, 0.11995177459716796, 0.12110160064697266, 0.11921292877197266, 0.11958271789550781, 0.11931414031982422, 0.1192957763671875, 0.11937868499755859, 0.11933213043212891, 0.1193108139038086, 0.11971695709228515, 0.1189889907836914, 0.12226223754882813, 0.11969318389892578, 0.11940406036376953, 0.11907952117919922, 0.11946598052978516, 0.11922022247314454, 0.11962358093261719, 0.11914214324951172, 0.11894153594970704, 0.11953202819824219, 0.11990835571289063, 0.1192816619873047, 0.11908470153808594, 0.11919599914550781, 0.11902735900878907, 0.11998448181152344, 0.1187732162475586, 0.11887836456298828, 0.11949708557128906, 
0.11984502410888671, 0.1191258544921875, 0.1190868148803711, 0.11872633361816406, 0.11947837066650391, 0.11964672088623046, 0.11884912109375, 0.11947869110107422, 0.11904560089111328, 0.11897705841064453, 0.1191233901977539, 0.12013407897949219, 0.11927145385742187, 0.11990863800048829, 0.11918153381347656, 0.11978546905517579, 0.11923046112060547, 0.11993724822998048, 0.11919747161865234, 0.11930003356933594, 0.12048799896240234, 0.11935049438476562, 0.12040857696533203, 0.1199864959716797, 0.11944137573242188, 0.11910050964355469, 0.11940547180175781, 0.11988323211669921, 0.11998876953125, 0.11964620971679688, 0.11965446472167969, 0.11932870483398438, 0.11973426818847656, 0.11958041381835938, 0.11979801940917968, 0.11985052490234376, 0.12189743804931641, 0.12448153686523437, 0.11930009460449219, 0.11900109100341796, 0.1195704345703125, 0.11904115295410156, 0.1195076141357422, 0.119050048828125, 0.11935568237304688, 0.12555686187744142, 0.120368896484375, 0.11980210876464843, 0.11972329711914062, 0.1193702392578125, 0.11937404632568359, 0.11928883361816406, 0.11935641479492187, 0.11980550384521485, 0.11964665222167968, 0.11962745666503906, 0.1193636474609375, 0.11937955474853515, 0.11954582214355469, 0.11933356475830079, 0.11920918273925782, 0.11919404602050782, 0.12093440246582031, 0.11919599914550781, 0.12361702728271484, 0.11982463836669922, 0.11933213043212891, 0.11948834991455078, 0.11939315032958984, 0.11948851013183594, 0.12035481262207032, 0.12052582550048828, 0.11941580963134765, 0.11970992279052735, 0.11950450897216797, 0.11955010986328125, 0.11923046112060547, 0.11978083038330078, 0.11987404632568359, 0.11915471649169922, 0.11957453155517578, 0.11989923095703126, 0.11928896331787109, 0.11940022277832031, 0.11946189117431641, 0.11971132659912109, 0.11994294738769531, 0.11953411102294922, 0.11962995147705079, 0.11930400085449219, 0.11896768188476563, 0.11925558471679687, 0.11943523406982422, 0.11924441528320312, 0.12005010986328125, 0.11893577575683593, 0.1189027862548828, 0.11912601470947265, 0.11924639892578125, 0.11895398712158203, 0.11876131439208984, 0.11909590148925782, 0.12109004974365234, 0.11915673828125, 0.11882048034667969, 0.12008281707763672, 0.11935948944091797, 0.11908038330078125, 0.11943788909912109, 0.11940041351318359, 0.11940678405761719, 0.11952726745605469, 0.11887808227539062, 0.11817308807373048, 0.11783395385742187, 0.11851007843017577, 0.11791935729980468, 0.11808806610107422, 0.12650905609130858, 0.11930220794677734, 0.11851884460449219, 0.11816844940185547, 0.11816140747070313, 0.11868569946289062, 0.11866726684570313, 0.11817984008789062, 0.1192959976196289, 0.1187799072265625, 0.12362751770019531, 0.11845772552490234, 0.11788761901855468, 0.11767596435546875, 0.11854022216796875, 0.11836224365234375, 0.11860787200927735, 0.1178082275390625, 0.11833232116699219, 0.11816102600097657, 0.11784435272216796, 0.11875331115722657, 0.11867273712158204, 0.11825849914550782, 0.11821241760253906, 0.11860787200927735, 0.11868099212646484, 0.11826850891113282, 0.1212252197265625, 0.118287841796875, 0.11892176055908203, 0.1188823013305664, 0.11844198608398437, 0.11814911651611328, 0.11886182403564453, 0.11872051239013671, 0.11777587127685547, 0.11776255798339844, 0.11857484436035157, 0.11836009979248047, 0.11820873260498047, 0.11836732482910156, 0.11861494445800781, 0.12193791961669923, 0.11879833221435547, 0.11808767700195312, 0.1183843231201172, 0.11881094360351563, 0.11871577453613281, 0.11847254180908204, 0.11849174499511719, 0.11891935729980468, 
0.11867279815673829, 0.11832176208496094, 0.11846451568603515, 0.11872278594970703, 0.11939945220947265, 0.11890969848632812, 0.11866521453857422, 0.1196472625732422, 0.1181619873046875, 0.11814940643310547, 0.11802432250976562, 0.11848294067382813, 0.11860982513427734, 0.1182229461669922, 0.11929804992675781, 0.11806707000732422, 0.11845664215087891, 0.11815910339355469, 0.11935737609863281, 0.11820575714111328, 0.11864927673339844, 0.1187100830078125, 0.11850540924072266, 0.11806531524658204, 0.11836188507080078, 0.11776435089111328, 0.1182069091796875, 0.11776358032226562, 0.11784806060791016, 0.1206806411743164, 0.11822291564941406, 0.11865523529052735, 0.11966252899169921, 0.11871033477783204, 0.11814911651611328, 0.11853619384765625, 0.11916307067871093, 0.11864044952392579, 0.11939151763916016, 0.11896063995361328, 0.12455500793457032, 0.11881865692138673, 0.11837913513183594, 0.11843513488769532, 0.1184365463256836, 0.11927142333984375, 0.11851340484619141, 0.11885903930664063, 0.11887510681152344, 0.11864678192138672, 0.11875737762451172, 0.11866521453857422, 0.11854819488525391, 0.11931881713867187, 0.11928781127929687, 0.11851945495605469, 0.11982633972167969, 0.11847519683837891, 0.11821875, 0.12180274963378906, 0.11842307281494141, 0.11834210968017578, 0.11940230560302735, 0.11841763305664063, 0.11856687927246094, 0.11931597137451172, 0.1189032974243164, 0.1185054702758789, 0.1182608642578125, 0.11808038330078124, 0.11900518035888671, 0.1188823013305664, 0.11837644958496094, 0.11917440032958984, 0.11929472351074219, 0.11853129577636719, 0.11857180786132812, 0.1184942398071289, 0.11821343994140625, 0.11917529296875, 0.11875670623779297, 0.11814982604980469, 0.11893145751953126, 0.11865846252441406, 0.11848764801025391, 0.1180603485107422, 0.11808838653564453, 0.11891302490234375, 0.11977728271484375, 0.11824127960205078, 0.11846598052978516, 0.11860566711425781, 0.11840898895263671, 0.118098876953125, 0.11824127960205078, 0.11886691284179687, 0.11886297607421875, 0.11881699371337891, 0.11865564727783204, 0.11842969512939452, 0.11921167755126953, 0.11823753356933593, 0.1183536605834961, 0.11962393951416016, 0.11923046112060547, 0.11909497833251953, 0.11965471649169922, 0.11863654327392578, 0.11902877044677734, 0.11865500640869141, 0.11840812683105469, 0.11905795288085938, 0.119257568359375, 0.11833094024658203, 0.12104338836669921, 0.12085862731933594, 0.1208440933227539, 0.11861625671386719, 0.11885363006591797, 0.11858937835693359, 0.11903187561035156, 0.11917721557617188, 0.11877171325683594, 0.12405350494384766, 0.11837993621826172, 0.11804732513427735, 0.11686093139648437, 0.11748870086669921, 0.11851462554931641, 0.11939225769042969, 0.11835801696777344, 0.11818589019775391, 0.11874655914306641, 0.1182966079711914, 0.11836873626708984, 0.11842166137695312, 0.11880191802978515, 0.11901910400390625, 0.11838556671142578, 0.11834342193603516, 0.1189480972290039, 0.11848454284667968, 0.12020079803466797, 0.11876751708984375, 0.11855353546142579, 0.11888988494873047, 0.11953993225097656, 0.11881670379638672, 0.11876396942138671, 0.11899890899658203, 0.118783935546875, 0.11912416076660157, 0.11821875, 0.11861196899414063, 0.11841741180419922, 0.11863970947265624, 0.11851868438720703, 0.118957763671875]",tokens/s,8.366237357404517,, 
8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) 
File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 515315 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,2059.743232,1260.25728,0.0,857.735168,829.14304,s,1,9.665447265625,9.665447265625,0.0,9.665447265625,9.665447265625,9.665447265625,9.665447265625,[9.665447265625],,kWh,6.266970654581504e-05,6.905773002406936e-06,2.0308071802005445e-05,8.988355135022742e-05,,MB,2102.984704,1549.664256,0.0,1126.170624,1096.742912,s,10,0.9069047622680665,0.09069047622680665,0.0004917463763754971,0.09059041595458983,0.09115860748291016,0.09156173706054688,0.09188424072265625,"[0.09106902313232422, 0.09028262329101562, 0.09074384307861329, 0.0919648666381836, 0.0906478042602539, 0.09047615814208984, 0.0901659164428711, 0.09069686126708984, 0.09053302764892578, 0.09032463836669923]",tokens/s,2822.788132237534,kWh,2.665264567954871e-06,2.939271111901152e-07,1.0700791388911325e-06,4.029270818036119e-06,tokens/kWh,63535069.138086714,MB,2107.121664,1570.635776,0.0,1147.142144,1096.745472,s,10,55.7698212890625,5.57698212890625,0.008624108152857834,5.5751748046875,5.582369873046876,5.590986450195312,5.597879711914062,"[5.59960302734375, 5.568833984375, 5.57936572265625, 5.57472607421875, 5.56929638671875, 5.5706767578125, 5.580455078125, 5.57562353515625, 5.571236328125, 5.58000439453125]",tokens/s,11.296432110381438,kWh,0.00016299117683995753,1.797850302208262e-05,5.732932869170658e-05,0.0002382990085537467,tokens/kWh,264373.7394559524,,s,630,55.76738735961916,0.08851966247558593,0.0010110119969188134,0.08830302429199219,0.08912313919067383,0.0899870246887207,0.09274326499938966,"[0.08904978942871093, 0.08848528289794921, 0.08839638519287109, 0.08844697570800782, 0.08860671997070313, 0.09051270294189454, 0.08908460998535156, 0.08830902099609375, 0.08826044464111328, 0.08815705871582032, 0.08826265716552735, 0.08850800323486328, 0.08867990112304687, 0.08820124816894531, 0.08846959686279297, 0.08837308502197265, 0.08912774658203125, 0.08863350677490234, 0.08826582336425781, 0.08833084869384765, 0.08861094665527344, 0.0884471664428711, 0.08841177368164063, 0.0892010269165039, 0.0887438735961914, 0.08860063934326172, 0.08864300537109375, 0.08917049407958984, 0.08858393859863281, 0.08891209411621094, 0.08864361572265625, 0.08836914825439453, 0.08832355499267579, 0.08875433349609375, 0.08858048248291016, 0.08949513244628907, 0.08826262664794922, 0.08864403533935547, 0.08825651550292969, 0.08973516845703125, 0.09714892578125, 0.08967075347900391, 0.08812841796875, 0.08818649291992188, 0.08785279846191406, 0.08816019439697266, 0.08971321868896484, 0.09143628692626952, 0.08851283264160156, 0.08914988708496094, 0.08912294769287109, 0.09000345611572266, 0.08855923461914063, 0.08864326477050781, 0.08840467071533203, 0.08898355102539063, 0.08844493103027344, 0.08842438507080078, 0.08859654235839844, 0.0889257583618164, 0.08834502410888671, 0.08936243438720703, 0.0890408935546875, 0.08873741149902344, 0.09177308654785156, 0.08869356536865235, 0.08846428680419922, 0.08831878662109376, 0.08816844940185548, 0.0894730224609375, 0.0881070098876953, 0.08827817535400391, 0.08899251556396484, 0.08897248077392578, 0.08839055633544922, 0.08878265380859375, 0.08816368103027344, 0.08818364715576171, 0.08855779266357422, 0.08795072174072266, 0.08817910766601562, 0.0879878692626953, 0.08815030670166016, 0.0900278091430664, 0.08831619262695313, 0.08777295684814453, 0.08786511993408203, 0.08770604705810547, 0.08783817291259766, 0.08788748931884766, 0.08786732482910156, 0.08769773101806641, 0.088316162109375, 0.08854774475097656, 0.08840396881103516, 0.08874803161621093, 
0.08817443084716797, 0.08784912109375, 0.08778137969970704, 0.08789167785644532, 0.08829071807861329, 0.08808710479736329, 0.08806610870361328, 0.08809394836425781, 0.08903372955322265, 0.08820272064208984, 0.0891213150024414, 0.08808038330078125, 0.08804351806640626, 0.0879508819580078, 0.08855804443359375, 0.08805084991455078, 0.08803139495849609, 0.0880544662475586, 0.0881879653930664, 0.08854589080810547, 0.08845247650146484, 0.08940233612060547, 0.08944640350341797, 0.08813715362548828, 0.0879213104248047, 0.08811504364013673, 0.08863887786865235, 0.08853584289550781, 0.08841522979736328, 0.08810147094726563, 0.08862191772460938, 0.08834457397460938, 0.08877260589599609, 0.08804147338867188, 0.08813362884521485, 0.08825631713867188, 0.08801094055175782, 0.08844076538085938, 0.08861087799072266, 0.08815001678466797, 0.08824582672119141, 0.08834297943115234, 0.0887355499267578, 0.09185504150390625, 0.08906864166259766, 0.08809747314453124, 0.08804729461669922, 0.08801907348632812, 0.08838595581054688, 0.08846726226806641, 0.08818093109130859, 0.08807628631591796, 0.08809171295166016, 0.08822265625, 0.09098838043212891, 0.092397216796875, 0.0883605728149414, 0.08809152221679688, 0.08799846649169922, 0.08864284515380859, 0.08803171539306641, 0.08803139495849609, 0.08800675201416015, 0.09250816345214843, 0.08860467529296875, 0.08976383972167969, 0.08898150634765625, 0.08831292724609376, 0.08818131256103516, 0.08803517150878906, 0.08831641387939453, 0.08793421173095703, 0.08803113555908203, 0.08813651275634765, 0.08793433380126953, 0.08796422576904298, 0.08807433319091797, 0.08863484954833985, 0.08822019195556641, 0.08805375671386718, 0.092010498046875, 0.08888451385498047, 0.08820518493652343, 0.08774310302734376, 0.08788195037841796, 0.08749260711669922, 0.08916579437255859, 0.08785923004150391, 0.08833229064941406, 0.08814380645751953, 0.08814102172851562, 0.08796358489990234, 0.08783570861816406, 0.08878034973144532, 0.08854969787597657, 0.08846768188476563, 0.08820508575439454, 0.08850153350830078, 0.08893103790283204, 0.08916547393798828, 0.08923785400390626, 0.08808857727050781, 0.09002566528320312, 0.08871910095214844, 0.08843526458740235, 0.08837120056152344, 0.08856781005859375, 0.08901222229003906, 0.08811315155029296, 0.08879513549804688, 0.08876646423339844, 0.08879216003417968, 0.08803421020507812, 0.08833023834228515, 0.08851251220703125, 0.08966963195800781, 0.08840396881103516, 0.08831795501708985, 0.08865996551513672, 0.08812134552001953, 0.08798003387451171, 0.08811296081542969, 0.0881805419921875, 0.08858048248291016, 0.08804962921142578, 0.08808041381835938, 0.08835686492919922, 0.08831795501708985, 0.08841366577148438, 0.08835497283935546, 0.0891436767578125, 0.08854137420654297, 0.09022854614257812, 0.08847974395751954, 0.08856179046630859, 0.08808230590820312, 0.08814553833007813, 0.0884361572265625, 0.08808953857421875, 0.08832514953613281, 0.08814867401123047, 0.08853327941894532, 0.08825856018066407, 0.0880125732421875, 0.08841852569580078, 0.08818694305419922, 0.08846134185791016, 0.08791439819335937, 0.08802508544921875, 0.08807804870605469, 0.08921250915527344, 0.08836483001708985, 0.08832707214355469, 0.08787731170654296, 0.08801900482177734, 0.08859468841552734, 0.08810102081298828, 0.08859648132324219, 0.0877158432006836, 0.08797571563720703, 0.08772777557373047, 0.08775862121582031, 0.0874993896484375, 0.08801411437988281, 0.08755833435058594, 0.08980361938476562, 0.09263702392578126, 0.08807987213134766, 0.09044019317626953, 0.08783872222900391, 
0.08760934448242187, 0.0876953582763672, 0.08762525177001954, 0.08781846618652343, 0.0951278076171875, 0.08824208068847657, 0.08815010833740235, 0.09153456115722657, 0.08771663665771484, 0.08776089477539062, 0.08817974090576172, 0.08909923553466798, 0.08795136260986328, 0.08773334503173828, 0.0885156478881836, 0.0877709732055664, 0.08714649963378907, 0.08731759643554687, 0.08789904022216796, 0.08827423858642579, 0.08822409820556641, 0.09159715270996094, 0.08931737518310547, 0.08807379150390625, 0.08807014465332032, 0.08818323516845702, 0.08805935668945312, 0.0887175064086914, 0.08762387084960938, 0.08756034851074218, 0.08781209564208985, 0.08765644836425782, 0.08768294525146485, 0.08754598236083984, 0.08775881958007813, 0.08807782745361328, 0.08793059539794922, 0.08798271942138672, 0.08801299285888672, 0.08782745361328125, 0.08810514831542969, 0.09208019256591797, 0.08841088104248047, 0.08773779296875, 0.08772390747070312, 0.08819923400878907, 0.08859894561767578, 0.08777318572998047, 0.08780332946777344, 0.08804790496826172, 0.0876671371459961, 0.08770150756835937, 0.08809292602539062, 0.08811859130859374, 0.08820985412597657, 0.08775475311279297, 0.08871116638183593, 0.08798617553710937, 0.08889548492431641, 0.08793462371826172, 0.08797628784179687, 0.08799606323242187, 0.08829782104492187, 0.08824012756347656, 0.08808448028564453, 0.0880551986694336, 0.08774620819091797, 0.08836988830566406, 0.08858646392822266, 0.08806195068359375, 0.08818003082275391, 0.08859308624267578, 0.08825856018066407, 0.08912486267089843, 0.08835446166992188, 0.08821501159667969, 0.08824102020263672, 0.0883835220336914, 0.08825647735595703, 0.08861609649658203, 0.0884758071899414, 0.08800841522216797, 0.08856265258789063, 0.08821759796142578, 0.08820259094238281, 0.08843052673339843, 0.088463134765625, 0.08833734130859375, 0.08890163421630859, 0.08834047698974609, 0.08835446166992188, 0.0899669418334961, 0.08831382751464843, 0.08853711700439452, 0.08836710357666015, 0.0885145263671875, 0.0885607681274414, 0.08900086212158204, 0.08832937622070312, 0.0885907211303711, 0.08824854278564453, 0.08811955261230468, 0.0883482208251953, 0.08841590118408203, 0.08819168090820312, 0.0882870101928711, 0.08850409698486328, 0.08984630584716796, 0.09278665924072266, 0.08835689544677734, 0.08792063903808593, 0.08816639709472657, 0.08791516876220704, 0.08825446319580078, 0.08815609741210938, 0.09460678100585937, 0.08821619415283204, 0.08826806640625, 0.08872828674316406, 0.08806320190429688, 0.08791939544677735, 0.08819709014892578, 0.0880968017578125, 0.0880148468017578, 0.08787353515625, 0.08836914825439453, 0.08827494049072265, 0.08807424163818359, 0.08800870513916016, 0.0884858856201172, 0.08844493103027344, 0.08838054656982422, 0.09302105712890625, 0.08893443298339844, 0.0888704605102539, 0.08852249908447266, 0.0883186264038086, 0.08854937744140624, 0.08854118347167969, 0.08835481262207032, 0.08828284454345703, 0.08847183990478516, 0.08818470764160156, 0.08848191833496094, 0.08852416229248047, 0.08862169647216797, 0.08830287933349609, 0.08825520324707031, 0.08825609588623047, 0.08862313842773438, 0.08871974182128907, 0.08867430114746094, 0.0884878692626953, 0.08820947265625, 0.08912486267089843, 0.08818892669677734, 0.08809881591796875, 0.08817459106445312, 0.08838349151611329, 0.08820285034179688, 0.08849868774414063, 0.08842025756835938, 0.0885555191040039, 0.08838963317871094, 0.08846540832519531, 0.0882872314453125, 0.08829078674316407, 0.08864822387695312, 0.08843059539794922, 0.08834588623046875, 0.08842518615722657, 
0.08847360229492188, 0.08909209442138671, 0.08854252624511719, 0.08962322998046875, 0.0881295394897461, 0.08830316925048828, 0.08803327941894531, 0.0886287384033203, 0.08828966522216797, 0.08812806701660156, 0.08798214721679687, 0.08845651245117188, 0.08845990753173828, 0.08989807891845703, 0.08885855865478516, 0.08824931335449218, 0.08829952239990234, 0.08830912017822265, 0.08822777557373047, 0.08843539428710938, 0.08792841339111328, 0.0882896957397461, 0.0887562255859375, 0.0886087646484375, 0.08799846649169922, 0.08809001922607422, 0.08801487731933594, 0.0879288330078125, 0.08852710723876953, 0.08834281921386719, 0.08836271667480469, 0.0883872299194336, 0.08805766296386719, 0.08858710479736329, 0.08823216247558593, 0.08850819396972656, 0.08835686492919922, 0.08838355255126953, 0.09235424041748047, 0.08808214569091796, 0.08805411529541016, 0.0885282211303711, 0.08895529937744141, 0.08825215911865235, 0.0879430389404297, 0.08807020568847657, 0.08801500701904297, 0.09035017395019532, 0.0919300765991211, 0.08808847808837891, 0.08800847625732422, 0.08766140747070313, 0.08772803497314453, 0.08783062744140625, 0.08784690856933594, 0.08803289794921874, 0.08841683197021484, 0.09114582061767579, 0.08920291137695313, 0.08868370819091796, 0.08833721923828125, 0.0880025634765625, 0.08789990234375, 0.08835871887207031, 0.08821718597412109, 0.08807721710205078, 0.0892640609741211, 0.08781414031982422, 0.08824012756347656, 0.08857395172119141, 0.08826057434082031, 0.08805583953857422, 0.09144643402099609, 0.08834489440917968, 0.08819356536865235, 0.08805773162841797, 0.08807794952392578, 0.08817462158203125, 0.0884023666381836, 0.0880513916015625, 0.08811555480957031, 0.08850431823730469, 0.08828457641601563, 0.08802057647705078, 0.08788275146484376, 0.08812911987304688, 0.08849180603027344, 0.0883348159790039, 0.0884625244140625, 0.08859677124023438, 0.08839199829101563, 0.08816473388671875, 0.08821497344970704, 0.08783641815185547, 0.0879706268310547, 0.08798137664794922, 0.08814867401123047, 0.08825651550292969, 0.08845846557617187, 0.08845126342773438, 0.08867027282714844, 0.0889219207763672, 0.08934268951416016, 0.0889156494140625, 0.08890195465087891, 0.08870285034179687, 0.08819542694091796, 0.08836886596679687, 0.08898057556152343, 0.08811619567871094, 0.08812057495117187, 0.08815058898925782, 0.08851065826416016, 0.08834457397460938, 0.08848384094238282, 0.08799846649169922, 0.08823395538330078, 0.08807206726074218, 0.08830377960205078, 0.08849747467041015, 0.08851936340332031, 0.08849203491210937, 0.0883240966796875, 0.08882585906982422, 0.08846559906005859, 0.09083020782470703, 0.08924809265136718, 0.08797602844238281, 0.08808643341064454, 0.0879999008178711, 0.08788774108886718, 0.08818701171875, 0.08800601959228516, 0.08841075134277344, 0.08814326477050781, 0.08803775787353516, 0.0884217300415039, 0.08808332824707031, 0.08833023834228515, 0.08850374603271484, 0.08823772430419922, 0.08848477172851563, 0.08805811309814453, 0.08793267059326172, 0.08814134216308593, 0.08805577850341798, 0.08817305755615235, 0.08821491241455078, 0.0883812484741211, 0.0879706268310547, 0.08828092956542968, 0.08799247741699219, 0.08809990692138672, 0.08845203399658202, 0.0880926742553711, 0.0879288330078125, 0.08841120147705078, 0.08821548461914062, 0.09230028533935547, 0.09698617553710938, 0.08870310211181641, 0.08851334381103515, 0.0886087646484375, 0.08844230651855468, 0.08857209777832031, 0.08839734649658203, 0.08847856140136719, 0.08894566345214844, 0.09423907470703125, 0.08834230041503906, 
0.08848883056640625, 0.08808243560791015, 0.08820713806152344, 0.08825878143310546, 0.08790425872802735, 0.08843014526367188, 0.08817298889160156, 0.08807833862304687, 0.08813772583007813, 0.08806809234619141, 0.08804147338867188, 0.08799782562255859, 0.08798454284667968, 0.08786966705322266, 0.0923463363647461, 0.08849622344970703, 0.0882011489868164, 0.08799632263183593, 0.08793097686767579, 0.08801679992675782, 0.08795101165771485, 0.08772374725341797, 0.08771810913085938]",tokens/s,11.296925135427442,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your 
environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 517014 has 14.71 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 28.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,5300.891648,3468.558336,0.0,3066.036224,2865.160192,s,1,12.9308017578125,12.9308017578125,0.0,12.9308017578125,12.9308017578125,12.9308017578125,12.9308017578125,[12.9308017578125],,kWh,0.00016686205284578744,1.8399048625485092e-05,5.6414489575995486e-05,0.00024167559104726802,,MB,5363.314688,3806.199808,0.0,3382.706176,3158.450176,s,10,0.9708065948486327,0.09708065948486329,0.0003177704291107483,0.09705827331542968,0.09745926132202148,0.09758540840148926,0.09768632606506347,"[0.09664073944091797, 0.09697196960449218, 0.09729833221435547, 0.09743122863769531, 0.09677117156982422, 0.09716754913330078, 0.0968969955444336, 0.09714457702636718, 0.09677247619628906, 0.09771155548095703]",tokens/s,2636.982498454445,kWh,2.8527793586169e-06,3.146088095473739e-07,1.4285610133982784e-06,4.595949181562552e-06,tokens/kWh,55701225.11950055,MB,5363.314688,3808.29696,0.0,3382.706176,3158.452736,s,10,58.78186279296875,5.878186279296875,0.013481567456978065,5.879755615234375,5.892649755859375,5.893150073242188,5.8935503271484375,"[5.84985595703125, 5.878453125, 5.893650390625, 5.86680029296875, 5.865654296875, 5.8903466796875, 5.88887548828125, 5.88105810546875, 5.8746298828125, 5.89253857421875]",tokens/s,10.717591618674563,kWh,0.00017101808527387953,1.8863943261135787e-05,6.551829809960019e-05,0.0002554003266346155,tokens/kWh,246671.5717640016,,s,630,58.779540252685486,0.09330085754394532,0.0009311101758182043,0.09317382431030273,0.09380593872070313,0.09458361701965332,0.09713524108886719,"[0.09163980865478516, 0.09148524475097657, 0.09179641723632813, 0.09160643005371094, 0.09125308990478516, 0.09203030395507812, 0.09231247711181641, 0.09165618896484375, 0.09147782135009766, 0.09144684600830078, 0.09148239898681641, 0.09136275482177734, 
0.09154633331298828, 0.09171340942382812, 0.09137500762939453, 0.09165430450439453, 0.09163033294677735, 0.09267737579345703, 0.09205155181884765, 0.09182288360595703, 0.09249983978271484, 0.09192176055908204, 0.09184323120117187, 0.09189785766601563, 0.09195065307617188, 0.09250656127929688, 0.09268377685546875, 0.09330124664306641, 0.09364479827880859, 0.09357686614990235, 0.0934525146484375, 0.09337664031982422, 0.09303459167480468, 0.09336803436279296, 0.0933125762939453, 0.09327629089355469, 0.09336003112792969, 0.0934508514404297, 0.09576850891113281, 0.09321887969970703, 0.09335398101806641, 0.09327206420898437, 0.09305292510986328, 0.09318332672119141, 0.09310070037841797, 0.09329869079589843, 0.0933355484008789, 0.09312655639648437, 0.0929606704711914, 0.09383100891113282, 0.09461817932128906, 0.09319795227050781, 0.09454137420654297, 0.09383382415771484, 0.09328425598144531, 0.09320662689208985, 0.09342486572265625, 0.09332611083984375, 0.09381683349609375, 0.0936263656616211, 0.09821155548095703, 0.09349558258056641, 0.0930672607421875, 0.09329692840576172, 0.09332530975341796, 0.09288400268554688, 0.09343280029296876, 0.09291366577148437, 0.09371398162841797, 0.09989933013916015, 0.09372927856445312, 0.09317375946044922, 0.0929443817138672, 0.09296438598632813, 0.09303695678710938, 0.09293215942382813, 0.09324034881591797, 0.09317388916015625, 0.09291043090820313, 0.09292537689208985, 0.0936448974609375, 0.09363504028320313, 0.092837890625, 0.09300768280029297, 0.09307564544677735, 0.09299967956542969, 0.09287625885009766, 0.09302249908447266, 0.09670066833496094, 0.09327728271484376, 0.09294678497314453, 0.09329666900634766, 0.09356304168701172, 0.0987447052001953, 0.09319542694091797, 0.09316028594970703, 0.09285001373291016, 0.09278684997558594, 0.09246924591064454, 0.09258528137207031, 0.09263760375976562, 0.09278822326660156, 0.09275676727294922, 0.09276134490966798, 0.09255817413330078, 0.0936337890625, 0.09284674835205078, 0.09285734558105468, 0.09284915161132813, 0.09286390686035156, 0.0932212142944336, 0.09272959899902344, 0.09290137481689453, 0.09311222076416016, 0.09290767669677734, 0.09275350189208985, 0.09290131378173828, 0.09286492919921875, 0.09326310729980469, 0.0929817886352539, 0.09291350555419922, 0.09298953247070313, 0.09275215911865234, 0.09375865936279297, 0.09322169494628907, 0.09321676635742188, 0.09318425750732422, 0.09332428741455077, 0.09321568298339844, 0.09318348693847656, 0.09396896362304688, 0.09307545471191406, 0.0929925765991211, 0.09317881774902344, 0.09322086334228516, 0.09386188507080079, 0.0930183334350586, 0.09301929473876953, 0.09330547332763672, 0.09344204711914063, 0.09479167938232422, 0.09338060760498047, 0.09497805023193359, 0.09448623657226562, 0.09345200347900391, 0.09381517028808593, 0.09331238555908203, 0.0933404769897461, 0.09302611541748047, 0.0932449951171875, 0.09417177581787109, 0.0935047378540039, 0.09324214172363281, 0.09354198455810547, 0.09349100494384766, 0.09332182312011719, 0.09366918182373046, 0.09326934051513672, 0.09335689544677735, 0.09311436462402344, 0.09339289855957031, 0.0932495346069336, 0.09318399810791016, 0.09306918334960937, 0.09378131103515625, 0.09344710540771485, 0.09301564788818359, 0.09324982452392579, 0.09306031799316407, 0.09307574462890625, 0.0928936996459961, 0.09338371276855469, 0.09383420562744141, 0.09312665557861328, 0.09330483245849609, 0.09744179534912109, 0.09352393341064454, 0.09315315246582032, 0.09323741149902344, 0.09332326507568359, 0.09318105316162109, 0.09323916625976562, 
0.09363081359863282, 0.09605187225341796, 0.0935599365234375, 0.09365132904052734, 0.09444403076171876, 0.09370214080810547, 0.09370559692382813, 0.09337996673583984, 0.09312921905517578, 0.09303673553466797, 0.09341744232177734, 0.09363900756835937, 0.0959447021484375, 0.09328435516357422, 0.09363251495361329, 0.09336531066894531, 0.0949441909790039, 0.09305907440185547, 0.09287065887451172, 0.09296892547607422, 0.09295030212402344, 0.09311052703857423, 0.09304796600341797, 0.09295481872558593, 0.09308943939208984, 0.09287577819824219, 0.09259008026123047, 0.09287612915039062, 0.0927872314453125, 0.09283747100830078, 0.09270722961425781, 0.09275596618652343, 0.09271478271484375, 0.09304713439941406, 0.09348678588867188, 0.0954966049194336, 0.09296479797363282, 0.09276412963867188, 0.09372262573242188, 0.09301305389404296, 0.09286752319335938, 0.09282150268554687, 0.09264064025878906, 0.09300406646728515, 0.09261408233642578, 0.09322179412841797, 0.09341542053222657, 0.09290534210205079, 0.09307571411132813, 0.09280467224121093, 0.09263136291503907, 0.09266729736328125, 0.09280899047851562, 0.09278902435302734, 0.09314905548095703, 0.09294710540771485, 0.09357686614990235, 0.09309423828125, 0.0927070083618164, 0.09270841979980468, 0.09302861022949219, 0.09276576232910157, 0.09301229095458985, 0.09274176025390625, 0.09301136016845703, 0.0925804443359375, 0.09273548889160156, 0.09418956756591797, 0.09274562835693359, 0.09284819030761719, 0.0932147216796875, 0.09235635375976563, 0.09260368347167969, 0.0928855972290039, 0.09330633544921875, 0.09285692596435546, 0.09297132873535156, 0.09322815704345704, 0.0931376953125, 0.09276995086669922, 0.09299507141113281, 0.09291053009033202, 0.09316150665283203, 0.09360323333740235, 0.09316368103027343, 0.09289315032958985, 0.0930943374633789, 0.09337446594238281, 0.09347891235351563, 0.09286860656738281, 0.09287872314453124, 0.09306681823730469, 0.0925763168334961, 0.09270390319824219, 0.09269257354736328, 0.09276934051513672, 0.09263276672363281, 0.09279488372802734, 0.0965630111694336, 0.09453936004638672, 0.09267874908447266, 0.09249795532226562, 0.09338262176513672, 0.09298035430908203, 0.09348531341552735, 0.09286924743652344, 0.0930192642211914, 0.0932279052734375, 0.09288089752197265, 0.0929106216430664, 0.09296790313720703, 0.09293209838867188, 0.09280438232421875, 0.09306185913085938, 0.09255731201171875, 0.09257097625732422, 0.09276044464111328, 0.09289552307128907, 0.0944005126953125, 0.09274368286132813, 0.09290457916259766, 0.09290544128417969, 0.09571011352539062, 0.09313881683349609, 0.0930502700805664, 0.09282768249511719, 0.09347718048095703, 0.09286489868164062, 0.09274687957763672, 0.0928358383178711, 0.09284288024902344, 0.09306931304931641, 0.09332736206054687, 0.09314608001708985, 0.09324723052978516, 0.09307366180419922, 0.09308678436279297, 0.09329350280761718, 0.09317779541015625, 0.09327417755126953, 0.09361817932128906, 0.09378406524658203, 0.09322434997558594, 0.09346109008789062, 0.09324543762207031, 0.09358716583251953, 0.09307881927490234, 0.09320108795166016, 0.09306553649902344, 0.09340633392333984, 0.09357990264892578, 0.09333990478515625, 0.09346073913574218, 0.09321036529541016, 0.09336627197265625, 0.09324339294433594, 0.09332121276855469, 0.09330995178222656, 0.09332224273681641, 0.09333689880371093, 0.09319407653808594, 0.09349600219726563, 0.09356098937988282, 0.09320038604736328, 0.09307110595703125, 0.09332761383056641, 0.09336627197265625, 0.09312406158447266, 0.09302889251708985, 0.09356428527832031, 
0.09380518341064453, 0.09332444763183594, 0.09522223663330077, 0.09376131439208985, 0.09357782745361329, 0.09348915100097656, 0.09403756713867187, 0.09367801666259766, 0.09360921478271485, 0.0936148452758789, 0.09399072265625, 0.09322665405273438, 0.09281926727294922, 0.0928689956665039, 0.09373526763916015, 0.0932863998413086, 0.09319760131835937, 0.09387286376953125, 0.09340249633789062, 0.09351026916503906, 0.09319171142578125, 0.0937999038696289, 0.09353318023681641, 0.09362777709960937, 0.09777830505371093, 0.0937613754272461, 0.09247129821777343, 0.0927293472290039, 0.09312870025634766, 0.09266790771484375, 0.09306697845458985, 0.09301020812988281, 0.0932126693725586, 0.09647513580322266, 0.09717327880859375, 0.09282377624511719, 0.09308159637451172, 0.09308175659179688, 0.09338569641113281, 0.0929534683227539, 0.09291065979003907, 0.09320655822753907, 0.09329135894775391, 0.09310009765625, 0.09372467041015625, 0.0949964828491211, 0.09315042877197266, 0.09346646118164062, 0.09338566589355468, 0.09373900604248046, 0.09381273651123047, 0.09322496032714844, 0.09357926177978515, 0.09415058898925781, 0.09562079620361329, 0.09335846710205079, 0.09327410888671875, 0.09641983795166016, 0.09323519897460937, 0.09333350372314453, 0.09363865661621094, 0.09294582366943359, 0.09314546966552735, 0.09311449432373047, 0.09332895660400391, 0.09318035125732421, 0.09324681854248047, 0.0928771514892578, 0.09309200286865234, 0.09294608306884766, 0.09305942535400391, 0.09322930908203125, 0.09309571075439453, 0.09322313690185546, 0.09310569763183593, 0.09298992156982422, 0.0938270721435547, 0.0935025634765625, 0.09368576049804687, 0.09331126403808594, 0.09341747283935548, 0.09339250946044922, 0.09349785614013671, 0.09311283111572266, 0.09306931304931641, 0.09312255859375, 0.09377996826171875, 0.09335919952392578, 0.09310944366455078, 0.09278217315673828, 0.09274201965332031, 0.09317584228515625, 0.09322086334228516, 0.09330604553222656, 0.09328099060058594, 0.09336163330078125, 0.09430799865722657, 0.09319321441650391, 0.09314710235595704, 0.09290470123291016, 0.0930408935546875, 0.09305449676513672, 0.09326643371582032, 0.09322927856445312, 0.09371033477783203, 0.09380480194091798, 0.0944168930053711, 0.09577638244628907, 0.09373040008544922, 0.09324326324462891, 0.09282614135742187, 0.09317414093017579, 0.09285427093505859, 0.09345200347900391, 0.09313536071777344, 0.0935360336303711, 0.09286787414550782, 0.0933239974975586, 0.0926370849609375, 0.09324540710449218, 0.09278681945800782, 0.09339289855957031, 0.09301606750488281, 0.09301324462890626, 0.09283251190185547, 0.09319734191894531, 0.093244384765625, 0.09299517059326172, 0.09337078094482422, 0.09340672302246093, 0.093032958984375, 0.09286041259765625, 0.09303858947753907, 0.09285427093505859, 0.09368370819091797, 0.09316518402099609, 0.09344012451171875, 0.09319039916992188, 0.09303772735595703, 0.09602489471435546, 0.09448870086669922, 0.09320694732666016, 0.09318402862548827, 0.09314710235595704, 0.0930665283203125, 0.09360253143310547, 0.0936213150024414, 0.0935469741821289, 0.09295692443847656, 0.09304905700683594, 0.09387744140625, 0.09375007629394531, 0.093121826171875, 0.0927216339111328, 0.09246841430664063, 0.09291654205322265, 0.09257084655761719, 0.09269660949707031, 0.0928116455078125, 0.0970421142578125, 0.092942626953125, 0.09313859558105468, 0.09607244873046875, 0.0932159652709961, 0.09315750122070313, 0.09320925140380859, 0.093404541015625, 0.09341510772705078, 0.09308573150634766, 0.09348393249511719, 0.09401548767089844, 
0.09322905731201171, 0.09316556549072266, 0.09291980743408203, 0.09368364715576172, 0.09325682830810547, 0.09349164581298829, 0.09334540557861327, 0.09305587005615235, 0.0932126693725586, 0.0931319351196289, 0.09333961486816406, 0.09594764709472656, 0.0925567398071289, 0.09261933135986328, 0.09264332580566406, 0.09268019104003906, 0.09258393859863281, 0.0931409912109375, 0.09264064025878906, 0.09279961395263672, 0.09288668823242187, 0.09325612640380859, 0.09291970825195313, 0.09290751647949219, 0.09287862396240235, 0.09281251525878906, 0.092846435546875, 0.09320105743408204, 0.09344515228271484, 0.09340386962890625, 0.09306082916259766, 0.09328662109375, 0.09323551940917969, 0.09349324798583984, 0.09293011474609375, 0.09295661163330078, 0.09302207946777344, 0.0928993911743164, 0.09310419464111327, 0.09330438232421875, 0.09291548919677735, 0.09290614318847656, 0.09445785522460938, 0.09332726287841797, 0.0930576629638672, 0.09307513427734375, 0.09276038360595704, 0.09297049713134765, 0.09325619506835937, 0.09316102600097656, 0.092770751953125, 0.09306521606445313, 0.09329567718505859, 0.09347491455078125, 0.09308246612548828, 0.09301337432861329, 0.09325798034667969, 0.09305331420898437, 0.09316326141357421, 0.09323494720458984, 0.09292646026611329, 0.09321580505371094, 0.09333036804199218, 0.09399059295654297, 0.0936492156982422, 0.09341542053222657, 0.09334070587158202, 0.09343657684326172, 0.094438720703125, 0.09392752075195313, 0.09356175994873046, 0.09345340728759766, 0.09325865936279297, 0.10311679840087891, 0.09546546936035157, 0.09350553894042969, 0.09337651062011719, 0.09335740661621093, 0.09562895965576172, 0.09410662078857422, 0.0931430435180664, 0.0931041259765625, 0.09291078186035157, 0.09305757141113281, 0.09424476623535157, 0.09341990661621094, 0.09307078552246094, 0.09306082916259766, 0.09305583953857421, 0.09297261047363281, 0.09308204650878907, 0.09334371185302734, 0.09272937774658203, 0.09301519775390625, 0.09534476470947266, 0.0928939208984375, 0.09289078521728515, 0.09526512145996094, 0.09334374237060547, 0.09313075256347657, 0.09281536102294922, 0.0930508804321289, 0.09374227142333984, 0.09287120056152344, 0.09281155395507812, 0.09302835083007813, 0.0926957778930664]",tokens/s,10.718015100011202,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, 
in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 505480 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of 
memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 497568 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,2060.312576,1260.25728,0.0,857.735168,829.14304,s,1,9.804876953125,9.804876953125,0.0,9.804876953125,9.804876953125,9.804876953125,9.804876953125,[9.804876953125],,kWh,6.157870766255655e-05,6.7854304800202355e-06,2.0233349520004662e-05,8.859748766258145e-05,,MB,2107.711488,1549.664256,0.0,1126.170624,1096.742912,s,10,0.9507347488403322,0.09507347488403321,0.0010660708029216037,0.09474322891235351,0.09584400253295898,0.09686872138977051,0.09768849647521972,"[0.09479023742675781, 0.09513897705078125, 0.09445881652832032, 0.09789344024658203, 0.093998046875, 0.09561628723144532, 0.09441171264648437, 0.09422029113769531, 0.09469622039794921, 0.09551071929931641]",tokens/s,2692.654289876945,kWh,2.787688506111105e-06,3.0743313740324707e-07,1.1401993248571522e-06,4.235320968371504e-06,tokens/kWh,60444061.24394225,MB,2112.2048,1574.83008,0.0,1149.239296,1096.745472,s,10,57.750103515625,5.775010351562499,0.013374371094579502,5.77382470703125,5.791785009765625,5.793366381835938,5.794631479492187,"[5.7734140625, 5.79143359375, 5.7547197265625, 5.7551787109375, 5.7884521484375, 5.7803876953125, 5.79494775390625, 5.7742353515625, 5.772080078125, 5.76525439453125]",tokens/s,10.909071354816634,kWh,0.00016955441347722166,1.870178852333888e-05,5.959843259934378e-05,0.00024785463459990435,tokens/kWh,254181.24660729792,,s,630,57.74774057006837,0.09166308026994978,0.0009411134919720976,0.0914664649963379,0.09242059631347657,0.09294890403747559,0.09573302490234376,"[0.09126719665527344, 0.09133856201171875, 0.09172569274902344, 0.09140460968017577, 0.09135923004150391, 0.09125673675537109, 0.09175663757324219, 0.09226649475097656, 0.09285404968261719, 0.0923671646118164, 0.09177897644042969, 0.09127526092529296, 0.09161027526855468, 0.09148409271240235, 0.09158953857421875, 0.09155081939697265, 0.09134786987304687, 0.09142189025878907, 0.09150479888916016, 0.09170601654052735, 0.09174835205078125, 0.09161113739013672, 0.09107036590576172, 0.09125001525878906, 0.09128012847900391, 0.09157961273193359, 0.0912166748046875, 0.09104518127441406, 0.09160758209228516, 0.09187344360351563, 0.09258735656738282, 0.09109980773925781, 0.09174835205078125, 0.0915230712890625, 0.09096995544433593, 0.09152230072021485, 0.09132450866699218, 0.09130681610107422, 
0.09147801971435547, 0.0912708511352539, 0.0928458251953125, 0.09152979278564453, 0.09155583953857421, 0.09172991943359375, 0.09154115295410156, 0.09164220428466797, 0.0912384033203125, 0.09164527893066406, 0.09169782257080078, 0.09144652557373047, 0.09186994934082031, 0.09197772979736328, 0.09234432220458984, 0.09168486022949218, 0.09135001373291016, 0.09204434967041016, 0.09157215881347656, 0.09184255981445312, 0.09156403350830078, 0.0919552001953125, 0.0917053451538086, 0.0922460174560547, 0.09217638397216797, 0.09586649322509766, 0.09253945922851563, 0.09188114929199219, 0.0915214385986328, 0.09139977264404296, 0.09241436767578125, 0.09210457611083984, 0.09554547119140624, 0.09219808197021484, 0.09346726226806641, 0.09494956970214843, 0.09163775634765625, 0.0915251235961914, 0.09186502075195313, 0.09205561828613282, 0.09154764556884766, 0.0915618896484375, 0.09154774475097656, 0.09145484924316406, 0.09182879638671874, 0.09133881378173828, 0.09192588806152344, 0.09130249786376952, 0.09137564849853516, 0.09161277008056641, 0.09170575714111329, 0.09151859283447265, 0.09166429138183593, 0.09131008148193359, 0.09145391845703126, 0.09203302764892578, 0.0919736328125, 0.0915599365234375, 0.09157408142089844, 0.09391129302978515, 0.09159801483154296, 0.09187785339355468, 0.09173753356933594, 0.09152803039550782, 0.0909958724975586, 0.09084809875488281, 0.0912543716430664, 0.09166659545898438, 0.09207430267333984, 0.0911502685546875, 0.09139199829101563, 0.0913255386352539, 0.09197420501708985, 0.09139440155029296, 0.09141043090820312, 0.09127731323242187, 0.09127526092529296, 0.09162137603759765, 0.09634393310546875, 0.09187526702880859, 0.09114438629150391, 0.09217638397216797, 0.09132236480712891, 0.09159065246582031, 0.0913839340209961, 0.09149139404296874, 0.09120441436767578, 0.09109324645996093, 0.09129478454589844, 0.09132233428955078, 0.09079087829589844, 0.09096192169189453, 0.09110835266113282, 0.0912701416015625, 0.09147187042236328, 0.09116178894042969, 0.0910442886352539, 0.09148358154296875, 0.09136758422851562, 0.0914480972290039, 0.09089228820800781, 0.09090025329589843, 0.0907532501220703, 0.09086361694335937, 0.0908985595703125, 0.09139363098144532, 0.0926080322265625, 0.0912616958618164, 0.09086361694335937, 0.09307545471191406, 0.0923873291015625, 0.0912015380859375, 0.09089842987060547, 0.09098649597167968, 0.09106227111816406, 0.0911231689453125, 0.09150643157958985, 0.09171430206298828, 0.09159798431396485, 0.09151577758789063, 0.09173197174072266, 0.09149030303955077, 0.0916126708984375, 0.09130207824707032, 0.09121004486083985, 0.0912916488647461, 0.0909716796875, 0.09152764892578125, 0.09156377410888672, 0.09150899505615234, 0.0911646728515625, 0.09137152099609375, 0.09150569915771484, 0.09099894714355469, 0.09095388793945312, 0.09090694427490234, 0.0907236785888672, 0.09104486083984376, 0.091166015625, 0.09154220581054688, 0.0929095687866211, 0.09123430633544923, 0.09103974151611328, 0.09406873321533203, 0.09169510650634766, 0.09089228820800781, 0.09073049926757812, 0.09091645050048829, 0.09083875274658203, 0.09077830505371094, 0.09155315399169922, 0.09135443115234375, 0.09139590454101562, 0.0920421142578125, 0.09108201599121094, 0.09088387298583984, 0.09130079650878906, 0.09092198181152343, 0.09079910278320312, 0.09101516723632813, 0.09145516967773437, 0.09107603454589844, 0.09103603363037109, 0.09155177307128906, 0.09101564788818359, 0.09093500518798828, 0.09166838073730468, 0.09241795349121094, 0.09089826965332032, 0.09105251312255859, 0.09072246551513671, 
0.09151026916503906, 0.09119744110107422, 0.09101158142089844, 0.09147392272949219, 0.0912437744140625, 0.09157913970947265, 0.09074483489990234, 0.09129901123046875, 0.09106924438476563, 0.09120563507080078, 0.09104179382324219, 0.09123020935058594, 0.0912363510131836, 0.0913194580078125, 0.09168367767333985, 0.09108214569091796, 0.09504828643798828, 0.09102928161621093, 0.09124066925048828, 0.09115843200683593, 0.09113330841064453, 0.09105788421630859, 0.09151392364501953, 0.09168816375732422, 0.09207881927490234, 0.09118041229248047, 0.09138444519042968, 0.09114419555664062, 0.09104815673828125, 0.09155356597900391, 0.0911890869140625, 0.09082070159912109, 0.091274658203125, 0.09125513458251953, 0.09132064056396484, 0.09186656188964844, 0.09155129241943359, 0.0911876449584961, 0.09149702453613281, 0.09157017517089844, 0.09132032012939453, 0.09111737823486328, 0.09215634918212891, 0.09143628692626952, 0.09163404846191406, 0.09297481536865235, 0.09130252838134766, 0.09190198516845703, 0.09134687805175781, 0.0913977279663086, 0.09229564666748047, 0.09110022735595703, 0.09132332611083985, 0.09160704040527344, 0.091864990234375, 0.09206607818603516, 0.0919959716796875, 0.09191551971435546, 0.0910282211303711, 0.09145139312744141, 0.09116451263427734, 0.09117507171630859, 0.09133401489257813, 0.09171353912353515, 0.09109158325195313, 0.09157004547119141, 0.09176687622070312, 0.0924078369140625, 0.09249132537841796, 0.09224153900146484, 0.09149830627441406, 0.09162854766845703, 0.09162957000732422, 0.09150259399414062, 0.09168486022949218, 0.09148204803466797, 0.0917484130859375, 0.0954730224609375, 0.09164816284179687, 0.09550486755371093, 0.09378406524658203, 0.09106227111816406, 0.09114828491210937, 0.09126502227783204, 0.09088204956054688, 0.09120262145996094, 0.09147420501708985, 0.09390457916259766, 0.09163014221191407, 0.09161126708984375, 0.092776611328125, 0.09154886627197266, 0.09128195190429687, 0.09124291229248047, 0.09169305419921875, 0.0915763168334961, 0.09141251373291015, 0.09161491394042968, 0.09125507354736329, 0.09124358367919921, 0.09170630645751954, 0.09224524688720703, 0.0925928955078125, 0.09237014770507812, 0.09473308563232422, 0.09155532836914063, 0.09132025909423828, 0.09135116577148437, 0.09134489440917969, 0.09273753356933594, 0.0916719970703125, 0.09180524444580078, 0.09238159942626953, 0.0911632308959961, 0.09119964599609375, 0.09139183807373047, 0.09142476654052735, 0.09123165130615235, 0.09141449737548828, 0.09150323486328125, 0.0915599365234375, 0.09178316497802734, 0.09139574432373047, 0.09214396667480469, 0.09131827545166016, 0.09147545623779296, 0.09121539306640625, 0.09132495880126953, 0.09254137420654297, 0.09155165100097656, 0.09157440185546875, 0.09209548950195312, 0.09157068634033202, 0.0916812515258789, 0.09183001708984374, 0.09149260711669922, 0.09128896331787109, 0.09111561584472656, 0.09099318695068359, 0.0914713592529297, 0.09140070343017578, 0.09201875305175782, 0.09172166442871094, 0.09267135620117188, 0.09158262634277343, 0.09192700958251954, 0.09173538970947266, 0.0922438735961914, 0.09197357177734375, 0.09191506958007813, 0.09148361968994141, 0.09170159912109375, 0.09190009307861328, 0.09362226867675781, 0.0915881576538086, 0.09177951812744141, 0.09153126525878906, 0.09207603454589844, 0.0917606430053711, 0.09158246612548829, 0.0916025619506836, 0.09159465789794922, 0.09183679962158203, 0.09281954956054687, 0.09229039764404297, 0.09245916748046876, 0.09154528045654296, 0.09162220764160156, 0.09280307006835938, 0.09161702728271484, 
0.09172451019287109, 0.09146163177490234, 0.09149174499511718, 0.0919106216430664, 0.0929649887084961, 0.10208870697021484, 0.09157395172119141, 0.09124486541748048, 0.09162681579589843, 0.09256816101074218, 0.09122796630859376, 0.09124406433105468, 0.0915709457397461, 0.0914717788696289, 0.09580963134765624, 0.09325977325439454, 0.09484697723388671, 0.09157017517089844, 0.09122611236572266, 0.09155174255371094, 0.09141657257080078, 0.0913469467163086, 0.09132649230957031, 0.09144303894042968, 0.0912569580078125, 0.09155174255371094, 0.09245388793945312, 0.09273856353759766, 0.09154150390625, 0.0913850555419922, 0.09153388977050782, 0.09168230438232422, 0.09157065582275391, 0.09149874877929688, 0.09155363464355469, 0.09136511993408203, 0.09176207733154297, 0.09253699493408203, 0.09155443572998047, 0.09116079711914063, 0.09450291442871094, 0.09160851287841797, 0.09159686279296875, 0.09114441680908203, 0.0914986572265625, 0.09136319732666015, 0.0913226547241211, 0.09152098846435547, 0.09249606323242188, 0.09162322998046875, 0.09114192199707032, 0.09142499542236328, 0.09149644470214843, 0.09177414703369141, 0.09155872344970703, 0.09136089324951172, 0.09134349060058594, 0.09143270111083984, 0.09178028869628906, 0.09290815734863281, 0.09149855804443359, 0.0915882568359375, 0.09114463806152344, 0.09143500518798828, 0.0912008285522461, 0.09131676483154297, 0.09135734558105468, 0.09136729431152343, 0.09164403533935547, 0.0925511703491211, 0.091580322265625, 0.09145558166503906, 0.09127903747558594, 0.0912059555053711, 0.09146473693847657, 0.09175740814208984, 0.09146790313720703, 0.09135651397705079, 0.09148477172851563, 0.09165625762939453, 0.09259008026123047, 0.09121916961669922, 0.09130681610107422, 0.09152102661132812, 0.09129571533203125, 0.09225421142578125, 0.09177907562255859, 0.09148320007324219, 0.09155244445800781, 0.09129558563232422, 0.09320406341552734, 0.09299231719970703, 0.0911994857788086, 0.09100697326660157, 0.09116441345214844, 0.09100313568115234, 0.0913667221069336, 0.09121453094482422, 0.09153740692138672, 0.09149644470214843, 0.09142784118652343, 0.09226956939697266, 0.09181375885009765, 0.09101324462890625, 0.09115647888183594, 0.09132582092285156, 0.091432861328125, 0.0911858901977539, 0.09122611236572266, 0.09155554962158204, 0.09122434997558594, 0.09140227508544922, 0.09223926544189454, 0.09208233642578124, 0.09130435180664062, 0.09111961364746093, 0.09105948638916016, 0.09115248107910157, 0.09035804748535156, 0.09042569732666016, 0.09139539337158203, 0.09192108917236329, 0.09335807800292968, 0.09484425354003906, 0.09648579406738281, 0.09128253173828126, 0.09142169952392579, 0.091074462890625, 0.09104179382324219, 0.0915163803100586, 0.09178160095214843, 0.0914097900390625, 0.09160147094726563, 0.09292924499511719, 0.09234441375732422, 0.09159961700439453, 0.09132857513427735, 0.09152912139892579, 0.09131222534179688, 0.09134432220458985, 0.0913389434814453, 0.0915889892578125, 0.09137561798095703, 0.09145101165771484, 0.0928133773803711, 0.09161350250244141, 0.09129574584960938, 0.09777935791015625, 0.09183881378173828, 0.09174015808105469, 0.09117206573486328, 0.09116957092285156, 0.09114364624023437, 0.09183193969726562, 0.09127375793457031, 0.09263961791992187, 0.0923135986328125, 0.09157331085205078, 0.09123462677001953, 0.09138649749755859, 0.09234207916259765, 0.09147142028808594, 0.09124025726318359, 0.09135801696777343, 0.09149644470214843, 0.09140633392333984, 0.09251004791259766, 0.09369615936279296, 0.09129357147216798, 0.09095497894287109, 
0.09082495880126953, 0.09095449829101562, 0.09106832122802734, 0.09114975738525391, 0.09121440124511719, 0.09120703887939453, 0.09109568023681641, 0.09212214660644531, 0.09143116760253907, 0.09130262756347657, 0.09116015625, 0.09115647888183594, 0.09112207794189453, 0.09100275421142578, 0.09113142395019531, 0.09122876739501953, 0.09140019226074218, 0.09141571044921876, 0.09109990692138673, 0.09167462158203125, 0.09085932922363281, 0.09110044860839844, 0.0907779541015625, 0.09071372985839844, 0.09114220428466797, 0.09123040008544922, 0.09127568054199219, 0.09256492614746094, 0.09240969848632813, 0.09115673828125, 0.09091558074951171, 0.09090428924560547, 0.09075440216064454, 0.09084614562988282, 0.09074195098876953, 0.09105696105957031, 0.09110323333740235, 0.09146502685546876, 0.09107875061035156, 0.09274838256835938, 0.09093734741210938, 0.09108889770507812, 0.09109442901611328, 0.09221385955810547, 0.09095366668701171, 0.0910003204345703, 0.09097068786621093, 0.09112339019775391, 0.09117932891845704, 0.09118425750732422, 0.09328742218017579, 0.09166371154785156, 0.09127938842773438, 0.09130854034423828, 0.09133670043945312, 0.09136946868896484, 0.09135894775390625, 0.09135939025878906, 0.09111769866943359, 0.09112397003173828, 0.0946644515991211, 0.09778176116943359, 0.0924443817138672, 0.0913017578125, 0.09132278442382813, 0.09110905456542968, 0.09114450836181641, 0.09192470550537109, 0.0915167007446289, 0.09163571166992188, 0.09121289825439453, 0.09204351806640625, 0.09281791687011719, 0.09130614471435547, 0.09097398376464844, 0.09130166625976563, 0.09133715057373047, 0.0911278076171875, 0.09105923461914063, 0.091231201171875, 0.09118924713134766]",tokens/s,10.909517736639202,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model 
attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 513623 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( 
File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, 
tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 511869 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained 
return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 570210 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1335.07072,1100.873728,0.0,698.351616,690.178048,s,1,8.3848916015625,8.3848916015625,0.0,8.3848916015625,8.3848916015625,8.3848916015625,8.3848916015625,[8.3848916015625],,kWh,4.0510835400073117e-05,4.461366105970132e-06,1.3225566136004785e-05,5.819776764204803e-05,,MB,1499.05408,1415.446528,0.0,1000.341504,957.77792,s,10,0.6307609863281249,0.0630760986328125,0.0005004450124313797,0.06297587203979492,0.0636538803100586,0.06383413848876954,0.06397834503173828,"[0.0634854736328125, 0.06264774322509765, 0.06361382293701172, 0.06313488006591797, 0.06281686401367187, 0.06401439666748047, 0.06338217544555665, 0.06259734344482422, 0.06249641418457031, 0.06257187271118164]",tokens/s,4058.589632980686,kWh,1.8095895846967692e-06,1.995658850494194e-07,1.2030565180000174e-06,3.212211987746206e-06,tokens/kWh,79695860.97573158,MB,1532.239872,1430.126592,0.0,1012.924416,957.78048,s,10,27.895415771484373,2.7895415771484378,0.022313390263139197,2.78100048828125,2.819357275390625,2.8249354736328125,2.8293980322265626,"[2.782301025390625, 2.77834716796875, 2.779699951171875, 2.81811767578125, 2.80281005859375, 2.830513671875, 2.804597412109375, 2.76758544921875, 2.755990478515625, 
2.775452880859375]",tokens/s,22.58435598023984,kWh,8.080351875529555e-05,8.912539358666396e-06,3.1167969378799055e-05,0.000120884027492761,tokens/kWh,521160.6637094605,,s,630,27.893164356231683,0.044274864057510625,0.000830198723489205,0.04409958267211914,0.045046097183227544,0.045258677864074705,0.04793188671112061,"[0.04443337631225586, 0.044561279296875, 0.04432467269897461, 0.04445750427246094, 0.04412483215332031, 0.044230655670166014, 0.044350753784179686, 0.04410646438598633, 0.04403126525878906, 0.043993473052978516, 0.043977054595947265, 0.04436809539794922, 0.04414031982421875, 0.044763137817382816, 0.04407660675048828, 0.045470142364501955, 0.04466457748413086, 0.044454143524169924, 0.0449285774230957, 0.045456993103027345, 0.04471388626098633, 0.04465580749511719, 0.04401331329345703, 0.04409958267211914, 0.04391526412963867, 0.04402380752563476, 0.04370636749267578, 0.043872257232666016, 0.043655105590820316, 0.043818431854248045, 0.043924095153808594, 0.043952350616455076, 0.04404374313354492, 0.04505427169799805, 0.04403545761108398, 0.04407759857177734, 0.04420550537109375, 0.04448044967651367, 0.04407164764404297, 0.043892288208007814, 0.0438502082824707, 0.044152191162109375, 0.044127838134765625, 0.04422348785400391, 0.043784000396728515, 0.04367379379272461, 0.04347638320922852, 0.04670320129394531, 0.04434124755859375, 0.04404207992553711, 0.04363427352905273, 0.043813438415527345, 0.043581439971923826, 0.04366236877441406, 0.043727134704589846, 0.043679519653320314, 0.04397558212280273, 0.043741119384765624, 0.04376556777954101, 0.0436861457824707, 0.043692062377929684, 0.04385993576049805, 0.043747329711914064, 0.04483251190185547, 0.04451193618774414, 0.044055999755859374, 0.04414630508422852, 0.04353734588623047, 0.043747329711914064, 0.043665374755859375, 0.04376579284667969, 0.0436321907043457, 0.043705886840820315, 0.04374825668334961, 0.04373503875732422, 0.04380057525634766, 0.04389292907714844, 0.04398060989379883, 0.04472422409057617, 0.043976703643798826, 0.04414883041381836, 0.0440544319152832, 0.04434534454345703, 0.04413030242919922, 0.04401561737060547, 0.04447747039794922, 0.043698623657226564, 0.043956768035888674, 0.044386302947998044, 0.04452758407592773, 0.04405855941772461, 0.04413040161132813, 0.044273662567138675, 0.04414486312866211, 0.04405635070800781, 0.04406272125244141, 0.04409958267211914, 0.04380051040649414, 0.043814399719238284, 0.04430486297607422, 0.044103294372558596, 0.04395574569702149, 0.04386912155151367, 0.043863422393798826, 0.04425689697265625, 0.04414976119995117, 0.04417695999145508, 0.0445546875, 0.04428595352172852, 0.0449249267578125, 0.044578815460205076, 0.04418332672119141, 0.04421376037597656, 0.044009281158447267, 0.04390121459960938, 0.04385036849975586, 0.04382230377197266, 0.043881248474121094, 0.04386406326293945, 0.043831230163574215, 0.043966014862060546, 0.043990623474121096, 0.044068031311035157, 0.0442509765625, 0.0449977912902832, 0.0445810546875, 0.04498006439208985, 0.044808353424072266, 0.044394432067871095, 0.04446623992919922, 0.04417536163330078, 0.04411779022216797, 0.044160350799560544, 0.04422540664672851, 0.04405392074584961, 0.043899486541748044, 0.04385327911376953, 0.04428035354614258, 0.04451913452148438, 0.04436764907836914, 0.04427417755126953, 0.04405657577514648, 0.04400435256958008, 0.04400406265258789, 0.044386592864990235, 0.043862014770507815, 0.04396236801147461, 0.04383916854858398, 0.04426169586181641, 0.045072288513183595, 0.04460553741455078, 0.04415283203125, 
0.04377724838256836, 0.04413462448120117, 0.04390095901489258, 0.04404278564453125, 0.044108959197998045, 0.04390371322631836, 0.04411129760742188, 0.04362617492675781, 0.04381740951538086, 0.0436506233215332, 0.04382371139526367, 0.04340163040161133, 0.04343603134155274, 0.043566368103027345, 0.04374192047119141, 0.04381081771850586, 0.04390908813476563, 0.043925537109375, 0.04426137542724609, 0.0441343994140625, 0.044410240173339846, 0.043855934143066405, 0.04430207824707031, 0.04386198425292969, 0.04456057739257813, 0.043797439575195315, 0.043887775421142576, 0.04400595092773438, 0.0443263053894043, 0.04470230484008789, 0.04413411331176758, 0.044297889709472654, 0.04435977554321289, 0.044880416870117186, 0.04422246551513672, 0.04398486328125, 0.043999038696289065, 0.04474579238891602, 0.044473281860351564, 0.044584991455078125, 0.04476924896240234, 0.04449689483642578, 0.04471603012084961, 0.046010303497314456, 0.04860281753540039, 0.04453004837036133, 0.04435500717163086, 0.04447452926635742, 0.04420854568481445, 0.04427571105957031, 0.04533657455444336, 0.0444513931274414, 0.048056671142578125, 0.044267520904541016, 0.04395657730102539, 0.04407065582275391, 0.04388633728027344, 0.04412172698974609, 0.044160865783691404, 0.04459600067138672, 0.044218112945556644, 0.044453662872314455, 0.04450886535644531, 0.04461363220214844, 0.044371871948242186, 0.0453842544555664, 0.053657726287841795, 0.049856704711914064, 0.04453696060180664, 0.044378528594970705, 0.04408550262451172, 0.04407523345947266, 0.044298240661621094, 0.04484844970703125, 0.044413631439208984, 0.04435897445678711, 0.04413100814819336, 0.04417740631103516, 0.044025665283203126, 0.04431827163696289, 0.044518016815185545, 0.04458700942993164, 0.04432076644897461, 0.04426342391967773, 0.044447742462158206, 0.04450310516357422, 0.04422572708129883, 0.04425804901123047, 0.04426342391967773, 0.04419705581665039, 0.04439043045043945, 0.04422326278686523, 0.044020801544189456, 0.04400608062744141, 0.044079166412353515, 0.04393798446655273, 0.0441354866027832, 0.04425823974609375, 0.04370841598510742, 0.04367769622802734, 0.04439577484130859, 0.04424576187133789, 0.04421862411499024, 0.04439126586914063, 0.04465119934082031, 0.04457494354248047, 0.04468121719360352, 0.04476835250854492, 0.0447476806640625, 0.0450437126159668, 0.04385612869262695, 0.04417715072631836, 0.04376931381225586, 0.04377654266357422, 0.04383478546142578, 0.046744159698486325, 0.044185344696044924, 0.04416864013671875, 0.04413113784790039, 0.044101024627685545, 0.04377660751342773, 0.04386159896850586, 0.0438656005859375, 0.04432195281982422, 0.04406175994873047, 0.04436038589477539, 0.043976222991943356, 0.04410416030883789, 0.04424233627319336, 0.044036705017089846, 0.043990943908691404, 0.044099231719970704, 0.04411177444458008, 0.0441671028137207, 0.044281822204589844, 0.04399577713012695, 0.04400515365600586, 0.043968734741210935, 0.04398899078369141, 0.04406841659545899, 0.04435734558105469, 0.046226142883300785, 0.04523968124389648, 0.045103744506835936, 0.04510310363769531, 0.0450437126159668, 0.04514022445678711, 0.045098751068115235, 0.044867008209228516, 0.0449889907836914, 0.04500822448730469, 0.044987041473388674, 0.04460358428955078, 0.044607295989990234, 0.044514720916748046, 0.04425100708007813, 0.04438025665283203, 0.04556556701660156, 0.0444323844909668, 0.044424705505371094, 0.04491676712036133, 0.04508422470092773, 0.044886943817138675, 0.045510047912597655, 0.0453388786315918, 0.04507648086547852, 0.04510515213012695, 
0.04488924789428711, 0.04538864135742188, 0.04515139389038086, 0.04518998336791992, 0.045072383880615234, 0.04508262252807617, 0.045139232635498044, 0.04512432098388672, 0.04504576110839844, 0.04504912185668945, 0.04520214462280273, 0.04526620864868164, 0.04524627304077149, 0.045332897186279295, 0.04542105484008789, 0.04516659164428711, 0.04494710540771484, 0.044970401763916014, 0.044824447631835934, 0.045217376708984375, 0.044445568084716794, 0.04481903839111328, 0.04489532852172851, 0.045249473571777346, 0.044934814453125, 0.04519353485107422, 0.045129726409912106, 0.04522364807128906, 0.044740478515625, 0.044683647155761716, 0.04484499359130859, 0.04443094253540039, 0.04437395095825195, 0.04430227279663086, 0.04462790298461914, 0.04424774551391602, 0.044434814453125, 0.044345985412597655, 0.044230815887451175, 0.04414243316650391, 0.0440975341796875, 0.044125503540039065, 0.044227264404296876, 0.04428799819946289, 0.04445798492431641, 0.04435148620605469, 0.04505190277099609, 0.04529929733276367, 0.044980640411376956, 0.04487168121337891, 0.04506179046630859, 0.046536865234375, 0.04604127883911133, 0.04539801788330078, 0.044991680145263675, 0.04526956939697266, 0.045201087951660154, 0.044560638427734375, 0.044426624298095706, 0.04447420883178711, 0.04442764663696289, 0.04419935989379883, 0.04430624008178711, 0.04413721466064453, 0.04454172897338867, 0.04467238235473633, 0.04454051208496094, 0.04482643127441406, 0.04477177429199219, 0.04506998443603515, 0.04495600128173828, 0.045178878784179685, 0.044857086181640624, 0.04534092712402344, 0.045004798889160154, 0.04493449783325195, 0.04521846389770508, 0.04482182312011719, 0.044931774139404294, 0.0447567024230957, 0.0445604476928711, 0.04428972625732422, 0.043907615661621095, 0.04387635040283203, 0.04376780700683594, 0.04375551986694336, 0.043720703125, 0.04396851348876953, 0.04396464157104492, 0.043994270324707034, 0.044329601287841795, 0.04385919952392578, 0.04395084762573242, 0.043952350616455076, 0.04392240142822266, 0.04424275207519531, 0.0442501106262207, 0.044181503295898435, 0.04393363189697266, 0.043882560729980466, 0.04391731262207031, 0.04403936004638672, 0.04399420928955078, 0.04391088104248047, 0.04401347351074219, 0.05187388610839844, 0.04426259231567383, 0.04391231918334961, 0.04383679962158203, 0.04380847930908203, 0.044124767303466796, 0.043954177856445314, 0.044266654968261716, 0.04436873626708984, 0.043879489898681644, 0.044182273864746095, 0.044023841857910154, 0.04445148849487305, 0.045871231079101564, 0.045017406463623046, 0.04794547271728516, 0.04446419143676758, 0.04408339309692383, 0.04406476974487305, 0.044119937896728516, 0.043840862274169924, 0.04398979187011719, 0.04374937438964844, 0.0437391357421875, 0.04376764678955078, 0.043740734100341794, 0.04384419250488281, 0.04394985580444336, 0.04416124725341797, 0.044873504638671874, 0.04545558547973633, 0.044316673278808595, 0.0439496955871582, 0.043966846466064455, 0.04375164794921875, 0.04387395095825195, 0.04385318374633789, 0.04381977462768555, 0.0436484489440918, 0.04367174530029297, 0.04365897750854492, 0.043536224365234376, 0.04414505767822265, 0.04389270401000977, 0.04359836959838867, 0.04363824081420899, 0.04365283203125, 0.043737823486328126, 0.043576961517333986, 0.04386649703979492, 0.043609729766845705, 0.044375553131103515, 0.04397673416137695, 0.04369289779663086, 0.04363689422607422, 0.04363030242919922, 0.04374070358276367, 0.044397151947021485, 0.04374435043334961, 0.04341385650634766, 0.04340934371948242, 0.04341775894165039, 0.04352201461791992, 
0.043678207397460936, 0.04901385498046875, 0.044037025451660154, 0.043870208740234375, 0.04364204788208008, 0.04356294250488281, 0.043522655487060545, 0.04344863891601562, 0.04361008071899414, 0.04364847946166992, 0.043956768035888674, 0.04374528121948242, 0.04426943969726563, 0.043726974487304685, 0.04383465576171875, 0.04380361557006836, 0.043901790618896486, 0.04417603302001953, 0.043595775604248044, 0.04350566482543945, 0.043595775604248044, 0.04360105514526367, 0.04358220672607422, 0.04366950225830078, 0.0438021125793457, 0.043725025177001955, 0.04358591842651367, 0.043687934875488284, 0.04351795196533203, 0.04373299026489258, 0.043837440490722655, 0.04357120132446289, 0.04389068984985352, 0.04375676727294922, 0.044085823059082034, 0.0439169921875, 0.043517566680908205, 0.04388137435913086, 0.04373708724975586, 0.047898624420166014, 0.04382310485839844, 0.043648414611816407, 0.04351036834716797, 0.043585025787353515, 0.043420097351074216, 0.04358950424194336, 0.04383718490600586, 0.04395161437988281, 0.044065055847167967, 0.043750049591064454, 0.04439244842529297, 0.04338828659057617, 0.043487873077392575, 0.04368124771118164, 0.04356764984130859, 0.04378201675415039, 0.043552833557128905, 0.04356937789916992, 0.04395401763916015, 0.04403302383422852, 0.04352716827392578, 0.043703617095947264, 0.04393753433227539, 0.04407596969604492, 0.04333158493041992, 0.0434524154663086, 0.0433807373046875, 0.04338822555541992, 0.04341420745849609, 0.04406233596801758, 0.043759998321533204, 0.04357654571533203, 0.04331760025024414, 0.0432603530883789, 0.0433520622253418, 0.04335615921020508, 0.04335615921020508, 0.04341667175292969, 0.043786270141601566, 0.0438711051940918, 0.044521823883056644, 0.044115486145019533, 0.04398912048339844, 0.04455014419555664, 0.043954143524169924, 0.04372873687744141, 0.04376598358154297, 0.04402377700805664, 0.04365856170654297, 0.043897537231445315, 0.04371779251098633, 0.04432799911499023, 0.04403948974609375, 0.04408294296264648, 0.04381087875366211, 0.043663841247558594, 0.043724990844726565, 0.04378316879272461, 0.04405516815185547, 0.044222015380859375, 0.04426335906982422, 0.04397100830078125, 0.04419833755493164, 0.04441859054565429, 0.045521377563476566, 0.044240894317626955, 0.04399318313598633, 0.04428995132446289, 0.04399411010742187, 0.0439285774230957, 0.04373203277587891, 0.043780895233154295, 0.04407107162475586, 0.04385292816162109, 0.04462886428833008, 0.044060672760009766, 0.044109825134277345, 0.04381039810180664, 0.04395600128173828, 0.043898719787597656, 0.04378121566772461, 0.043892288208007814, 0.044193279266357424, 0.04406131362915039, 0.04408115386962891, 0.04402175903320313, 0.04401708984375, 0.044687934875488285, 0.04449689483642578, 0.044103134155273435, 0.044055072784423825, 0.043819007873535154, 0.04376732635498047, 0.04387606430053711, 0.04388236618041992, 0.04417011260986328, 0.04391139221191406, 0.04443840026855469, 0.04398172760009766, 0.043797569274902345, 0.04402067184448242, 0.04383286285400391, 0.044004993438720705]",tokens/s,22.586178891505003,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, 
**kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 518810 has 14.71 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 28.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 520488 has 14.71 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 28.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,9972.55168,6201.147392,0.0,5798.62528,5404.427264,s,1,20.8832421875,20.8832421875,0.0,20.8832421875,20.8832421875,20.8832421875,20.8832421875,[20.8832421875],,kWh,0.00039959976099168367,4.4069850962959136e-05,0.0001367934427679851,0.0005804630547226279,,MB,5853.335552,6501.040128,0.0,6077.546496,5755.126784,s,10,1.5826687774658204,0.15826687774658205,0.001100428725995944,0.1579693603515625,0.1585470718383789,0.16002204818725585,0.16120202926635743,"[0.1575777587890625, 0.1581375732421875, 0.1579248046875, 0.15750950622558593, 0.158013916015625, 0.15816134643554688, 0.15786553955078125, 0.1577620086669922, 0.16149702453613282, 0.15821929931640624]",tokens/s,1617.5210103652191,kWh,4.651753762500784e-06,5.130059422170315e-07,2.5391995622219418e-06,7.703959266939757e-06,tokens/kWh,33229666.867344543,MB,5857.562624,6505.234432,0.0,6079.643648,5755.129344,s,10,96.165978515625,9.6165978515625,0.01583274783621295,9.61705126953125,9.636153125,9.641619042968749,9.645991777343749,"[9.6349384765625, 9.62454296875, 9.603048828125, 9.622037109375, 9.6470849609375, 9.5940166015625, 9.59794140625, 9.608265625, 9.621341796875, 9.6127607421875]",tokens/s,6.55117339546062,kWh,0.00027933446773959196,3.0812123854251984e-05,0.00011067980767958017,0.00042082639927342407,tokens/kWh,149705.4369896289,,s,630,96.16306784057619,0.15263979022313676,0.0012389924215209494,0.15237834167480468,0.1536020004272461,0.15480113906860352,0.15832239013671878,"[0.15411190795898438, 0.15243385314941407, 0.15205673217773438, 0.15195103454589845, 0.15265155029296876, 0.15238723754882813, 0.15175552368164064, 0.15354071044921874, 0.15179798889160157, 0.1527846984863281, 0.1527869415283203, 0.1522483215332031, 0.15292997741699219, 0.15255494689941407, 0.151980224609375, 0.15186399841308593, 0.15180905151367188, 0.15265303039550782, 0.1516043243408203, 0.152476318359375, 0.15519456481933594, 0.1526444091796875, 0.15256985473632811, 0.15266201782226563, 0.15238966369628906, 0.15276028442382814, 0.15707350158691405, 0.15497001647949218, 0.1524469757080078, 0.15245106506347655, 0.152985595703125, 0.1525328369140625, 0.152044921875, 0.15270509338378907, 0.1518531494140625, 0.1525807342529297, 0.1521743927001953, 0.15235910034179687, 0.15233229064941406, 0.15355699157714844, 0.1538621368408203, 0.15573196411132811, 0.15340655517578125, 0.15326710510253908, 0.15314096069335936, 0.15358102416992186, 0.15336349487304687, 0.1528563537597656, 0.15784141540527344, 0.1524408264160156, 0.1526720275878906, 0.15244720458984376, 0.15239987182617187, 0.15275827026367186, 0.15639961242675782, 0.1528005828857422, 0.1519517059326172, 0.15241786193847656, 0.1521917724609375, 
0.1532742004394531, 0.1522545623779297, 0.1527972412109375, 0.152131103515625, 0.15282345581054688, 0.15307037353515626, 0.15232400512695313, 0.15193292236328124, 0.15229747009277345, 0.15233558654785156, 0.15308470153808593, 0.15223330688476563, 0.15367645263671875, 0.15418777465820313, 0.1527574462890625, 0.15227987670898438, 0.15242364501953126, 0.15279376220703125, 0.1525984344482422, 0.15196917724609374, 0.15255839538574217, 0.15257804870605468, 0.15250022888183593, 0.15313920593261718, 0.15269273376464843, 0.15344374084472656, 0.15249058532714843, 0.15267146301269532, 0.15297001647949218, 0.15277874755859375, 0.15322927856445312, 0.154295654296875, 0.1589208984375, 0.1535431671142578, 0.15214591979980469, 0.1522178955078125, 0.152267578125, 0.15210614013671875, 0.1519920654296875, 0.15188954162597657, 0.15247193908691406, 0.15184268188476563, 0.15203910827636719, 0.1522090301513672, 0.15184979248046876, 0.151944580078125, 0.15224447631835938, 0.1541059265136719, 0.151754150390625, 0.15257852172851563, 0.15256343078613283, 0.15229411315917968, 0.15374668884277343, 0.15241238403320312, 0.15306915283203126, 0.15213226318359374, 0.1524381103515625, 0.1522484130859375, 0.15176390075683593, 0.15958416748046875, 0.15257804870605468, 0.15241165161132814, 0.1520679931640625, 0.1522983093261719, 0.15199209594726562, 0.15192848205566406, 0.15244236755371093, 0.1518385009765625, 0.152538818359375, 0.1523201904296875, 0.15194979858398439, 0.15177072143554687, 0.1520205078125, 0.1519420166015625, 0.1515123596191406, 0.15252735900878905, 0.15170127868652344, 0.15352284240722655, 0.15175071716308594, 0.1523299560546875, 0.15196957397460936, 0.1525638427734375, 0.15262319946289063, 0.15215548706054688, 0.1521691131591797, 0.15139791870117186, 0.15285501098632812, 0.15200387573242188, 0.1523814697265625, 0.15197174072265626, 0.15302120971679686, 0.15166998291015624, 0.15213970947265626, 0.15223196411132814, 0.15202397155761718, 0.15238134765625, 0.15718809509277343, 0.15479158020019532, 0.151568603515625, 0.16017010498046874, 0.1518919677734375, 0.15251046752929687, 0.15164559936523436, 0.1519858856201172, 0.15166928100585939, 0.15198153686523438, 0.1519862060546875, 0.15203330993652345, 0.15245510864257814, 0.15234751892089843, 0.1515888671875, 0.15484112548828124, 0.1519267578125, 0.15206137084960938, 0.15243116760253905, 0.15205328369140625, 0.15194160461425782, 0.15133663940429687, 0.15175491333007812, 0.15121189880371094, 0.15223837280273436, 0.15231126403808593, 0.15183926391601563, 0.15147811889648438, 0.15690476989746094, 0.1517777862548828, 0.1521946563720703, 0.1517002258300781, 0.1515335693359375, 0.15212339782714843, 0.1517154541015625, 0.15182194519042969, 0.15136160278320313, 0.1522318420410156, 0.15189170837402344, 0.15181497192382812, 0.15202508544921875, 0.15172419738769533, 0.15187936401367189, 0.15232989501953126, 0.1528260498046875, 0.15200624084472655, 0.1528973388671875, 0.15150582885742186, 0.15163932800292967, 0.15175958251953126, 0.15162319946289063, 0.1522200927734375, 0.15209405517578126, 0.15304774475097657, 0.15150083923339844, 0.15241416931152343, 0.1513861083984375, 0.15261695861816407, 0.1556842803955078, 0.15304336547851563, 0.15223004150390626, 0.1522626495361328, 0.15205325317382812, 0.1519825897216797, 0.15237120056152345, 0.16388330078125, 0.15316758728027344, 0.15342335510253907, 0.15301213073730469, 0.1524455108642578, 0.15248527526855468, 0.15387705993652342, 0.152755615234375, 0.1522571258544922, 0.15312217712402343, 0.15240570068359374, 0.15248077392578124, 
0.15315560913085938, 0.152666015625, 0.1533891143798828, 0.15454147338867188, 0.15185366821289062, 0.15235481262207032, 0.15230511474609376, 0.1529217529296875, 0.1531619873046875, 0.15219161987304688, 0.15320474243164062, 0.15231353759765626, 0.15282003784179687, 0.15236863708496093, 0.15322367858886718, 0.15259567260742188, 0.15560989379882811, 0.15245872497558594, 0.15199696350097655, 0.15334144592285157, 0.15298822021484376, 0.15235481262207032, 0.1528403778076172, 0.15274783325195312, 0.15200665283203124, 0.15246739196777342, 0.15382534790039062, 0.15299711608886718, 0.15245184326171876, 0.15257737731933593, 0.15333235168457032, 0.1535957794189453, 0.15400711059570313, 0.1551099853515625, 0.15309564208984375, 0.1530959014892578, 0.1529630126953125, 0.15325888061523438, 0.1527541809082031, 0.15438233947753907, 0.15352610778808592, 0.15321641540527345, 0.152980224609375, 0.15239138793945312, 0.15254147338867188, 0.15266816711425782, 0.1532743682861328, 0.1529712677001953, 0.15230313110351562, 0.15248757934570312, 0.1534718017578125, 0.15193026733398438, 0.1569931182861328, 0.15367878723144532, 0.15255967712402344, 0.15263253784179687, 0.15234332275390625, 0.1563802947998047, 0.15268544006347656, 0.15451954650878907, 0.15232835388183594, 0.15248162841796875, 0.15192169189453125, 0.15241871643066407, 0.1518843536376953, 0.15326409912109376, 0.15190223693847657, 0.15487986755371094, 0.15237734985351561, 0.15226876831054686, 0.15250674438476564, 0.15203919982910155, 0.15327186584472657, 0.1521094970703125, 0.15291596984863282, 0.1526824951171875, 0.15215945434570313, 0.1524083251953125, 0.15222224426269532, 0.15869541931152345, 0.1561201629638672, 0.15270953369140625, 0.15282803344726562, 0.15250408935546875, 0.15380674743652345, 0.15231884765625, 0.15269468688964843, 0.15248931884765626, 0.15256387329101562, 0.15265023803710936, 0.1531023406982422, 0.15368397521972657, 0.1522443542480469, 0.15260809326171876, 0.1525499267578125, 0.15222169494628907, 0.151787109375, 0.154304931640625, 0.15181135559082032, 0.15246937561035157, 0.1528647003173828, 0.15178839111328124, 0.15255462646484375, 0.15198713684082032, 0.15295855712890624, 0.15151759338378906, 0.15193011474609375, 0.15240780639648438, 0.15206675720214843, 0.1521626281738281, 0.15290162658691406, 0.1520283203125, 0.15169827270507813, 0.15163760375976562, 0.15187107849121093, 0.15238841247558593, 0.1537755126953125, 0.15466067504882813, 0.15186358642578124, 0.15114250183105468, 0.15202674865722657, 0.15205250549316407, 0.151598876953125, 0.15239949035644532, 0.1518839111328125, 0.15120172119140626, 0.15189369201660155, 0.151498779296875, 0.1518041229248047, 0.15191334533691406, 0.15116668701171876, 0.1547442169189453, 0.15173898315429687, 0.15135040283203124, 0.15192562866210937, 0.15132467651367187, 0.15199398803710937, 0.15191693115234375, 0.15177072143554687, 0.15131280517578125, 0.15152537536621094, 0.1522483215332031, 0.1514149475097656, 0.1526180725097656, 0.15244569396972657, 0.15394610595703126, 0.1521642608642578, 0.15301043701171874, 0.15293670654296876, 0.1525389099121094, 0.15197378540039064, 0.15238710021972657, 0.1521279296875, 0.15237359619140625, 0.15226812744140625, 0.1525166473388672, 0.15241075134277343, 0.15244908142089844, 0.1527355499267578, 0.15287513732910157, 0.15313920593261718, 0.1522277069091797, 0.15357539367675782, 0.15294172668457032, 0.15157302856445312, 0.1520840606689453, 0.15210330200195313, 0.1526727752685547, 0.15175689697265626, 0.15195065307617187, 0.152501953125, 0.15200553894042967, 
0.1520432586669922, 0.15177658081054687, 0.15218150329589844, 0.15581613159179689, 0.15209007263183594, 0.15191708374023438, 0.15288729858398437, 0.15203715515136718, 0.15228746032714843, 0.15872335815429686, 0.15203506469726563, 0.15183970642089845, 0.15190425109863281, 0.15170150756835937, 0.15258213806152343, 0.15296102905273437, 0.1519718475341797, 0.15215986633300782, 0.151365478515625, 0.15198873901367188, 0.15163973999023436, 0.15185125732421875, 0.15192489624023436, 0.1514998016357422, 0.15503231811523438, 0.15181216430664063, 0.15139234924316405, 0.15178306579589843, 0.151265625, 0.15233622741699218, 0.1518296661376953, 0.1517142333984375, 0.15166259765625, 0.15121875, 0.15177430725097657, 0.1515303955078125, 0.1518233947753906, 0.15617001342773437, 0.1522018280029297, 0.15170579528808595, 0.15168301391601563, 0.15210301208496094, 0.15174359130859374, 0.15260150146484375, 0.15156959533691405, 0.1515642852783203, 0.15194400024414062, 0.15200051879882812, 0.15170970153808594, 0.1517445068359375, 0.15224137878417968, 0.1517412109375, 0.15187686157226563, 0.1521848907470703, 0.15371746826171875, 0.15319036865234376, 0.15245516967773437, 0.15181199645996094, 0.15207843017578124, 0.15215312194824218, 0.15184690856933594, 0.15210389709472658, 0.15338102722167968, 0.15389622497558594, 0.15268637084960937, 0.1531544952392578, 0.15237257385253905, 0.1525067901611328, 0.15231503295898438, 0.15322618103027344, 0.153425048828125, 0.1530397186279297, 0.15344026184082032, 0.1527337646484375, 0.1575786590576172, 0.152596923828125, 0.15273992919921875, 0.15242649841308595, 0.15218893432617187, 0.15236886596679688, 0.15295053100585937, 0.15237933349609376, 0.1531352996826172, 0.15292457580566407, 0.1528807373046875, 0.15217295837402345, 0.1519588165283203, 0.15212144470214845, 0.1536579895019531, 0.153059326171875, 0.15197299194335936, 0.1514771270751953, 0.15218482971191405, 0.15215763854980469, 0.15211984252929686, 0.15172947692871094, 0.15215216064453124, 0.15135971069335938, 0.15151759338378906, 0.151843994140625, 0.1523609619140625, 0.15349690246582032, 0.1548089599609375, 0.15198623657226562, 0.15199436950683592, 0.15215000915527344, 0.1525223388671875, 0.15226438903808595, 0.15270892333984376, 0.1521583709716797, 0.15189683532714843, 0.15277670288085937, 0.15208758544921874, 0.1514213104248047, 0.15200726318359375, 0.15339724731445312, 0.1525944366455078, 0.15278652954101563, 0.15184732055664063, 0.1541959686279297, 0.1514755554199219, 0.15290354919433594, 0.153148193359375, 0.15305241394042968, 0.1525738525390625, 0.1523863983154297, 0.1518814697265625, 0.1518891906738281, 0.15340208435058594, 0.15214413452148437, 0.15164210510253906, 0.15237052917480468, 0.1538095703125, 0.1525596160888672, 0.15272073364257813, 0.1532893829345703, 0.1525431671142578, 0.1528769989013672, 0.15537916564941406, 0.15423350524902343, 0.15242771911621095, 0.15302960205078125, 0.15292819213867187, 0.15332316589355469, 0.15234828186035157, 0.15334208679199218, 0.15217280578613282, 0.1525025329589844, 0.1531259765625, 0.15366032409667968, 0.15221058654785155, 0.15264802551269532, 0.15218240356445312, 0.15424205017089843, 0.15252793884277344, 0.1524908142089844, 0.1524408264160156, 0.15239945983886719, 0.15252662658691407, 0.15279373168945312, 0.15242617797851563, 0.15331974792480468, 0.15221484375, 0.15318870544433594, 0.15242396545410156, 0.15204550170898437, 0.15642472839355467, 0.15276007080078124, 0.15229158020019531, 0.15247116088867188, 0.1521544952392578, 0.15206605529785155, 0.15164210510253906, 
0.15258009338378906, 0.15180989074707033, 0.15242665100097658, 0.15237528991699217, 0.15131033325195312, 0.15148031616210939, 0.15161541748046875, 0.15212701416015625, 0.15182643127441406, 0.1519498291015625, 0.15214332580566406, 0.15347955322265625, 0.15112416076660157, 0.15234214782714844, 0.15175718688964843, 0.1519250183105469, 0.15216201782226563, 0.151762939453125, 0.15153765869140626, 0.15164122009277345, 0.15211404418945312, 0.15186944580078124, 0.15166575622558592, 0.15628994750976563, 0.15207557678222655, 0.15228384399414063, 0.15248905944824218, 0.15272642517089843, 0.15222579956054688, 0.15320841979980468, 0.1585188446044922, 0.1523863983154297, 0.151930908203125, 0.15264764404296874, 0.15273779296875, 0.15269273376464843, 0.15276031494140624, 0.15209858703613283, 0.1522628173828125, 0.1520593566894531, 0.15289161682128907, 0.15211952209472657, 0.1518095703125, 0.15275482177734376, 0.15676620483398437, 0.15287295532226564, 0.1522127685546875, 0.15270985412597657, 0.15264154052734374, 0.1524305877685547, 0.15412136840820312, 0.1523285369873047, 0.15292262268066406]",tokens/s,6.551371687147553,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = 
get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1284, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1079, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 801, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 563, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,1876.426752,2906.5216,0.0,2503.999488,2349.010944,s,1,10.003130859375,10.003130859375,0.0,10.003130859375,10.003130859375,10.003130859375,10.003130859375,[10.003130859375],,kWh,8.110220203749728e-05,8.938853399752153e-06,2.7045021636001865e-05,0.0001170860770732513,,MB,2007.322624,3317.563392,0.0,2902.458368,2642.300928,s,10,1.966625259399414,0.1966625259399414,0.0009786056927778486,0.1967410430908203,0.19754459991455078,0.19765761489868164,0.19774802688598633,"[0.19671629333496093, 0.19398112487792968, 0.19654995727539062, 0.1967657928466797, 0.19659097290039063, 0.19662255859375, 0.19751948547363282, 0.1977706298828125, 0.19726002502441406, 0.19684841918945312]",tokens/s,1301.7223224224203,kWh,5.913436915165524e-06,6.521462922336376e-07,3.907886459640908e-06,1.047346966704007e-05,tokens/kWh,24442711.741041277,MB,2040.1152,3319.660544,0.0,2902.458368,2642.303488,s,10,28.76835009765625,2.876835009765625,0.00895107656602529,2.8738389892578127,2.888598974609375,2.8896068603515626,2.8904131689453125,"[2.86770751953125, 2.86381640625, 2.888375, 2.873300048828125, 2.870305908203125, 2.888079345703125, 2.8716455078125, 2.880127685546875, 2.8743779296875, 2.89061474609375]",tokens/s,21.899066086912157,kWh,8.448442035483122e-05,9.318631089870525e-06,4.018242659035831e-05,0.00013398547803506012,tokens/kWh,470200.210678912,,s,630,28.76605832290648,0.04566041003635952,0.0006874769450955102,0.04553296089172364,0.046045136642456054,0.04640618991851806,0.04821186950683594,"[0.04681014251708984, 0.04561398315429688, 0.04560604858398438, 0.04564204788208008, 0.045348831176757816, 0.04542262268066406, 0.04524294281005859, 0.04529359817504883, 0.04499795150756836, 0.0453106575012207, 0.04512153625488281, 0.04527228927612305, 0.04531075286865234, 0.04526208114624024, 0.04513049697875977, 0.045518848419189455, 0.04552054214477539, 0.04553894424438477, 0.045359039306640626, 0.04548278427124024, 0.045213695526123046, 0.045540672302246094, 0.04548473739624023, 0.0457625617980957, 0.04561100769042969, 0.045518718719482425, 0.04548223876953125, 0.04727385711669922, 0.046581760406494144, 0.045540481567382815, 0.045679489135742185, 0.04551475143432617, 0.045571807861328126, 0.04565430450439453, 0.045569408416748045, 0.045854976654052734, 0.04532057571411133, 0.04548198318481445, 0.04547174453735352, 0.04558006286621094, 0.04538508987426758, 0.045427200317382815, 0.045750625610351564, 0.04546486282348633, 0.04551948928833008, 0.04610262298583984, 0.045354175567626956, 0.045531551361083986, 0.045300128936767575, 0.0453570556640625, 0.04536428833007813, 0.04531296157836914, 0.04517388916015625, 0.0453837776184082, 0.045347614288330076, 0.045625343322753906, 0.045281280517578126, 0.04533452987670898, 0.04511129760742188, 0.04542988967895508, 
0.04558227157592774, 0.04549523162841797, 0.045328384399414064, 0.046139392852783206, 0.04592876815795898, 0.04551679992675781, 0.04541766357421875, 0.04532716751098633, 0.04551065444946289, 0.04527308654785156, 0.045346336364746095, 0.045279808044433593, 0.04524585723876953, 0.045117950439453124, 0.04580966567993164, 0.0450777587890625, 0.04518313598632812, 0.045197921752929686, 0.04524358367919922, 0.045197441101074216, 0.04522576141357422, 0.04536528015136719, 0.045472286224365235, 0.045312351226806644, 0.04549606323242188, 0.0450255355834961, 0.046309375762939455, 0.04535862350463867, 0.045091297149658205, 0.04555571365356445, 0.04542668914794922, 0.04540585708618164, 0.04545142364501953, 0.04558979034423828, 0.04549280166625977, 0.04546928024291992, 0.045329086303710936, 0.045244544982910154, 0.04523737716674805, 0.045478721618652344, 0.04541164779663086, 0.04560140609741211, 0.045514816284179686, 0.045403423309326174, 0.045536991119384765, 0.04574720001220703, 0.04568473434448242, 0.04526079940795898, 0.04563705444335937, 0.045371967315673827, 0.04537558364868164, 0.04537334442138672, 0.045468830108642576, 0.04533334350585937, 0.04552425765991211, 0.045206241607666016, 0.04534272003173828, 0.045707263946533204, 0.045778942108154294, 0.04534844970703125, 0.04543446350097656, 0.04561568069458008, 0.045580257415771486, 0.04570550537109375, 0.046069759368896485, 0.04541439819335937, 0.04654982376098633, 0.04604444885253906, 0.04636310577392578, 0.04563158416748047, 0.04576067352294922, 0.04551475143432617, 0.04551679992675781, 0.04547993469238281, 0.045454368591308594, 0.04549526214599609, 0.04538163375854492, 0.045571552276611325, 0.045695518493652346, 0.04580556869506836, 0.04555571365356445, 0.045758464813232425, 0.04563763046264648, 0.045520256042480466, 0.0459634895324707, 0.045442977905273435, 0.045648384094238284, 0.0478392333984375, 0.04731631851196289, 0.04743849563598633, 0.04543283081054687, 0.04524236679077148, 0.045313697814941406, 0.045205375671386716, 0.04523011016845703, 0.04527148818969726, 0.0458092155456543, 0.04522383880615234, 0.046010719299316404, 0.04915609741210938, 0.04562489700317383, 0.04555382537841797, 0.04548246383666992, 0.04554095840454102, 0.04620534515380859, 0.046196094512939455, 0.0458656005859375, 0.04602880096435547, 0.04580963134765625, 0.04597967910766602, 0.046034942626953124, 0.04572739028930664, 0.04534662246704101, 0.04538422393798828, 0.045622623443603516, 0.04553337478637695, 0.04564787292480469, 0.04561065673828125, 0.04569580841064453, 0.04818534469604492, 0.04552812957763672, 0.045431198120117186, 0.04541289520263672, 0.046112735748291014, 0.04561923217773438, 0.04562239837646485, 0.04572659301757812, 0.04569449615478516, 0.04563811111450195, 0.04634377670288086, 0.045795745849609375, 0.04581171035766601, 0.04681318283081055, 0.04795391845703125, 0.04571340942382812, 0.04566777420043945, 0.045560382843017576, 0.04551424026489258, 0.0455623664855957, 0.04549222564697265, 0.04588339233398438, 0.04554956817626953, 0.045862911224365234, 0.04565606307983398, 0.045428737640380856, 0.04707040023803711, 0.045437759399414065, 0.045376609802246094, 0.04531702423095703, 0.04542259216308594, 0.04573388671875, 0.045989246368408204, 0.04533852767944336, 0.04528732681274414, 0.045312831878662106, 0.04534179306030273, 0.045380512237548826, 0.04524998474121094, 0.04524294281005859, 0.04522393417358399, 0.04527308654785156, 0.045285152435302733, 0.045510879516601564, 0.045229377746582033, 0.04552569580078125, 0.04580556869506836, 0.045784801483154294, 
0.04554956817626953, 0.04548739242553711, 0.04534572982788086, 0.045528446197509766, 0.04544992065429688, 0.04546092987060547, 0.04554143905639649, 0.04557865524291992, 0.0452916145324707, 0.04548956680297851, 0.04558019256591797, 0.045583038330078124, 0.0452174072265625, 0.04540182495117188, 0.04525139236450195, 0.0454200325012207, 0.04531600189208984, 0.04577324676513672, 0.045653343200683594, 0.045789344787597656, 0.045852256774902345, 0.04545404815673828, 0.04567468643188476, 0.0452968635559082, 0.045322078704833985, 0.04629945755004883, 0.045276863098144535, 0.0454279670715332, 0.0456789436340332, 0.045565727233886716, 0.04540454483032227, 0.045532737731933594, 0.04551750564575195, 0.0456703987121582, 0.04561305618286133, 0.045774848937988284, 0.045557758331298825, 0.04568278503417969, 0.04575017547607422, 0.04651212692260742, 0.04566220855712891, 0.045651454925537106, 0.04558310317993164, 0.04563328170776367, 0.04552703857421875, 0.045524543762207034, 0.045448673248291015, 0.0457369270324707, 0.045413440704345706, 0.04552524948120117, 0.045502464294433595, 0.04554227066040039, 0.04539753723144531, 0.045490463256835936, 0.04538982391357422, 0.045385726928710936, 0.045606910705566404, 0.04541439819335937, 0.04534179306030273, 0.045381790161132814, 0.046053184509277346, 0.045769023895263675, 0.045528961181640626, 0.04539254379272461, 0.04540367889404297, 0.04694278335571289, 0.04611686325073242, 0.04537343978881836, 0.045518848419189455, 0.04556185531616211, 0.04558028793334961, 0.045568000793457034, 0.04538496017456055, 0.04553561782836914, 0.04480758285522461, 0.04488684844970703, 0.04491017532348633, 0.04451299285888672, 0.04498102569580078, 0.04576287841796875, 0.045652896881103515, 0.04571033477783203, 0.045991073608398436, 0.04583084869384765, 0.045445121765136716, 0.045451072692871096, 0.04542278289794922, 0.04555980682373047, 0.046739681243896485, 0.04619241714477539, 0.04553420639038086, 0.04560755157470703, 0.04551308822631836, 0.04553932952880859, 0.04548624038696289, 0.04546688079833985, 0.04558457565307617, 0.04545987319946289, 0.04610857772827148, 0.04571555328369141, 0.04543078231811523, 0.046147167205810545, 0.04543289566040039, 0.04540860748291015, 0.04559807968139649, 0.05617318344116211, 0.046139102935791015, 0.04543849563598633, 0.04546614456176758, 0.045338207244873044, 0.0455230712890625, 0.04594739151000977, 0.04560076904296875, 0.04541350555419922, 0.045287776947021484, 0.045233760833740234, 0.04515871810913086, 0.04525324630737305, 0.04538163375854492, 0.045297409057617186, 0.04530815887451172, 0.04555571365356445, 0.04537139129638672, 0.04575641632080078, 0.04535910415649414, 0.045499454498291014, 0.047635391235351564, 0.0471973762512207, 0.04769055938720703, 0.04565116882324219, 0.04559747314453125, 0.04558009719848633, 0.045719745635986325, 0.04576422500610351, 0.04560835266113281, 0.045429630279541014, 0.04535004806518555, 0.04538447952270508, 0.04536131286621094, 0.04554342269897461, 0.04561510467529297, 0.04559872055053711, 0.04554524612426758, 0.046444126129150394, 0.045701377868652346, 0.04561548614501953, 0.045475841522216794, 0.04532403182983399, 0.04522848129272461, 0.045624862670898436, 0.045700958251953125, 0.046539264678955077, 0.045675678253173826, 0.045699199676513674, 0.04820159912109375, 0.045564159393310544, 0.04549488067626953, 0.04605132675170898, 0.045401313781738284, 0.045410400390625, 0.04561955261230469, 0.045432254791259764, 0.045461471557617185, 0.04544716644287109, 0.045335487365722654, 0.045419807434082034, 0.04543971252441406, 
0.045559009552001956, 0.04518582534790039, 0.04528332901000977, 0.04523564910888672, 0.045838111877441405, 0.04930025482177734, 0.045568000793457034, 0.04535036849975586, 0.04530435180664062, 0.045338623046875, 0.04529708862304688, 0.04519379043579102, 0.045569183349609375, 0.04525888061523437, 0.04572848129272461, 0.045333728790283204, 0.04528384017944336, 0.04553551864624023, 0.045352958679199216, 0.04516659164428711, 0.04537548828125, 0.04530380630493164, 0.04590943908691406, 0.04537606430053711, 0.045350910186767575, 0.04511129760742188, 0.04515964889526367, 0.04500764846801758, 0.04516780853271484, 0.04512031936645508, 0.045461505889892576, 0.045265087127685545, 0.045454753875732425, 0.0452644157409668, 0.04548697662353516, 0.045295520782470705, 0.04540016174316406, 0.04548803329467774, 0.045425022125244144, 0.045798721313476565, 0.04570767974853516, 0.045385726928710936, 0.04561481475830078, 0.046626270294189455, 0.045426849365234376, 0.04592614364624024, 0.045634464263916014, 0.04628656005859375, 0.04577062225341797, 0.04564364624023438, 0.04541904067993164, 0.045655616760253905, 0.04570771026611328, 0.04538982391357422, 0.045481697082519534, 0.04564608001708984, 0.04566748809814453, 0.04644144058227539, 0.04571267318725586, 0.04619731140136719, 0.04543084716796875, 0.045977375030517575, 0.04576483154296875, 0.04553318405151367, 0.04549017715454102, 0.04532342529296875, 0.0455975341796875, 0.045800960540771485, 0.04560947036743164, 0.04558607864379883, 0.045901313781738284, 0.04566435241699219, 0.04586368179321289, 0.04556560134887695, 0.04574390411376953, 0.04549849700927734, 0.04567494583129883, 0.04577807998657227, 0.045798240661621095, 0.04584828948974609, 0.045555999755859375, 0.04589158248901367, 0.04580752182006836, 0.04541974258422852, 0.04555574417114258, 0.04548204803466797, 0.04542319869995117, 0.04540399932861328, 0.04554902267456055, 0.04539846420288086, 0.04554150390625, 0.04571920013427734, 0.04609084701538086, 0.04544435119628906, 0.045755199432373043, 0.04551651382446289, 0.04561539077758789, 0.04532428741455078, 0.04582915115356445, 0.04555820846557617, 0.045462047576904294, 0.04598748779296875, 0.04558662414550781, 0.04564131164550781, 0.04831615829467773, 0.04607907104492188, 0.04544483184814453, 0.046266368865966793, 0.0454389762878418, 0.0453092155456543, 0.046198463439941405, 0.04536137771606445, 0.04568841552734375, 0.0456569938659668, 0.04550582504272461, 0.04539670562744141, 0.045819873809814456, 0.04526208114624024, 0.04568054580688476, 0.04562419128417969, 0.04562944030761719, 0.046061569213867185, 0.04547379302978516, 0.045297664642333986, 0.04534476852416992, 0.04508659362792969, 0.04532595062255859, 0.045146625518798826, 0.045428737640380856, 0.045207263946533204, 0.0459389762878418, 0.045271041870117185, 0.04546559906005859, 0.0452567024230957, 0.04548198318481445, 0.04567407989501953, 0.04550300979614258, 0.0453098258972168, 0.045606910705566404, 0.04529151916503906, 0.045418144226074215, 0.04541030502319336, 0.04542499160766601, 0.04589977645874024, 0.045522270202636717, 0.04547446441650391, 0.04528665542602539, 0.04519382476806641, 0.0452507209777832, 0.045266529083251954, 0.045349281311035154, 0.04524972915649414, 0.04530995178222656, 0.04513670349121094, 0.045442943572998044, 0.04520054244995117, 0.04531244659423828, 0.045574913024902346, 0.0456517448425293, 0.04619673538208008, 0.04590723037719727, 0.045334911346435545, 0.04560521697998047, 0.04560265731811523, 0.04544681549072266, 0.04784998321533203, 0.04728742218017578, 0.05007558441162109, 
0.045773761749267575, 0.045211326599121096, 0.0454925422668457, 0.045313854217529294, 0.045684833526611325, 0.04619673538208008, 0.04561100769042969, 0.04591526412963867, 0.04629779052734375, 0.04592211151123047, 0.04993638229370117, 0.04600822448730469, 0.045674976348876954, 0.04562739181518555, 0.04605132675170898, 0.04611663818359375, 0.04604131317138672, 0.04597942352294922, 0.04568905639648437, 0.045844287872314454, 0.045746208190917965, 0.045913726806640624, 0.04593923187255859, 0.045685791015625, 0.0458004150390625, 0.04574003219604492, 0.048216064453125, 0.045657726287841795, 0.04582233428955078, 0.0456519660949707, 0.04581990432739258, 0.0458438720703125, 0.04574678421020508, 0.04592435073852539, 0.04565401458740234, 0.04572979354858398, 0.04571955108642578, 0.04562115097045898, 0.04559183883666992, 0.045445953369140625, 0.045620670318603514, 0.045500225067138675, 0.04612787246704102, 0.04695840072631836, 0.045832382202148435, 0.045671680450439456, 0.04558905410766602, 0.045504703521728515, 0.04552486419677734, 0.0455230712890625, 0.045939872741699216, 0.045711902618408205, 0.045801792144775394, 0.046071807861328126, 0.04557363128662109, 0.0456258544921875, 0.04578713607788086, 0.04566425704956055, 0.04572159957885742, 0.04596284866333008, 0.04572201538085938, 0.045764606475830076, 0.045538784027099606, 0.04538556671142578, 0.04534294509887695, 0.045786911010742185, 0.04549907302856445, 0.045445121765136716]",tokens/s,21.900810772476575,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, 
softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,True,MB,2574.04928,11834.097664,0.0,11431.575552,10953.091072,s,1,21.32569140625,21.32569140625,0.0,21.32569140625,21.32569140625,21.32569140625,21.32569140625,[21.32569140625],,kWh,0.0004127731994750168,4.552470792884508e-05,0.00014014066766798727,0.0005984385750718492,,MB,1913.511936,12729.581568,0.0,12314.476544,11624.261632,s,10,17.88545300292969,1.7885453002929688,0.007052868846592741,1.7899406127929687,1.793634375,1.796508837890625,1.798808408203125,"[1.7724200439453126, 1.78068310546875, 1.7871923828125, 1.788359375, 1.7883863525390624, 1.79299560546875, 1.791494873046875, 1.7922349853515624, 1.79938330078125, 1.792302978515625]",tokens/s,143.13308136957252,kWh,5.2087349381661925e-05,5.744864048438923e-06,3.456158320480074e-05,9.23937966349016e-05,tokens/kWh,2770748.7875143387,MB,1917.718528,12733.775872,0.0,12316.573696,11624.264192,s,10,88.2394228515625,8.82394228515625,0.019337127051921383,8.827191406250002,8.849871484375,8.8501720703125,8.8504125390625,"[8.792271484375, 8.8004736328125, 8.8059296875, 8.8147294921875, 8.82693359375, 8.8283779296875, 8.82744921875, 8.84298046875, 8.85047265625, 
8.8498046875]",tokens/s,7.139665918484011,kWh,0.00025836470534292553,2.8498624754668718e-05,0.00017143852603959675,0.000458301856137191,tokens/kWh,137463.9861400456,,s,630,88.23565536499034,0.1400565958174448,0.0016459393385597626,0.13979374694824218,0.14130850524902341,0.14164627838134766,0.1496572999572754,"[0.15126284790039063, 0.13933612060546874, 0.1395047607421875, 0.1374945526123047, 0.13721685791015625, 0.13739529418945312, 0.13787863159179686, 0.13963449096679686, 0.13877247619628907, 0.14012416076660156, 0.13992140197753905, 0.1395173797607422, 0.13774610900878906, 0.13791305541992188, 0.13813679504394533, 0.13900486755371094, 0.13928598022460936, 0.13930519104003905, 0.14024940490722657, 0.13995933532714844, 0.14004118347167968, 0.1390592041015625, 0.13814784240722655, 0.13878643798828125, 0.13949888610839845, 0.13924403381347655, 0.1392051239013672, 0.13885215759277345, 0.13888905334472657, 0.13911257934570312, 0.1400813751220703, 0.14075289916992187, 0.14017849731445312, 0.139772705078125, 0.13894831848144532, 0.13916409301757812, 0.1386616668701172, 0.1392716522216797, 0.13960269165039063, 0.13888851928710938, 0.13932000732421876, 0.13906533813476563, 0.13970809936523437, 0.13927456665039062, 0.14082882690429688, 0.14103330993652344, 0.14031625366210937, 0.14118339538574218, 0.14047158813476562, 0.1388976287841797, 0.13851724243164062, 0.13957093811035157, 0.13977743530273437, 0.13952879333496093, 0.13965904235839843, 0.13976121520996093, 0.13831546020507812, 0.13971270751953124, 0.1396293487548828, 0.1397568054199219, 0.1404502410888672, 0.14015029907226562, 0.14112034606933593, 0.15122840881347657, 0.13782383728027345, 0.13867240905761719, 0.13772921752929687, 0.137500732421875, 0.13889741516113283, 0.14084095764160157, 0.14260092163085938, 0.13950965881347657, 0.13978854370117189, 0.13960397338867186, 0.1374229736328125, 0.13839996337890625, 0.13883273315429687, 0.14032569885253907, 0.1399579162597656, 0.13848597717285155, 0.1393088684082031, 0.13866738891601563, 0.1396003875732422, 0.13979898071289062, 0.140400634765625, 0.14064845275878907, 0.13886668395996093, 0.13909811401367186, 0.13821292114257813, 0.13867868041992187, 0.13975144958496094, 0.1390098876953125, 0.13950787353515626, 0.13883123779296874, 0.13957577514648437, 0.13998915100097656, 0.1400665283203125, 0.14080569458007813, 0.14025497436523438, 0.13971299743652343, 0.1390207977294922, 0.13929676818847656, 0.13923942565917968, 0.13956857299804687, 0.13884678649902343, 0.13896607971191408, 0.13909613037109375, 0.13882048034667968, 0.14019970703125, 0.13890943908691405, 0.13974774169921875, 0.14086151123046875, 0.14025526428222657, 0.14110890197753906, 0.14024928283691407, 0.1410496368408203, 0.13993814086914064, 0.13921664428710936, 0.1398602294921875, 0.13916767883300782, 0.13979653930664063, 0.13996646118164063, 0.13959120178222656, 0.13952047729492187, 0.13979443359375, 0.13962240600585937, 0.1505482635498047, 0.138060546875, 0.13843064880371095, 0.13848576354980469, 0.13982003784179686, 0.1395476531982422, 0.13971379089355468, 0.1410089569091797, 0.13928005981445313, 0.13816021728515626, 0.13862185668945312, 0.13875932312011718, 0.13887983703613282, 0.13914665222167968, 0.14049302673339845, 0.13996797180175782, 0.1394144287109375, 0.1408185577392578, 0.1397658233642578, 0.13867193603515626, 0.13887641906738282, 0.14136810302734376, 0.13937443542480468, 0.13911004638671876, 0.13918167114257812, 0.1385396423339844, 0.1390873565673828, 0.1389366455078125, 0.1407628173828125, 0.14031718444824218, 
0.13937210083007812, 0.14060794067382812, 0.13992291259765624, 0.1403080596923828, 0.13954745483398437, 0.14029814147949218, 0.13904710388183594, 0.13995625305175782, 0.13903805541992187, 0.1394439697265625, 0.13919465637207032, 0.139214599609375, 0.13956185913085936, 0.13968988037109376, 0.13959379577636719, 0.13960595703125, 0.14032086181640624, 0.1397178955078125, 0.1401411895751953, 0.14120358276367187, 0.14109219360351563, 0.14075970458984374, 0.1393687286376953, 0.13972041320800782, 0.13927606201171874, 0.1397733154296875, 0.13978486633300782, 0.13924111938476563, 0.1404438781738281, 0.13917010498046875, 0.1396071319580078, 0.1390885467529297, 0.1392911376953125, 0.1517322235107422, 0.1376847686767578, 0.1386928253173828, 0.1384099884033203, 0.1393868408203125, 0.13964495849609376, 0.14037606811523437, 0.1413447723388672, 0.1385287628173828, 0.1389954833984375, 0.13862054443359376, 0.13867376708984375, 0.13869679260253906, 0.13926829528808593, 0.14050172424316407, 0.13888922119140626, 0.13988233947753906, 0.14020828247070313, 0.13978419494628908, 0.13867782592773437, 0.1399607696533203, 0.140384033203125, 0.13912908935546875, 0.138725341796875, 0.13869874572753907, 0.1396282501220703, 0.1385055389404297, 0.13946365356445312, 0.14125875854492187, 0.13936137390136719, 0.13969815063476562, 0.14064326477050781, 0.14029417419433593, 0.14107235717773436, 0.14025942993164062, 0.1412545623779297, 0.13935836791992187, 0.14009225463867186, 0.1391907196044922, 0.13935877990722656, 0.13977110290527345, 0.13947526550292969, 0.139536865234375, 0.13897276306152342, 0.1389654083251953, 0.14083482360839844, 0.1397227478027344, 0.14038386535644531, 0.13969036865234374, 0.14196726989746095, 0.14091477966308594, 0.14033868408203126, 0.14094796752929686, 0.1405849609375, 0.14007296752929688, 0.14021632385253907, 0.13899183654785155, 0.14057859802246095, 0.13909744262695312, 0.13979306030273436, 0.1390447998046875, 0.1393828430175781, 0.1407406005859375, 0.15225856018066405, 0.13833010864257814, 0.138472900390625, 0.138596923828125, 0.13865571594238282, 0.1390738525390625, 0.1411227569580078, 0.1425106201171875, 0.13987983703613283, 0.13994985961914064, 0.13927711486816408, 0.13866773986816405, 0.13864151000976563, 0.1394824981689453, 0.14096287536621094, 0.1394685363769531, 0.13878863525390625, 0.13892332458496093, 0.13910108947753907, 0.1390970916748047, 0.14084608459472656, 0.14196694946289062, 0.14080860900878905, 0.140103271484375, 0.13978460693359376, 0.13918559265136718, 0.1389674835205078, 0.13943565368652344, 0.14134527587890625, 0.13958143615722657, 0.1387642822265625, 0.13948313903808593, 0.1392262420654297, 0.1391051483154297, 0.14064358520507814, 0.14066969299316406, 0.14018559265136718, 0.13962034606933593, 0.14088192749023437, 0.14085530090332032, 0.13975074768066406, 0.13994841003417968, 0.13967593383789062, 0.14023269653320314, 0.139280029296875, 0.140308837890625, 0.13894451904296876, 0.1394790344238281, 0.1396636199951172, 0.13962629699707033, 0.140328857421875, 0.13926956176757813, 0.1407596435546875, 0.13909181213378907, 0.1403456268310547, 0.13972857666015626, 0.14147193908691405, 0.14137318420410155, 0.14048489379882811, 0.1413918151855469, 0.140553955078125, 0.14118978881835936, 0.14095225524902344, 0.14968832397460938, 0.13862924194335938, 0.13950349426269532, 0.13993472290039063, 0.14003712463378906, 0.13851443481445314, 0.1399680633544922, 0.14114656066894532, 0.1388987274169922, 0.13889208984375, 0.1389812774658203, 0.13902847290039064, 0.13791976928710936, 0.1404281921386719, 
0.14050653076171876, 0.13910057067871093, 0.14054400634765624, 0.1405992889404297, 0.1399603271484375, 0.13942726135253905, 0.13993222045898437, 0.14078976440429689, 0.139040771484375, 0.13986944580078126, 0.13973129272460938, 0.13861225891113282, 0.13872825622558593, 0.1401262664794922, 0.14034880065917968, 0.13911514282226561, 0.1401155242919922, 0.13875244140625, 0.14067097473144533, 0.1404067840576172, 0.14089788818359375, 0.14143119812011717, 0.14018765258789062, 0.1413058624267578, 0.14091059875488282, 0.14062521362304686, 0.13974803161621094, 0.13996450805664062, 0.14048445129394532, 0.13965866088867188, 0.14005516052246095, 0.13943350219726564, 0.1406817321777344, 0.139114501953125, 0.13957656860351564, 0.14021507263183594, 0.1393901824951172, 0.13968415832519532, 0.13972454833984374, 0.14053622436523439, 0.13933164978027343, 0.14055856323242188, 0.1403740234375, 0.14077923583984375, 0.14075935363769532, 0.14135894775390626, 0.14093734741210937, 0.14052351379394531, 0.14181318664550782, 0.14958134460449218, 0.13882176208496094, 0.13903439331054687, 0.14010028076171874, 0.14033088684082032, 0.14011199951171874, 0.13994790649414063, 0.14096383666992188, 0.13879855346679687, 0.13938882446289064, 0.13907327270507813, 0.1388921661376953, 0.1389911651611328, 0.13930335998535157, 0.14049241638183593, 0.13946713256835938, 0.139406982421875, 0.14012045288085936, 0.14045989990234375, 0.14070176696777345, 0.14064988708496093, 0.14142652893066407, 0.13955667114257814, 0.13996339416503906, 0.13994207763671876, 0.13919337463378906, 0.13928544616699218, 0.14002774047851563, 0.13966131591796874, 0.13962789916992188, 0.13937461853027344, 0.13861033630371095, 0.140391357421875, 0.13920460510253907, 0.14051533508300781, 0.14023619079589844, 0.14007557678222657, 0.14059027099609375, 0.1404649963378906, 0.14080204772949217, 0.14102345275878905, 0.14143055725097656, 0.14019789123535156, 0.14098431396484376, 0.13924966430664062, 0.1399746551513672, 0.14014035034179687, 0.13960211181640625, 0.14054428100585936, 0.13914083862304688, 0.14048252868652344, 0.13930703735351563, 0.14045132446289063, 0.13923788452148436, 0.13907101440429687, 0.14073085021972656, 0.1394354248046875, 0.13912124633789064, 0.1406459503173828, 0.14085311889648439, 0.1400709686279297, 0.1401835479736328, 0.14164183044433593, 0.15030415344238282, 0.13770608520507813, 0.13867213439941406, 0.1390239715576172, 0.14013859558105468, 0.14000775146484376, 0.14064434814453125, 0.14218812561035157, 0.13903709411621093, 0.13890354919433595, 0.13902383422851564, 0.13869725036621094, 0.13869424438476563, 0.1404788818359375, 0.1409986572265625, 0.13913270568847655, 0.13889888000488282, 0.1391597442626953, 0.13936614990234375, 0.13991818237304687, 0.14123820495605469, 0.14146060180664063, 0.14020498657226563, 0.1403863067626953, 0.14081639099121093, 0.14050918579101562, 0.13964083862304688, 0.13975961303710938, 0.14086553955078124, 0.14004579162597655, 0.14001206970214844, 0.1393173370361328, 0.13952604675292968, 0.1387704315185547, 0.13970130920410156, 0.13981532287597656, 0.14038479614257812, 0.1401282501220703, 0.13912261962890626, 0.1392230987548828, 0.14040640258789064, 0.14040512084960938, 0.14092477416992188, 0.14133229064941405, 0.14084066772460938, 0.14076559448242187, 0.14160508728027343, 0.14074879455566405, 0.14114201354980468, 0.14164991760253906, 0.14151609802246093, 0.1408146514892578, 0.14112396240234376, 0.141412353515625, 0.14115362548828125, 0.141068359375, 0.14107414245605468, 0.14158528137207033, 0.1411024932861328, 
0.139442138671875, 0.1406757049560547, 0.140505126953125, 0.1393387908935547, 0.14953471374511718, 0.13876634216308595, 0.13838531494140624, 0.1393050537109375, 0.1399357452392578, 0.1398497314453125, 0.14080426025390624, 0.14211453247070313, 0.1401729278564453, 0.13908595275878907, 0.1393125762939453, 0.13902432250976562, 0.1390642547607422, 0.14044105529785156, 0.14113833618164062, 0.1392887725830078, 0.13867779541015626, 0.1386398468017578, 0.13854515075683593, 0.14025318908691406, 0.1415167999267578, 0.1417954559326172, 0.14077325439453126, 0.13953228759765626, 0.1404058837890625, 0.1405793914794922, 0.13978656005859375, 0.14048031616210938, 0.14158047485351563, 0.14029930114746095, 0.13918678283691407, 0.1396432647705078, 0.13916326904296875, 0.13922752380371095, 0.14035958862304687, 0.1399910430908203, 0.14020831298828124, 0.13878367614746093, 0.13893116760253907, 0.1402344665527344, 0.13897305297851562, 0.1416790008544922, 0.1408076171875, 0.14023663330078126, 0.13989654541015625, 0.1413765106201172, 0.14066278076171876, 0.1410600891113281, 0.14145535278320312, 0.14183628845214843, 0.1412413787841797, 0.1407211151123047, 0.14151434326171874, 0.14077583312988282, 0.14103692626953124, 0.14238117980957032, 0.14069810485839843, 0.14162669372558595, 0.14166685485839844, 0.14156536865234376, 0.14146742248535157, 0.14107469177246093, 0.14149885559082032, 0.14943263244628907, 0.13751705932617186, 0.13928038024902345, 0.14001560974121094, 0.13949031066894532, 0.138861572265625, 0.14110739135742187, 0.14177420043945313, 0.13942008972167969, 0.13855538940429687, 0.13887423706054688, 0.1384147186279297, 0.13863935852050782, 0.14033836364746094, 0.1412431945800781, 0.13940940856933592, 0.13913113403320312, 0.13952557373046875, 0.140548095703125, 0.13988485717773438, 0.1416782684326172, 0.1419021453857422, 0.14097772216796875, 0.1411486053466797, 0.14001930236816407, 0.1397026824951172, 0.1392189483642578, 0.1395404815673828, 0.14155775451660157, 0.13998658752441406, 0.1388949432373047, 0.14022732543945313, 0.13885031127929687, 0.13870230102539063, 0.1404175720214844, 0.14021023559570311, 0.14125238037109375, 0.13955906677246094, 0.13941964721679687, 0.13882908630371094, 0.13950650024414063, 0.14107638549804688, 0.1409986572265625, 0.14171852111816408, 0.1400731201171875, 0.14074147033691406, 0.14071192932128906, 0.14010896301269532, 0.1417095947265625, 0.14220755004882812, 0.14162725830078124, 0.14170066833496095, 0.14064697265625, 0.1412752685546875, 0.1409901123046875, 0.14135877990722656, 0.14167913818359376, 0.14112873840332033, 0.14178111267089843, 0.14066773986816405, 0.1413692169189453, 0.1411988525390625, 0.14159730529785156]",tokens/s,7.1399707679846705,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, 
requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 58.12 MiB is free. Process 133581 has 14.68 GiB memory in use. Of the allocated memory 14.19 GiB is allocated by PyTorch, and 384.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpad9saovq/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmplu1colqe/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,True,MB,1097.732096,4937.678848,0.0,4542.431232,4484.571136,s,1,14.5617099609375,14.5617099609375,0.0,14.5617099609375,14.5617099609375,14.5617099609375,14.5617099609375,[14.5617099609375],,kWh,0.00021752510441666904,2.398741052359524e-05,8.017478636199865e-05,0.00032168730130226295,,MB,1305.165824,5480.841216,0.0,5073.010688,4884.617216,s,10,11.0216416015625,1.10216416015625,0.006930244767834649,1.1029554443359375,1.1094821044921876,1.110569970703125,1.111440263671875,"[1.0856644287109376, 1.0958511962890625, 1.10051123046875, 1.10182177734375, 1.1063885498046875, 1.102702392578125, 1.1045953369140624, 1.10320849609375, 1.1116578369140624, 1.1092403564453126]",tokens/s,232.27029988319325,kWh,3.2022201245416584e-05,3.5315273165968545e-06,2.1340822628199835e-05,5.689455119021327e-05,tokens/kWh,4499552.147693818,MB,1325.572096,5495.52128,0.0,5087.690752,4884.619776,s,10,49.30262548828125,4.930262548828125,0.014049781425734228,4.934433837890625,4.943747021484374,4.946774633789063,4.949196723632812,"[4.9013173828125, 4.91222119140625, 4.9211572265625, 4.92892138671875, 4.93288525390625, 4.935982421875, 4.93823681640625, 4.93902734375, 4.94307421875, 4.94980224609375]",tokens/s,12.778224156637354,kWh,0.00014493874330833318,1.598788314719173e-05,9.619927140380006e-05,0.000257125897859325,tokens/kWh,245016.15949423984,,s,630,49.29829949951177,0.07825126904684399,0.0019029491224974552,0.07785262298583984,0.07942870635986328,0.07996926078796386,0.09095911315917969,"[0.088295166015625, 0.07616742706298828, 0.0763904037475586, 0.07641622161865234, 0.07685814666748046, 0.07706623840332032, 0.07695974731445313, 0.0764698257446289, 0.07737347412109374, 0.07706224060058593, 0.07746915435791016, 0.07827750396728515, 0.08050685119628906, 0.07918592071533204, 0.07841935729980469, 0.07729779052734374, 0.07696227264404297, 0.07748198699951171, 0.07705187225341797, 0.07739395141601563, 0.07659439849853515, 0.07655213165283203, 0.07665869140625, 0.07739862060546875, 0.07899571228027344, 0.07919206237792968, 0.0789764175415039, 0.07762319946289062, 0.07730191802978516, 0.07709136199951172, 0.07741849517822266, 0.07758643341064453, 0.0769617919921875, 0.07723622131347656, 0.07670310211181641, 0.07712783813476562, 0.0777814712524414, 0.07867951965332032, 0.0790574722290039, 0.07897612762451171, 0.0784454116821289, 0.07828265380859376, 0.07877232360839843, 0.07707647705078124, 0.07778665924072266, 0.07673648071289063, 0.07729158020019532, 0.07774166107177734, 0.07769379425048828, 0.07769292449951172, 0.07813529968261719, 0.07839334106445313, 0.07820492553710938, 0.07805951690673828, 0.0780083236694336, 0.07752025604248047, 0.07744576263427734, 0.07756800079345703, 0.07719526672363282, 0.07719664001464843, 0.07750313568115234, 
0.07736729431152344, 0.07775958251953125, 0.09045753479003907, 0.07656505584716797, 0.07712710571289062, 0.07751123046875, 0.0776785888671875, 0.07757209777832032, 0.07709696197509766, 0.07725254058837891, 0.07684307098388672, 0.07678361511230469, 0.07685648345947266, 0.07873212432861328, 0.08012582397460938, 0.07865897369384765, 0.07805967712402344, 0.07757433319091797, 0.07689206695556641, 0.0774210205078125, 0.07734272003173828, 0.0769241943359375, 0.07732281494140625, 0.07736057281494141, 0.07667990112304687, 0.07739318084716797, 0.07861670684814454, 0.07844310760498047, 0.07882137298583984, 0.07800012969970703, 0.07762124633789062, 0.07819670104980468, 0.07763970947265625, 0.07723417663574218, 0.07732182312011719, 0.07764771270751954, 0.07721836853027343, 0.07784857940673828, 0.07943577575683594, 0.07818649291992187, 0.07863497924804687, 0.078570556640625, 0.07804003143310546, 0.07809024047851562, 0.07794892883300782, 0.077412353515625, 0.07766015625, 0.0772300796508789, 0.0773201904296875, 0.07739801788330078, 0.07817625427246094, 0.07806918334960937, 0.07816454315185548, 0.0789109115600586, 0.07834403228759766, 0.07806845092773437, 0.07780556488037109, 0.07756095886230469, 0.07790681457519531, 0.07780127716064453, 0.07737945556640625, 0.07758060455322266, 0.07831116485595703, 0.07746790313720703, 0.07746336364746094, 0.09297280120849609, 0.07664051055908203, 0.07712118530273437, 0.07736937713623047, 0.07733280181884766, 0.07750653076171875, 0.07703350067138671, 0.077264892578125, 0.07704985809326172, 0.07705804443359375, 0.07720140838623046, 0.07857904052734375, 0.08072643280029297, 0.07805903625488281, 0.07732201385498047, 0.07747698974609375, 0.07816361236572265, 0.07756610870361329, 0.07748131561279296, 0.07718080139160156, 0.07689295959472656, 0.07732428741455079, 0.07686144256591797, 0.07823974609375, 0.08058048248291015, 0.0786690902709961, 0.0782992935180664, 0.07831581115722656, 0.07822525024414062, 0.07774665832519531, 0.0778094711303711, 0.07734300994873047, 0.07763961791992187, 0.07747385406494141, 0.07770521545410156, 0.07857766723632813, 0.0786903076171875, 0.0786527328491211, 0.07812300872802734, 0.07813190460205079, 0.07779737854003907, 0.07773945617675782, 0.07822569274902344, 0.07754959869384766, 0.07719757080078125, 0.07733980560302735, 0.07744566345214844, 0.0777383041381836, 0.07806976318359375, 0.07841177368164062, 0.07911804962158203, 0.0787376937866211, 0.07777279663085937, 0.0778239974975586, 0.07768678283691406, 0.078183837890625, 0.07772835540771485, 0.07744921875, 0.07739186859130859, 0.07832371520996094, 0.07783158111572265, 0.07806393432617187, 0.0786475830078125, 0.09281330871582032, 0.0767179183959961, 0.07716470336914062, 0.07717683410644531, 0.07724569702148437, 0.07711820983886719, 0.07719907379150391, 0.07700099182128907, 0.07733452606201172, 0.07715990447998047, 0.07711920166015625, 0.07876486206054688, 0.08106393432617187, 0.07851622772216797, 0.07762886047363281, 0.0777119369506836, 0.07741645050048829, 0.07742054748535156, 0.07754137420654297, 0.077517822265625, 0.07753011322021484, 0.07746694183349609, 0.07735935974121094, 0.07856377410888672, 0.07993138885498047, 0.07923916625976563, 0.07834210968017578, 0.07814527893066406, 0.07743721771240235, 0.07750450897216797, 0.07763542175292969, 0.07759878540039063, 0.07782614135742187, 0.07729961395263672, 0.07724655914306641, 0.0789683837890625, 0.07828012847900391, 0.078501953125, 0.07896774291992187, 0.07799603271484375, 0.07803286743164063, 0.07724240112304688, 0.0776437759399414, 
0.07810006713867187, 0.07743878173828125, 0.07749078369140625, 0.07731814575195313, 0.0780902099609375, 0.07855852508544922, 0.07878729248046876, 0.0791756820678711, 0.07869411468505859, 0.07853903961181641, 0.07804029083251954, 0.07786348724365234, 0.07796144104003906, 0.07850534057617188, 0.07756406402587891, 0.07768742370605469, 0.07780335998535157, 0.07917680358886718, 0.07990959930419922, 0.0793683853149414, 0.0916098861694336, 0.07732390594482422, 0.07715382385253906, 0.07678243255615234, 0.07695286560058594, 0.07729561614990234, 0.0772553939819336, 0.07744905853271485, 0.07726028442382812, 0.07695782470703125, 0.07718147277832031, 0.0796610565185547, 0.08236032104492187, 0.07861017608642579, 0.07817036437988281, 0.07769497680664063, 0.07771488189697266, 0.07759705352783203, 0.07813308715820312, 0.07728368377685547, 0.07717059326171875, 0.0774267807006836, 0.0771747817993164, 0.07813865661621094, 0.07990959930419922, 0.079556640625, 0.07869436645507813, 0.07860797119140625, 0.07725456237792969, 0.07756623840332032, 0.07751907348632812, 0.07726080322265624, 0.07723161315917969, 0.07719987487792969, 0.07726898956298828, 0.07805644989013671, 0.07848851013183594, 0.07927200317382813, 0.07892991638183594, 0.07823564910888672, 0.07796640014648437, 0.07789798736572266, 0.07795782470703125, 0.07784243011474609, 0.0771379165649414, 0.07750450897216797, 0.077412353515625, 0.07812652587890626, 0.07847379302978516, 0.08005987548828125, 0.07943017578125, 0.07869843292236328, 0.07993145751953125, 0.07843635559082031, 0.07929036712646484, 0.07765196990966797, 0.07765577697753906, 0.0775433578491211, 0.07734921264648438, 0.07771705627441407, 0.07809478759765626, 0.0788479995727539, 0.08000019073486328, 0.0911673583984375, 0.07721174621582032, 0.07692082977294921, 0.07693711853027344, 0.07752508544921875, 0.07713382720947265, 0.07749222564697265, 0.07744512176513672, 0.07730118560791016, 0.07722643280029297, 0.07745731353759766, 0.07969776153564453, 0.082393310546875, 0.0787026596069336, 0.07832790374755859, 0.07780556488037109, 0.07767359924316407, 0.07785257720947265, 0.07716678619384766, 0.07697897338867188, 0.07728070068359374, 0.07717436981201171, 0.07733551788330079, 0.07905814361572265, 0.08016099548339843, 0.0795244140625, 0.07906441497802734, 0.07806003570556641, 0.07771561431884766, 0.0775263671875, 0.07770588684082032, 0.07784652709960938, 0.07742390441894531, 0.07740265655517578, 0.07810288238525391, 0.07765385437011718, 0.07855900573730469, 0.0791983642578125, 0.07904025268554687, 0.07845231628417969, 0.07823139190673828, 0.07847821044921875, 0.07833190155029297, 0.07839743804931641, 0.07828684997558594, 0.07803084564208984, 0.07761714935302734, 0.0780738525390625, 0.07842396545410156, 0.07863715362548829, 0.07932422637939453, 0.0795248031616211, 0.07808345794677735, 0.07812979125976563, 0.07797459411621094, 0.07788591766357422, 0.07769929504394531, 0.07824160003662109, 0.07823814392089844, 0.07806566619873047, 0.07799603271484375, 0.0780943374633789, 0.079098876953125, 0.09088854217529296, 0.07770317077636718, 0.0775244140625, 0.07749180603027343, 0.07710614776611328, 0.07714406585693359, 0.07766015625, 0.07748377227783203, 0.07721772766113282, 0.07724678039550781, 0.07729494476318359, 0.07949584197998047, 0.08123391723632813, 0.07971635437011719, 0.0780206069946289, 0.07753711700439453, 0.07741251373291015, 0.07748403167724609, 0.07726898956298828, 0.07767401885986328, 0.07719369506835938, 0.0771638412475586, 0.07729564666748047, 0.078695068359375, 0.07968358612060547, 
0.07946646118164062, 0.07902006530761718, 0.07824793243408203, 0.07790348815917969, 0.07886876678466796, 0.07700694274902344, 0.07735033416748047, 0.07704019165039062, 0.07740415954589844, 0.07749561309814453, 0.07894905853271485, 0.079710205078125, 0.08032371520996094, 0.08017190551757812, 0.07965846252441407, 0.07845532989501953, 0.07848550415039063, 0.07763471984863281, 0.0775074234008789, 0.07748403167724609, 0.07789568328857421, 0.07746918487548828, 0.07789984130859375, 0.07888735961914063, 0.07863910675048828, 0.0795688934326172, 0.07915110778808594, 0.07821311950683593, 0.07832575988769531, 0.07810047912597656, 0.0775880355834961, 0.07768460845947266, 0.07802118682861328, 0.07743644714355469, 0.07797808074951172, 0.07806566619873047, 0.07881465911865235, 0.079276611328125, 0.09293392181396484, 0.0775406723022461, 0.07730172729492188, 0.07724742126464844, 0.07748329925537109, 0.07711360168457031, 0.07694918060302734, 0.07714672088623047, 0.0771148452758789, 0.07717459106445312, 0.07715932464599609, 0.0795893783569336, 0.08211046600341797, 0.0803082275390625, 0.07806752014160157, 0.07834015655517577, 0.07723993682861328, 0.07725020599365234, 0.07731446075439453, 0.07719366455078125, 0.07733395385742188, 0.07725933074951172, 0.07737548828125, 0.07837206268310547, 0.08006253051757813, 0.07958182525634766, 0.07901398468017579, 0.07896195220947265, 0.07797628784179687, 0.07779513549804687, 0.07840172576904297, 0.07816806030273438, 0.07836262512207032, 0.07733042907714843, 0.0775393295288086, 0.0777871322631836, 0.07889881896972656, 0.0796121597290039, 0.07929254150390624, 0.07878860473632812, 0.07822284698486329, 0.07810294342041016, 0.07794624328613281, 0.07792304229736328, 0.0779653091430664, 0.07769878387451172, 0.0772836151123047, 0.07774617767333984, 0.07848140716552734, 0.07861030578613282, 0.07903654479980468, 0.07933033752441407, 0.07813346862792969, 0.07832345581054688, 0.07792438507080078, 0.07803084564208984, 0.07822025299072266, 0.07801766204833985, 0.07733277130126953, 0.077652099609375, 0.07900534057617188, 0.07870134735107422, 0.07838307189941406, 0.09136204528808593, 0.0773214111328125, 0.0771777572631836, 0.07787433624267578, 0.07683977508544922, 0.07710720062255859, 0.0772005157470703, 0.0770807647705078, 0.07736390686035156, 0.07806976318359375, 0.07716044616699219, 0.0796917724609375, 0.0814755859375, 0.07850102233886719, 0.07795798492431641, 0.0776089630126953, 0.07791410827636719, 0.07748403167724609, 0.0771725082397461, 0.07785903930664062, 0.0772116470336914, 0.07736029052734375, 0.07752345275878907, 0.07929849243164062, 0.08053187561035156, 0.07924940490722657, 0.0789400634765625, 0.07886243438720703, 0.07914252471923829, 0.07833638763427735, 0.07826185607910156, 0.07758460998535156, 0.07735027313232422, 0.07723910522460938, 0.07750656127929688, 0.0785080337524414, 0.07920134735107422, 0.07942854309082031, 0.07930265808105469, 0.07943373107910157, 0.07816191864013672, 0.07785266876220703, 0.07819264221191406, 0.07801353454589843, 0.07782489776611329, 0.07750160217285157, 0.07791001892089844, 0.07791871643066406, 0.07896717071533203, 0.0794419174194336, 0.07837229156494141, 0.0784655990600586, 0.07900160217285156, 0.07854617309570312, 0.07824614715576172, 0.07833036804199218, 0.07867391967773438, 0.07785881805419922, 0.07792211151123046, 0.07858604431152344, 0.0782144012451172, 0.078833984375, 0.07928025817871094, 0.09098793792724609, 0.07736380767822265, 0.07695343780517579, 0.07715408325195312, 0.07711577606201171, 0.07726227569580078, 0.07718736267089844, 
0.07730204772949219, 0.07867558288574218, 0.07701261138916016, 0.07727385711669922, 0.08020787048339843, 0.08214527893066406, 0.07947673797607421, 0.07853177642822265, 0.07786537933349609, 0.07744902038574218, 0.07765257263183593, 0.07761456298828125, 0.07754720306396484, 0.07778707122802735, 0.07772048187255859, 0.07773139190673828, 0.07956934356689453, 0.08051097869873047, 0.08024269104003906, 0.0789401626586914, 0.07909990692138671, 0.07810015869140625, 0.07778726196289062, 0.07812086486816407, 0.07702146911621094, 0.07784636688232421, 0.07742441558837891, 0.07770764923095703, 0.07882956695556641, 0.07952758026123047, 0.07941734313964843, 0.07906940460205078, 0.07976563262939453, 0.07954617309570312, 0.07813314819335937, 0.07795846557617188, 0.07736009979248047, 0.07733408355712891, 0.07776710510253906, 0.07856460571289063, 0.07859481811523437, 0.07883161926269532, 0.0791572494506836, 0.07943167877197266, 0.0796917724609375, 0.0787877426147461, 0.07860662078857422, 0.07819875335693359, 0.07774214172363281, 0.07785913848876953, 0.07805155181884765, 0.07818854522705078, 0.07860018920898437, 0.07886847686767579, 0.07857350158691406, 0.07860765075683594]",tokens/s,12.779345462133842,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1235, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1037, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 763, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 557, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpew97r0bj/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in 
new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 68.12 MiB is free. Process 82517 has 14.67 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 293.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpei3hxrh8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) 
File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 422.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 328.12 MiB is free. Process 212054 has 14.42 GiB memory in use. Of the allocated memory 14.22 GiB is allocated by PyTorch, and 91.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, 
**kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 28.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 186237 has 14.73 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 137.12 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = 
get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 129303 has 14.66 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 428.13 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1349, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1142, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 852, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 604, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward 
outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 24595 has 14.51 GiB memory in use. Of the allocated memory 14.07 GiB is allocated by PyTorch, and 332.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp_u_01j17/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp11q02xdv/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, 
**kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 110.12 MiB is free. Process 170694 has 14.63 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 51.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 436.12 MiB is free. Process 177374 has 14.31 GiB memory in use. Of the allocated memory 14.14 GiB is allocated by PyTorch, and 61.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return 
forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 87691 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you 
are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 92643 has 14.71 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 28.11 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpa25dvub6/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs 
= self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) 
File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 302.12 MiB is free. Process 162209 has 14.44 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 132.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 58.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 157045 has 14.69 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 203.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmptwyznak9/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, 
**kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1196, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 48.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 191663 has 14.70 GiB memory in use. Of the allocated memory 14.42 GiB is allocated by PyTorch, and 176.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) 
File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 272.12 MiB is free. Process 106610 has 14.47 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 13.83 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpfehud26c/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.52 GiB is free. Process 115883 has 13.22 GiB memory in use. Of the allocated memory 13.10 GiB is allocated by PyTorch, and 6.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", 
line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphbo808zi/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,865.984512,555.614208,0.0,153.092096,140.32384,s,1,9.1468994140625,9.1468994140625,0.0,9.1468994140625,9.1468994140625,9.1468994140625,9.1468994140625,[9.1468994140625],,kWh,1.746994229166745e-05,1.9194472782554583e-06,5.280004223999924e-06,2.4669393793922832e-05,,MB,1334.321152,670.957568,0.0,255.852544,216.246784,s,10,0.23171993637084962,0.023171993637084963,0.00017436465223978186,0.02317174434661865,0.02332704315185547,0.023434913444519045,0.023521209678649905,"[0.02324323272705078, 0.023542783737182618, 0.02330307197570801, 0.0232007999420166, 0.023041952133178712, 0.023142688751220702, 0.023057279586791993, 0.023142463684082033, 0.023206079483032226, 0.02283958435058594]",tokens/s,11047.819363729326,kWh,6.720207021647524e-07,7.409068946099364e-08,4.387742973793104e-07,1.1848856890050563e-06,tokens/kWh,216054596.97547883,MB,1367.793664,691.929088,0.0,276.824064,216.249344,s,10,10.79241943359375,1.079241943359375,0.006239358743713091,1.0791397705078123,1.0867441284179689,1.0892698303222657,1.091290391845703,"[1.0917955322265624, 1.086182861328125, 1.077619873046875, 1.08065966796875, 1.0810726318359376, 1.07561279296875, 1.0825882568359375, 1.07416015625, 1.072142333984375, 1.0705853271484376]",tokens/s,58.37430650989973,kWh,3.126623801908516e-05,3.4481995665094672e-06,1.0994873738420684e-05,4.570931132401532e-05,tokens/kWh,1378274.976698244,,s,630,10.787453977584844,0.017122942821563236,0.00035109770128482294,0.01705289554595947,0.017333849334716794,0.017460949420928955,0.01839627054214478,"[0.01693503952026367, 0.017090431213378908, 0.016963455200195314, 0.01695871925354004, 0.017122432708740233, 0.01710470390319824, 0.01707993507385254, 0.017236032485961915, 0.020441471099853517, 0.018269567489624022, 0.01753766441345215, 0.017227872848510743, 0.017766208648681642, 0.017229888916015627, 0.017137632369995118, 0.0170578556060791, 0.017089920043945314, 0.0171014404296875, 0.017108991622924806, 0.017022272109985352, 0.017154752731323244, 0.01705571174621582, 0.01698409652709961, 0.01697974395751953, 0.01698963165283203, 0.017041791915893556, 0.01702889633178711, 0.017136255264282228, 0.01699203109741211, 0.016969343185424805, 0.01737993621826172, 0.017224000930786132, 0.017249984741210936, 0.0171909122467041, 0.017317184448242186, 0.01710550308227539, 0.017115232467651367, 0.017137632369995118, 0.01715167999267578, 0.01721174430847168, 0.01756787109375, 0.017327871322631836, 0.01704550361633301, 0.01736307144165039, 0.01973983955383301, 0.019286912918090822, 0.017280256271362305, 0.017256799697875976, 0.01739401626586914, 0.01728102493286133, 0.017175552368164062, 0.017335168838500975, 0.01724415969848633, 0.017307775497436523, 0.01723616027832031, 0.017216928482055666, 0.017039775848388672, 0.01707740783691406, 
0.01710972785949707, 0.01716441535949707, 0.017341920852661133, 0.017339040756225586, 0.01725632095336914, 0.01715238380432129, 0.01728927993774414, 0.017354848861694337, 0.017407743453979493, 0.017333791732788085, 0.017402591705322264, 0.017612800598144532, 0.017219423294067383, 0.017176095962524413, 0.017189504623413086, 0.017104896545410156, 0.017309600830078126, 0.017427616119384766, 0.018035648345947265, 0.017115135192871094, 0.017311487197875976, 0.017067903518676757, 0.016963104248046874, 0.01695974349975586, 0.017252960205078126, 0.017187103271484375, 0.017125087738037108, 0.016996639251708984, 0.01710643196105957, 0.0170600643157959, 0.017164127349853515, 0.01719251251220703, 0.017209951400756835, 0.01709619140625, 0.017230239868164063, 0.017139808654785156, 0.017000511169433595, 0.01708435249328613, 0.017239679336547853, 0.0173470401763916, 0.01718467140197754, 0.017153791427612305, 0.01709008026123047, 0.017349023818969727, 0.017123647689819336, 0.017222751617431642, 0.01714883232116699, 0.017477664947509765, 0.017211360931396483, 0.01729497528076172, 0.01733875274658203, 0.01741619110107422, 0.017375232696533204, 0.017433919906616212, 0.01930463981628418, 0.01714419174194336, 0.01700886344909668, 0.01697932815551758, 0.017033599853515626, 0.01723936080932617, 0.017077247619628907, 0.01704547119140625, 0.01697702407836914, 0.017035743713378907, 0.016972320556640625, 0.017014047622680665, 0.017013471603393556, 0.017141727447509764, 0.01705583953857422, 0.01721308708190918, 0.017076416015625, 0.017082111358642578, 0.01706835174560547, 0.017250207901000975, 0.017213632583618164, 0.017265663146972657, 0.0169932804107666, 0.017035104751586913, 0.01713699150085449, 0.017081151962280272, 0.016970943450927735, 0.017002304077148436, 0.017906143188476564, 0.017293664932250978, 0.017125568389892577, 0.016979936599731446, 0.016906272888183593, 0.01701036834716797, 0.0171397762298584, 0.01728678321838379, 0.016985727310180665, 0.017040384292602538, 0.016961824417114257, 0.016995967864990233, 0.017016927719116212, 0.01707382392883301, 0.0169968318939209, 0.017170591354370116, 0.016996063232421876, 0.017079839706420897, 0.01705196762084961, 0.017061567306518553, 0.0170068473815918, 0.017139936447143556, 0.01701683235168457, 0.0170164794921875, 0.01746329689025879, 0.017137216567993163, 0.017089311599731444, 0.016903392791748045, 0.016982912063598633, 0.01695529556274414, 0.016893951416015626, 0.016972896575927734, 0.01691961669921875, 0.01708220863342285, 0.017309696197509765, 0.01716783905029297, 0.017113632202148437, 0.017117183685302736, 0.0169881591796875, 0.016969728469848632, 0.017009952545166015, 0.016988704681396485, 0.017105087280273438, 0.017171712875366212, 0.017357120513916014, 0.01707574462890625, 0.017285984039306642, 0.017180736541748048, 0.017147903442382813, 0.017181983947753908, 0.017517280578613282, 0.018299135208129882, 0.018339584350585938, 0.01743471908569336, 0.017327871322631836, 0.017266847610473632, 0.017868799209594728, 0.01722163200378418, 0.01720252799987793, 0.017250240325927733, 0.017118112564086914, 0.01696544075012207, 0.01694118309020996, 0.017176160812377928, 0.01707369613647461, 0.017304447174072264, 0.01717849540710449, 0.01722572708129883, 0.01731609535217285, 0.01705548858642578, 0.017121440887451173, 0.01707993507385254, 0.01699862480163574, 0.016967552185058594, 0.016975488662719727, 0.017029184341430664, 0.017078720092773437, 0.016979616165161134, 0.017017215728759767, 0.016926944732666014, 0.0169467830657959, 0.016976255416870117, 
0.017043327331542967, 0.01695280075073242, 0.017000896453857422, 0.016932992935180663, 0.01696076774597168, 0.017189088821411132, 0.01710326385498047, 0.017247264862060546, 0.016946367263793945, 0.0169715518951416, 0.016951295852661134, 0.016912384033203123, 0.016959487915039064, 0.016932031631469727, 0.016996992111206054, 0.017030784606933594, 0.017001024246215822, 0.017007680892944337, 0.017345472335815428, 0.017319520950317382, 0.017000864028930664, 0.017010335922241212, 0.016999040603637695, 0.017286880493164063, 0.0174071044921875, 0.01704844856262207, 0.017106399536132813, 0.017015327453613283, 0.017064191818237304, 0.01707187271118164, 0.016672704696655275, 0.017068864822387696, 0.016968896865844726, 0.017064735412597655, 0.017061920166015626, 0.017313343048095703, 0.017293760299682617, 0.0170347843170166, 0.01703334426879883, 0.017275360107421874, 0.017168256759643556, 0.017092607498168946, 0.016990207672119142, 0.017031391143798827, 0.017059711456298827, 0.017043359756469728, 0.01748150444030762, 0.016949472427368165, 0.01690153694152832, 0.017133279800415038, 0.016935487747192383, 0.017090879440307617, 0.017057823181152343, 0.01723321533203125, 0.017033952713012696, 0.016969663619995117, 0.016987424850463867, 0.016962528228759766, 0.01690188789367676, 0.016969728469848632, 0.01690559959411621, 0.017050239562988283, 0.01692403221130371, 0.017030111312866213, 0.017133056640625, 0.017207456588745118, 0.017121280670166016, 0.017115135192871094, 0.017067520141601563, 0.017019392013549805, 0.01740336036682129, 0.017560127258300782, 0.01722979164123535, 0.017297088623046877, 0.017174848556518553, 0.018081344604492188, 0.0176378231048584, 0.017083999633789062, 0.017084831237792968, 0.01719500732421875, 0.017254240036010744, 0.01739593505859375, 0.01727686309814453, 0.017164287567138673, 0.017116640090942385, 0.017334367752075194, 0.017348127365112306, 0.017458080291748047, 0.017373184204101562, 0.017260095596313477, 0.017291711807250976, 0.017137664794921875, 0.01706617546081543, 0.01699635124206543, 0.017242111206054688, 0.017275104522705077, 0.017239839553833007, 0.017249439239501955, 0.017199968338012694, 0.017156095504760743, 0.017184255599975586, 0.017189376831054686, 0.017055744171142577, 0.01710054397583008, 0.017097087860107423, 0.01701215934753418, 0.017004064559936524, 0.016996416091918945, 0.01707241630554199, 0.01723654365539551, 0.01706188774108887, 0.016980064392089843, 0.017008703231811525, 0.017096063613891602, 0.017025503158569336, 0.01718806457519531, 0.01705449676513672, 0.01697996711730957, 0.016956832885742186, 0.0169334716796875, 0.016914432525634765, 0.01700351905822754, 0.016929792404174804, 0.01722777557373047, 0.016891904830932617, 0.017022975921630858, 0.017045024871826173, 0.016971296310424804, 0.01689232063293457, 0.017010335922241212, 0.01694937515258789, 0.016984832763671874, 0.017354751586914064, 0.017180671691894533, 0.017004735946655275, 0.016898912429809572, 0.017033824920654295, 0.01704185676574707, 0.016977855682373047, 0.017004768371582032, 0.01698307228088379, 0.017093023300170897, 0.017066015243530273, 0.01709811210632324, 0.016994432449340822, 0.017037792205810545, 0.016879968643188477, 0.017006591796875, 0.017126815795898438, 0.016947807312011717, 0.016969728469848632, 0.01704547119140625, 0.01746544075012207, 0.01696352005004883, 0.017155296325683595, 0.01734111976623535, 0.017069759368896483, 0.02122819137573242, 0.018323711395263672, 0.01776639938354492, 0.017315200805664063, 0.017158336639404297, 0.01706979179382324, 0.0171680965423584, 
0.017163551330566407, 0.017152992248535157, 0.017174400329589844, 0.017113983154296877, 0.016973440170288085, 0.01707859230041504, 0.01709062385559082, 0.017033056259155275, 0.01714396858215332, 0.017102848052978514, 0.016969728469848632, 0.017115135192871094, 0.016975135803222657, 0.01700239944458008, 0.017005056381225587, 0.01707040023803711, 0.017149568557739258, 0.017099424362182616, 0.016987871170043946, 0.017049152374267577, 0.016996959686279296, 0.01704854393005371, 0.017001344680786134, 0.017012928009033205, 0.017014528274536134, 0.016913568496704102, 0.01802524757385254, 0.017033023834228514, 0.016953887939453124, 0.016931711196899416, 0.017001375198364258, 0.01702707290649414, 0.01702707290649414, 0.01701068878173828, 0.016973087310791016, 0.017060575485229493, 0.017286880493164063, 0.0172938232421875, 0.017086208343505858, 0.0173035831451416, 0.017031103134155273, 0.017137727737426757, 0.01697177505493164, 0.017069311141967775, 0.0169453125, 0.01694985580444336, 0.017012479782104493, 0.01699046325683594, 0.017209344863891602, 0.017274879455566407, 0.016991296768188478, 0.016956256866455077, 0.016983680725097657, 0.01693894386291504, 0.01708505630493164, 0.016894880294799804, 0.017117279052734375, 0.017147903442382813, 0.01700796890258789, 0.017143455505371094, 0.017140768051147462, 0.017016799926757812, 0.01745408058166504, 0.01692572784423828, 0.017053855895996093, 0.017073984146118163, 0.016952512741088867, 0.016900928497314453, 0.01702092742919922, 0.016951295852661134, 0.016975168228149415, 0.017113567352294922, 0.017053823471069336, 0.016910400390625, 0.016885440826416017, 0.016924800872802733, 0.017049215316772462, 0.017072736740112306, 0.01701478385925293, 0.01697177505493164, 0.016940095901489257, 0.017085376739501952, 0.017380800247192383, 0.01698633575439453, 0.017099103927612304, 0.01687731170654297, 0.016885248184204102, 0.01688860893249512, 0.016878591537475587, 0.017070911407470704, 0.017000608444213867, 0.016937152862548828, 0.016940799713134766, 0.016966976165771485, 0.018024511337280273, 0.017174591064453126, 0.017161888122558595, 0.017030111312866213, 0.016922527313232422, 0.01699440002441406, 0.01693267250061035, 0.01696512031555176, 0.016925632476806642, 0.016939903259277345, 0.017113983154296877, 0.017024383544921876, 0.017242752075195312, 0.01757382392883301, 0.017043039321899413, 0.01700886344909668, 0.016969728469848632, 0.016918783187866212, 0.01696335983276367, 0.016926944732666014, 0.01689753532409668, 0.01732371139526367, 0.016880447387695313, 0.016997535705566405, 0.017038143157958985, 0.01729305648803711, 0.017063936233520507, 0.016982015609741212, 0.016965631484985352, 0.017031167984008787, 0.0169835205078125, 0.01689244842529297, 0.01694054412841797, 0.01699206352233887, 0.017016767501831054, 0.01696019172668457, 0.01697091293334961, 0.017142080307006837, 0.017021888732910155, 0.017057024002075195, 0.01701888084411621, 0.016965568542480467, 0.01698863983154297, 0.01701068878173828, 0.017154048919677735, 0.01720524787902832, 0.017063871383666992, 0.017358911514282226, 0.017060096740722657, 0.017056768417358398, 0.016921375274658205, 0.01693926429748535, 0.016939775466918945, 0.01693804740905762, 0.016938911437988282, 0.016971456527709962, 0.01724179267883301, 0.016959487915039064, 0.016972415924072264, 0.016911359786987306, 0.017107967376708985, 0.017135904312133788, 0.018419424057006837, 0.017112512588500977, 0.016931615829467773, 0.01684771156311035, 0.0168306884765625, 0.016873504638671873, 0.01699295997619629, 0.016859136581420898, 
0.016924224853515624, 0.017079839706420897, 0.016949407577514647, 0.016912384033203123, 0.017049856185913086, 0.01716681671142578, 0.01696886444091797, 0.01686822319030762, 0.016881664276123046, 0.016777215957641603, 0.01680179214477539, 0.01690387153625488, 0.01694291114807129, 0.016851455688476562, 0.01680940818786621, 0.01686124801635742, 0.016835071563720702, 0.016871423721313478, 0.017057472229003907, 0.016937280654907228, 0.01686076736450195, 0.016877471923828127, 0.017007104873657225, 0.017153024673461914, 0.017015680313110352, 0.017068159103393554, 0.017093856811523436, 0.01694611167907715, 0.01690313529968262, 0.016911231994628906, 0.016896032333374025, 0.016945152282714843, 0.016876544952392578, 0.017521631240844725, 0.016954719543457033, 0.017137855529785157, 0.01703987121582031, 0.016815296173095705, 0.01689193534851074, 0.016984832763671874, 0.016901792526245116, 0.016875871658325194, 0.016887359619140625, 0.01689151954650879, 0.016884063720703123, 0.01684249687194824, 0.016871551513671874, 0.016953952789306642, 0.017031167984008787, 0.01700432014465332, 0.016899456024169923, 0.017606975555419922, 0.017197599411010744, 0.016850656509399414, 0.01685327911376953, 0.016892127990722657, 0.016783136367797852, 0.016861183166503906, 0.016885759353637696, 0.016818048477172852, 0.01691007995605469, 0.016828479766845702, 0.016730432510375978, 0.016986240386962892, 0.01720921516418457, 0.01688572883605957, 0.01685862350463867, 0.016990976333618166, 0.019316511154174806, 0.016955392837524414, 0.01698412895202637, 0.01692460823059082, 0.016837888717651368, 0.016962303161621093, 0.016899648666381835, 0.016860960006713867, 0.01685081672668457, 0.016882463455200194, 0.0167956485748291, 0.016871231079101563]",tokens/s,58.40117615417612,, 8bit-bnb-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,1108.52096,608.043008,0.0,205.520896,177.265664,s,1,9.4083662109375,9.4083662109375,0.0,9.4083662109375,9.4083662109375,9.4083662109375,9.4083662109375,[9.4083662109375],,kWh,2.41029336041656e-05,2.651071137425105e-06,7.254450248000066e-06,3.4008454989590766e-05,,MB,1383.542784,710.803456,0.0,287.309824,258.169344,s,10,0.3187180519104004,0.03187180519104004,0.0006153113223070613,0.03166728019714356,0.03243062934875488,0.032954675483703615,0.0333739123916626,"[0.031807231903076175, 0.03187968063354492, 0.03145395278930664, 0.03152732849121094, 0.03150598335266113, 0.03231417465209961, 0.03205027389526367, 0.03347872161865234, 0.031325759887695315, 0.03137494468688965]",tokens/s,8032.17760856445,kWh,9.232656205300574e-07,1.0182018076667288e-07,3.592829878481025e-07,1.3843687891448327e-06,tokens/kWh,184921822.8606115,MB,1416.62208,731.774976,0.0,308.281344,258.171904,s,10,18.77638037109375,1.877638037109375,0.009959149063942116,1.8730753784179688,1.8930055419921874,1.8949558349609374,1.8965160693359375,"[1.8969061279296875, 1.8925721435546874, 1.8685574951171875, 1.8857213134765625, 
1.8724354248046875, 1.8765770263671875, 1.8720084228515625, 1.87371533203125, 1.87235693359375, 1.8655301513671876]",tokens/s,33.55279279332695,kWh,5.441900205947005e-05,6.002110494250657e-06,1.796512056055188e-05,7.838623311427259e-05,tokens/kWh,803712.5589152586,,s,630,18.77135978317262,0.029795809179639057,0.0007237185251499387,0.02960520076751709,0.03019063320159912,0.03076336431503296,0.03361385467529297,"[0.029741727828979492, 0.029880319595336914, 0.029888032913208008, 0.029806079864501952, 0.029919296264648437, 0.02991139221191406, 0.029758016586303712, 0.030390272140502928, 0.030007295608520508, 0.03016499137878418, 0.03035955238342285, 0.030010847091674803, 0.030095903396606446, 0.02981052780151367, 0.03005673599243164, 0.029972543716430666, 0.03001935958862305, 0.029890592575073243, 0.02997020721435547, 0.030029407501220705, 0.030015167236328126, 0.029891519546508788, 0.029932832717895506, 0.02981324768066406, 0.02981635284423828, 0.02997327995300293, 0.030031776428222655, 0.0307589111328125, 0.02993078422546387, 0.029807008743286133, 0.030017696380615234, 0.02983103942871094, 0.029976863861083985, 0.029886463165283202, 0.031389696121215824, 0.03123734474182129, 0.029987743377685547, 0.029918752670288085, 0.029894720077514647, 0.029839040756225586, 0.029842016220092773, 0.02987606430053711, 0.029777248382568358, 0.029846336364746092, 0.03341494369506836, 0.03053913688659668, 0.030204992294311523, 0.030174943923950197, 0.030189279556274415, 0.029972095489501954, 0.029978336334228514, 0.029914112091064454, 0.029874176025390626, 0.029953792572021486, 0.0302608642578125, 0.030044031143188477, 0.03023539161682129, 0.030406656265258788, 0.03016089630126953, 0.02998476791381836, 0.03001296043395996, 0.03017366409301758, 0.02990300750732422, 0.02973891258239746, 0.030003007888793946, 0.029870880126953124, 0.029843360900878906, 0.02978620719909668, 0.029755392074584962, 0.029879999160766602, 0.02977414321899414, 0.030356800079345703, 0.030032575607299803, 0.03081216049194336, 0.03000441551208496, 0.03000934410095215, 0.029948352813720703, 0.03004150390625, 0.030317535400390627, 0.030374944686889647, 0.030546911239624025, 0.030767007827758788, 0.030056352615356444, 0.030326976776123046, 0.02989606475830078, 0.030902912139892578, 0.02995609664916992, 0.030344671249389648, 0.030054559707641603, 0.03005683135986328, 0.029814783096313476, 0.029943424224853514, 0.029794687271118163, 0.029871456146240233, 0.029934112548828124, 0.03019503974914551, 0.029610784530639648, 0.029816064834594726, 0.029554496765136717, 0.029641664505004883, 0.029599103927612305, 0.029542367935180665, 0.02947539138793945, 0.029464607238769532, 0.029596832275390624, 0.029583616256713866, 0.029542688369750977, 0.029460863113403322, 0.029507328033447265, 0.029346111297607422, 0.029777856826782228, 0.031608896255493166, 0.030125600814819336, 0.0299703369140625, 0.030288000106811524, 0.02966934394836426, 0.03045008087158203, 0.029908287048339845, 0.02966592025756836, 0.030111328125, 0.030190143585205078, 0.032784286499023436, 0.031893503189086916, 0.02991923141479492, 0.029484607696533202, 0.029467071533203125, 0.02923196792602539, 0.029468448638916015, 0.029634687423706056, 0.029402463912963868, 0.029563232421875, 0.029475231170654297, 0.02949135971069336, 0.029613088607788086, 0.0295425910949707, 0.029471168518066405, 0.029434112548828124, 0.029280384063720702, 0.02981488037109375, 0.02992918395996094, 0.029343711853027345, 0.02954447937011719, 0.030058080673217774, 0.029675935745239256, 0.029710079193115236, 
0.029739200592041017, 0.029476896286010742, 0.02952400016784668, 0.02938185691833496, 0.030179935455322264, 0.029348031997680664, 0.0295314884185791, 0.02941324806213379, 0.02934649658203125, 0.02950726318359375, 0.03354665756225586, 0.029662559509277344, 0.029915807723999023, 0.02974515151977539, 0.029607456207275392, 0.029512159347534178, 0.029484607696533202, 0.029411775588989258, 0.02944000053405762, 0.029501440048217774, 0.0293253116607666, 0.029478944778442383, 0.029337568283081053, 0.0293621768951416, 0.029518112182617188, 0.02968342399597168, 0.02951372718811035, 0.029487104415893556, 0.029480224609375, 0.029360128402709962, 0.02946908760070801, 0.029333824157714843, 0.029675167083740236, 0.029915456771850587, 0.030076927185058593, 0.030127391815185547, 0.02971891212463379, 0.030285375595092773, 0.029905471801757812, 0.030140607833862305, 0.0296059513092041, 0.029433855056762694, 0.029294591903686523, 0.029531455993652343, 0.03384627151489258, 0.029698047637939453, 0.02953011131286621, 0.02957916831970215, 0.029386848449707032, 0.029387807846069335, 0.029567136764526367, 0.029598527908325196, 0.02943177604675293, 0.02964463996887207, 0.03163167953491211, 0.034911903381347656, 0.029574464797973633, 0.029604768753051756, 0.02954412841796875, 0.029903167724609374, 0.02972857666015625, 0.02965648078918457, 0.02969375991821289, 0.029698911666870116, 0.02976576042175293, 0.02963462448120117, 0.029583040237426757, 0.0299769287109375, 0.029732767105102538, 0.029574848175048827, 0.03056425666809082, 0.029785791397094728, 0.029673215866088867, 0.02968806457519531, 0.03127164840698242, 0.03098975944519043, 0.02968796730041504, 0.029544639587402343, 0.03282761764526367, 0.030301952362060548, 0.029445375442504883, 0.029424320220947264, 0.02965331268310547, 0.02954649543762207, 0.02937779235839844, 0.029516672134399412, 0.029464448928833008, 0.029537408828735352, 0.029490047454833985, 0.029443775177001953, 0.029452671051025392, 0.029476255416870118, 0.029680160522460936, 0.029460479736328125, 0.029440191268920897, 0.029669183731079102, 0.029575168609619142, 0.029575168609619142, 0.029636608123779298, 0.02945574378967285, 0.029310623168945314, 0.03070627212524414, 0.02944393539428711, 0.03012575912475586, 0.02963484764099121, 0.029850175857543945, 0.029612031936645508, 0.029366687774658205, 0.02959974479675293, 0.029599775314331056, 0.029659103393554688, 0.029683584213256835, 0.03361804962158203, 0.030019519805908203, 0.029429824829101562, 0.02934988784790039, 0.02941103935241699, 0.029331232070922853, 0.029446783065795897, 0.02938185691833496, 0.02957583999633789, 0.029431072235107422, 0.029401824951171874, 0.02951372718811035, 0.02958131217956543, 0.02983692741394043, 0.029612415313720702, 0.029474815368652343, 0.029357343673706054, 0.02949398422241211, 0.02944000053405762, 0.02969926452636719, 0.029272192001342772, 0.029633216857910157, 0.029640127182006835, 0.029431840896606446, 0.03034601593017578, 0.02983897590637207, 0.029660512924194336, 0.02941007995605469, 0.02943180847167969, 0.029423263549804686, 0.02948089599609375, 0.02961199951171875, 0.029481407165527343, 0.02949465560913086, 0.029941984176635742, 0.034124191284179685, 0.029829248428344727, 0.029570943832397462, 0.02965852737426758, 0.029782623291015626, 0.02957926368713379, 0.02967731285095215, 0.029579072952270507, 0.029477184295654296, 0.029410688400268555, 0.029426431655883788, 0.029427711486816405, 0.02954035186767578, 0.02935327911376953, 0.029498207092285157, 0.029529951095581056, 0.02942540740966797, 0.02972287940979004, 
0.02962124824523926, 0.02952809524536133, 0.029739999771118165, 0.03045363235473633, 0.029569055557250975, 0.029657087326049804, 0.02982467269897461, 0.0296942081451416, 0.02948310470581055, 0.02955695915222168, 0.029599424362182616, 0.029759584426879884, 0.02935798454284668, 0.029315296173095702, 0.029275840759277343, 0.032145599365234374, 0.031055871963500976, 0.02955238342285156, 0.029419776916503906, 0.02951683235168457, 0.02943484878540039, 0.029394943237304686, 0.02939084815979004, 0.029329408645629884, 0.029448480606079103, 0.029386207580566405, 0.030198015213012696, 0.029683040618896483, 0.029624479293823242, 0.029638208389282227, 0.029448896408081054, 0.029409536361694334, 0.029609983444213867, 0.029595680236816406, 0.029505151748657227, 0.029747200012207032, 0.029584896087646483, 0.030174047470092773, 0.02973695945739746, 0.029986495971679687, 0.02952169609069824, 0.029514272689819335, 0.02946371269226074, 0.029510496139526367, 0.02944156837463379, 0.02945257568359375, 0.029626623153686523, 0.029337535858154296, 0.029378559112548826, 0.02944723129272461, 0.03347756958007812, 0.02981888008117676, 0.02961017608642578, 0.02948464012145996, 0.02957334327697754, 0.03293203353881836, 0.030547296524047852, 0.029739519119262696, 0.029800416946411133, 0.029715936660766603, 0.02947760009765625, 0.029390207290649416, 0.029396703720092773, 0.029498079299926757, 0.030443584442138672, 0.02978326416015625, 0.029634592056274413, 0.02952057647705078, 0.029146240234375, 0.029318016052246095, 0.03001753616333008, 0.029800447463989257, 0.02932649612426758, 0.029534975051879884, 0.029501535415649413, 0.029677568435668947, 0.02979020881652832, 0.029655263900756835, 0.02957219123840332, 0.029237728118896484, 0.029592992782592774, 0.02958188819885254, 0.029632768630981444, 0.029848575592041016, 0.034562591552734376, 0.029756160736083986, 0.029531200408935546, 0.029438688278198243, 0.029574464797973633, 0.02941606330871582, 0.02944000053405762, 0.029615232467651367, 0.029632575988769533, 0.0293875846862793, 0.029403135299682616, 0.029427711486816405, 0.02951532745361328, 0.029335424423217772, 0.02981340789794922, 0.02984752082824707, 0.0296790714263916, 0.030001632690429686, 0.02948512077331543, 0.029310911178588868, 0.03013151931762695, 0.029823839187622072, 0.02948080062866211, 0.029370080947875975, 0.029423711776733398, 0.029331071853637695, 0.02935625648498535, 0.029354175567626952, 0.02943414306640625, 0.02942505645751953, 0.029616544723510742, 0.029644735336303712, 0.029422784805297853, 0.029344703674316405, 0.032315521240234374, 0.031044479370117187, 0.029784736633300782, 0.02978236770629883, 0.029916736602783205, 0.029659584045410158, 0.02961408042907715, 0.029724063873291014, 0.029609600067138673, 0.02929520034790039, 0.0293603515625, 0.029308416366577147, 0.02953932762145996, 0.029923648834228517, 0.02976358413696289, 0.030266847610473633, 0.029759071350097657, 0.029449407577514648, 0.03022643280029297, 0.029904640197753907, 0.02985478401184082, 0.030127040863037108, 0.029908992767333983, 0.029845504760742186, 0.029464576721191408, 0.029345792770385744, 0.029609983444213867, 0.029421663284301756, 0.02953113555908203, 0.029326047897338867, 0.02964089584350586, 0.029493247985839844, 0.02965622329711914, 0.029741920471191407, 0.03370598220825195, 0.02969171142578125, 0.029466207504272462, 0.029438560485839843, 0.0294072322845459, 0.02944819259643555, 0.02998271942138672, 0.029650463104248046, 0.0295795841217041, 0.02960758399963379, 0.029567712783813475, 0.02936323165893555, 0.02957798385620117, 
0.029321216583251954, 0.029435903549194335, 0.029487104415893556, 0.029644800186157227, 0.029550592422485353, 0.030117887496948242, 0.029578720092773438, 0.02947065544128418, 0.029436031341552735, 0.029378879547119142, 0.029732128143310547, 0.02969036865234375, 0.02951535987854004, 0.029469472885131836, 0.029446271896362303, 0.029404767990112303, 0.029605632781982423, 0.029604000091552736, 0.029618560791015627, 0.029476608276367187, 0.02964630317687988, 0.02957801628112793, 0.03360358428955078, 0.02956051254272461, 0.02938297653198242, 0.029420543670654296, 0.02936716842651367, 0.02950156784057617, 0.029431264877319337, 0.02903500747680664, 0.02948316764831543, 0.02936422348022461, 0.029624544143676757, 0.029730592727661133, 0.029829120635986327, 0.029779104232788085, 0.029565792083740234, 0.029396160125732422, 0.030096351623535158, 0.029655008316040038, 0.02973801612854004, 0.029565792083740234, 0.029609983444213867, 0.029517824172973633, 0.029689920425415038, 0.02945964813232422, 0.029460800170898437, 0.02935865592956543, 0.029417600631713867, 0.029497152328491212, 0.02962220764160156, 0.02936204719543457, 0.029626495361328126, 0.029660608291625975, 0.029493824005126953, 0.0322435188293457, 0.031000768661499024, 0.029489152908325194, 0.02959382438659668, 0.029511455535888673, 0.029744640350341797, 0.029381120681762695, 0.029284448623657228, 0.02949081611633301, 0.02950543975830078, 0.029385087966918945, 0.029589088439941406, 0.029409215927124022, 0.02953696060180664, 0.029415199279785156, 0.029359872817993165, 0.030613759994506835, 0.02952191925048828, 0.029374208450317383, 0.02947068786621094, 0.031111232757568358, 0.030412160873413085, 0.0297108154296875, 0.029595903396606445, 0.02948899269104004, 0.029563455581665038, 0.02948281669616699, 0.02939689636230469, 0.029782272338867186, 0.029368064880371095, 0.029462560653686524, 0.02947203254699707, 0.02936832046508789, 0.029364063262939454, 0.029391679763793945, 0.03366915130615234, 0.02966671943664551, 0.029130815505981445, 0.02928531265258789, 0.029384960174560548, 0.029404191970825194, 0.02929145622253418, 0.02934966468811035, 0.02975334358215332, 0.029949951171875, 0.02976563262939453, 0.02973673629760742, 0.02954607963562012, 0.02939369583129883, 0.03012563133239746, 0.029402944564819337, 0.02966192054748535, 0.02952911949157715, 0.029614816665649413, 0.029540576934814454, 0.029394464492797853, 0.029374719619750977, 0.02942323112487793, 0.029389184951782228, 0.03000691223144531, 0.02965132713317871, 0.029761728286743165, 0.029783872604370116, 0.029337600708007814, 0.02942361640930176, 0.029327360153198243, 0.029342975616455078, 0.02937900733947754, 0.029603935241699218, 0.033466175079345704, 0.029618303298950197, 0.030007583618164062, 0.029485151290893553, 0.02935593605041504, 0.029444095611572265, 0.02952134323120117, 0.029295167922973632, 0.02967353630065918, 0.029422719955444335, 0.029438783645629883, 0.029626367568969726, 0.029513887405395508, 0.02939888000488281, 0.03023686408996582, 0.029531967163085936, 0.029521631240844726, 0.029538143157958986, 0.029515680313110353, 0.029473056793212892, 0.029421695709228514, 0.029511039733886718, 0.029485696792602538, 0.029724607467651366, 0.0295831356048584, 0.029530336380004883, 0.029479007720947265, 0.029655008316040038, 0.029468799591064455, 0.02956460762023926, 0.029458816528320313]",tokens/s,33.5617668233474,, 
4bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",True,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,1117.61408,608.043008,0.0,205.520896,177.265664,s,1,9.351994140625,9.351994140625,0.0,9.351994140625,9.351994140625,9.351994140625,9.351994140625,[9.351994140625],,kWh,2.4285571158334087e-05,2.671749275420339e-06,7.279728045999582e-06,3.423704847975401e-05,,MB,1371.426816,710.803456,0.0,287.309824,257.382912,s,10,0.2856099548339844,0.02856099548339843,9.082140496405073e-05,0.02856704044342041,0.028647679710388185,0.02865243215560913,0.02865623411178589,"[0.028336767196655274, 0.028571584701538085, 0.028562496185302735, 0.028644128799438475, 0.028545856475830078, 0.028505472183227538, 0.028519296646118165, 0.02862054443359375, 0.028646623611450196, 0.02865718460083008]",tokens/s,8963.273011572877,kWh,8.324729784425537e-07,9.18068742593111e-08,3.2285005251851846e-07,1.2471299052203834e-06,tokens/kWh,205271318.51173243,MB,1384.636416,731.774976,0.0,308.281344,257.58208,s,10,17.106060668945315,1.710606066894531,0.004963823747176531,1.7092789916992186,1.7149471923828126,1.7192622924804688,1.7227143725585938,"[1.71398828125, 1.711919189453125, 1.71024169921875, 1.7078294677734376, 1.705492919921875, 1.7066334228515625, 1.723577392578125, 1.7083162841796875, 1.7074493408203124, 1.7106126708984375]",tokens/s,36.82905212324628,kWh,4.981519764114089e-05,5.4942864932795125e-06,1.6550274557081523e-05,7.185975869150192e-05,tokens/kWh,876707.6476065363,,s,630,17.1010925617218,0.02714459136781238,0.0006332988425725672,0.0270272798538208,0.02739449520111084,0.027571853160858154,0.029778018474578862,"[0.02668185615539551, 0.02707391929626465, 0.02690675163269043, 0.026993600845336915, 0.02689523124694824, 0.0269935359954834, 0.02682009506225586, 0.02690278434753418, 0.026963327407836916, 0.026966079711914063, 0.026870847702026367, 0.026907615661621094, 0.026923295974731445, 0.026786399841308595, 0.026957727432250975, 0.026902528762817384, 0.027144479751586913, 0.026860927581787108, 0.028590431213378908, 0.02793606376647949, 0.026972543716430663, 0.027048351287841797, 0.026967103958129884, 0.026936447143554688, 0.02704150390625, 0.027064447402954103, 0.029988000869750977, 0.027473920822143554, 0.02719753646850586, 0.027248863220214845, 0.027210367202758788, 0.027570079803466797, 0.02710940742492676, 0.027053504943847655, 0.027083168029785155, 0.027064447402954103, 0.028104576110839843, 0.029810943603515626, 0.027520767211914064, 0.027239904403686524, 0.027646240234375, 0.027238880157470703, 0.02695350456237793, 0.026917152404785157, 0.026949344635009767, 0.026994400024414063, 0.027052064895629883, 0.027555999755859376, 0.027250112533569334, 0.02713052749633789, 0.02697420883178711, 0.027052032470703126, 0.02701312065124512, 0.027020383834838867, 0.026925983428955077, 0.026930400848388672, 0.02692550468444824, 0.026854879379272462, 
0.02705295944213867, 0.027090911865234376, 0.027102848052978516, 0.02692953681945801, 0.027125823974609376, 0.027303903579711915, 0.027398239135742186, 0.02733078384399414, 0.027045888900756834, 0.027453376770019532, 0.02710086441040039, 0.027084096908569336, 0.026919647216796874, 0.027006240844726564, 0.026874624252319335, 0.026797952651977538, 0.026876224517822265, 0.026750816345214843, 0.02702332878112793, 0.026804256439208984, 0.026926624298095704, 0.026927391052246095, 0.026850976943969727, 0.02679043197631836, 0.027201536178588868, 0.02690457534790039, 0.02683091163635254, 0.026979904174804687, 0.027058719635009765, 0.027057504653930663, 0.02745395278930664, 0.027410432815551757, 0.026901567459106445, 0.02694243240356445, 0.027037664413452147, 0.027021440505981445, 0.027055295944213867, 0.026991296768188476, 0.02693529510498047, 0.02692300796508789, 0.027025440216064452, 0.027131872177124025, 0.029697408676147462, 0.027591264724731446, 0.027359264373779297, 0.02776675224304199, 0.02733695983886719, 0.027424448013305663, 0.027592384338378906, 0.02730403137207031, 0.027232576370239257, 0.027321983337402343, 0.027330944061279297, 0.027394079208374025, 0.027275232315063475, 0.027414527893066407, 0.02714591979980469, 0.027048255920410155, 0.02700111961364746, 0.026990304946899413, 0.02698758316040039, 0.02701408004760742, 0.027286975860595704, 0.026866239547729494, 0.027315391540527343, 0.027161407470703124, 0.027393983840942382, 0.027032960891723634, 0.02659129524230957, 0.027060224533081056, 0.02696406364440918, 0.027021215438842772, 0.027534719467163085, 0.027065088272094726, 0.027106719970703123, 0.02735049629211426, 0.027181535720825194, 0.02715088081359863, 0.027195167541503907, 0.027053375244140625, 0.02715667152404785, 0.02757254409790039, 0.0268887996673584, 0.02690640068054199, 0.02711759948730469, 0.027147680282592773, 0.026921440124511718, 0.027220096588134766, 0.027033824920654297, 0.027449119567871095, 0.027321567535400392, 0.027077407836914064, 0.02707219123840332, 0.02728169631958008, 0.027015199661254884, 0.02692460823059082, 0.027029151916503905, 0.02710553550720215, 0.027023359298706053, 0.02697020721435547, 0.027126304626464842, 0.027123455047607423, 0.027089311599731446, 0.027156223297119142, 0.026991968154907228, 0.02690115165710449, 0.027182336807250976, 0.027255552291870117, 0.027211936950683593, 0.02721366310119629, 0.026927295684814452, 0.027127616882324217, 0.02701312065124512, 0.027007232666015624, 0.027002880096435547, 0.02734000015258789, 0.027451936721801757, 0.027369695663452147, 0.02782748794555664, 0.027308544158935546, 0.02710246467590332, 0.02703158378601074, 0.027302080154418946, 0.02735772705078125, 0.02702441596984863, 0.027116512298583983, 0.027041919708251955, 0.027186527252197265, 0.027257375717163086, 0.026999935150146485, 0.0272042236328125, 0.02682899284362793, 0.027049760818481445, 0.02736025619506836, 0.02697622489929199, 0.026862943649291992, 0.027038400650024413, 0.02721798324584961, 0.027047903060913085, 0.026879968643188475, 0.02689344024658203, 0.027020288467407227, 0.026871679306030273, 0.026924480438232423, 0.027152479171752928, 0.02701260757446289, 0.026907007217407228, 0.026946144104003908, 0.02690457534790039, 0.026994144439697266, 0.02726911926269531, 0.02715497589111328, 0.027108671188354493, 0.02689913558959961, 0.02703481674194336, 0.026850048065185546, 0.02694870376586914, 0.02721686363220215, 0.027068416595458986, 0.02694144058227539, 0.02700444793701172, 0.028639711380004883, 0.030532800674438476, 0.0272491512298584, 
0.027210176467895506, 0.027180927276611328, 0.027056127548217773, 0.02703171157836914, 0.027024608612060547, 0.026958208084106445, 0.02694169616699219, 0.02695091247558594, 0.02696460723876953, 0.027104671478271485, 0.0269869441986084, 0.026921247482299803, 0.02694758415222168, 0.026908672332763672, 0.026955551147460937, 0.027066463470458983, 0.026916608810424805, 0.026886207580566406, 0.02691926383972168, 0.026930400848388672, 0.026903072357177735, 0.026845247268676757, 0.026887744903564454, 0.027109983444213868, 0.027508480072021484, 0.026972415924072266, 0.026953920364379883, 0.027336511611938476, 0.02709440040588379, 0.027019872665405273, 0.026650815963745116, 0.02691872024536133, 0.0269803524017334, 0.027301855087280273, 0.026916511535644533, 0.026881664276123048, 0.026880128860473633, 0.02686630439758301, 0.02676950454711914, 0.026865312576293945, 0.02688627243041992, 0.026849311828613283, 0.026994623184204102, 0.02697430419921875, 0.026899967193603515, 0.026843616485595703, 0.02696201515197754, 0.02686566352844238, 0.02680019187927246, 0.02681145668029785, 0.026886240005493164, 0.026917791366577147, 0.02696579170227051, 0.02733795166015625, 0.026999679565429688, 0.026936864852905272, 0.026836639404296876, 0.027003711700439453, 0.026873664855957033, 0.02687811279296875, 0.027404319763183593, 0.02716262435913086, 0.027168928146362306, 0.027174943923950194, 0.026965824127197266, 0.026908544540405272, 0.02694576072692871, 0.026957727432250975, 0.026959871292114256, 0.026857568740844728, 0.027352895736694336, 0.027061664581298828, 0.027058879852294923, 0.027015167236328123, 0.02711769676208496, 0.027188447952270507, 0.02714691162109375, 0.02708710479736328, 0.027085983276367187, 0.03208867263793945, 0.02751679992675781, 0.02692518424987793, 0.027016576766967774, 0.026937984466552736, 0.02694384002685547, 0.026891935348510743, 0.026918912887573244, 0.026841087341308592, 0.026834911346435546, 0.02725836753845215, 0.026982944488525392, 0.026812416076660156, 0.02686479949951172, 0.026652223587036134, 0.02692550468444824, 0.026875904083251953, 0.027490304946899413, 0.02709446334838867, 0.027251232147216798, 0.027119232177734376, 0.02828553581237793, 0.027024511337280274, 0.026995391845703126, 0.02699078369140625, 0.027280672073364258, 0.027007520675659178, 0.02714361572265625, 0.026988479614257814, 0.02714691162109375, 0.02711497688293457, 0.026933759689331056, 0.02694668769836426, 0.026858623504638673, 0.02680953598022461, 0.026919488906860353, 0.027131839752197264, 0.027048000335693358, 0.026892288208007813, 0.02690892791748047, 0.026895519256591796, 0.026802783966064454, 0.02693120002746582, 0.0268984317779541, 0.026902528762817384, 0.026916191101074217, 0.0271694393157959, 0.02726911926269531, 0.026976512908935546, 0.0268984317779541, 0.02695903968811035, 0.026857440948486327, 0.02697097587585449, 0.02693667221069336, 0.02715484809875488, 0.02729267120361328, 0.02740755271911621, 0.02744121551513672, 0.027098880767822266, 0.026938400268554687, 0.026930143356323242, 0.027034751892089842, 0.027042495727539063, 0.02693769645690918, 0.026967903137207032, 0.027064319610595702, 0.027073888778686522, 0.027163232803344727, 0.02724665641784668, 0.028673023223876954, 0.027182079315185546, 0.027006975173950197, 0.027020576477050782, 0.02706038475036621, 0.02711622428894043, 0.027056127548217773, 0.02701103973388672, 0.026648576736450196, 0.02691276741027832, 0.026897695541381834, 0.02701366424560547, 0.026872224807739258, 0.026912544250488283, 0.027008224487304687, 0.02708995246887207, 
0.026927040100097655, 0.02692652893066406, 0.028959199905395507, 0.02818992042541504, 0.02721878433227539, 0.02704934310913086, 0.02759891128540039, 0.02736783981323242, 0.027146240234375, 0.027133119583129882, 0.02715283203125, 0.027062847137451173, 0.027078464508056642, 0.027074079513549804, 0.038134143829345706, 0.028594560623168945, 0.02749411201477051, 0.02728550338745117, 0.02716592025756836, 0.027079456329345702, 0.02849705505371094, 0.02693612861633301, 0.026921279907226564, 0.027467456817626953, 0.027006528854370118, 0.02729417610168457, 0.027037696838378908, 0.027246591567993163, 0.026986495971679687, 0.02691481590270996, 0.026974016189575196, 0.02684332847595215, 0.02703673553466797, 0.027207807540893556, 0.02741926383972168, 0.026940864562988283, 0.02697907257080078, 0.026978111267089842, 0.02694748878479004, 0.02723459243774414, 0.027019519805908204, 0.027114688873291017, 0.027532896041870116, 0.027327455520629883, 0.02743471908569336, 0.026990976333618164, 0.027047840118408203, 0.027009023666381835, 0.027099136352539063, 0.026867712020874023, 0.026858976364135742, 0.02704803276062012, 0.026816959381103515, 0.02707164764404297, 0.026978719711303712, 0.026631519317626952, 0.027213951110839844, 0.027015520095825196, 0.02698259162902832, 0.026942623138427733, 0.026880704879760742, 0.026897823333740235, 0.027069311141967773, 0.027169824600219727, 0.027185312271118166, 0.026916799545288087, 0.026861568450927735, 0.026845951080322266, 0.026838016510009766, 0.02714112091064453, 0.027035295486450197, 0.027047840118408203, 0.026870208740234373, 0.026883136749267577, 0.02688889694213867, 0.02707891273498535, 0.0299233283996582, 0.027457759857177733, 0.027428640365600585, 0.02805740737915039, 0.0271976318359375, 0.026944543838500975, 0.027059040069580077, 0.02745097541809082, 0.027007200241088866, 0.02716908836364746, 0.027152416229248046, 0.027260095596313476, 0.02711199951171875, 0.02737379264831543, 0.027073535919189453, 0.0269486083984375, 0.026975616455078125, 0.026960512161254883, 0.026910879135131835, 0.026949760437011718, 0.02689823913574219, 0.027084287643432618, 0.026851680755615233, 0.026916927337646484, 0.027193279266357423, 0.027101247787475587, 0.02694371223449707, 0.026919904708862304, 0.02691337585449219, 0.026904544830322265, 0.026914464950561524, 0.027571008682250975, 0.027051807403564453, 0.026886144638061524, 0.0271234245300293, 0.027177248001098633, 0.026924287796020508, 0.027073280334472656, 0.026994688034057617, 0.026943328857421876, 0.027592639923095703, 0.027032896041870116, 0.026528480529785157, 0.027039743423461913, 0.026799583435058595, 0.026892383575439452, 0.026803871154785157, 0.027021184921264648, 0.02688912010192871, 0.026953727722167968, 0.02697200012207031, 0.026932640075683592, 0.026891008377075195, 0.027125951766967773, 0.02691257667541504, 0.027017215728759765, 0.027092544555664063, 0.027027519226074218, 0.026994943618774414, 0.02696931266784668, 0.02691289520263672, 0.026922880172729494, 0.02687887954711914, 0.026938943862915038, 0.027029951095581053, 0.026869375228881835, 0.026880096435546875, 0.027111839294433594, 0.027027103424072267, 0.02692336082458496, 0.02694131278991699, 0.02693120002746582, 0.02686732864379883, 0.026993024826049806, 0.027229312896728516, 0.02731916809082031, 0.02778668785095215, 0.027416608810424806, 0.027227840423583984, 0.027300703048706056, 0.026961919784545898, 0.027002687454223632, 0.0271495361328125, 0.027059104919433592, 0.027379104614257813, 0.027353759765625, 0.027031551361083983, 0.0270152645111084, 
0.026904640197753907, 0.027111072540283204, 0.026906816482543946, 0.026933248519897462, 0.02697200012207031, 0.027150495529174805, 0.02693734359741211, 0.02707391929626465, 0.026815040588378906, 0.026981504440307617, 0.026938304901123047, 0.026910720825195314, 0.030879743576049806, 0.027418624877929686, 0.0268984317779541, 0.02700428771972656, 0.02779840087890625, 0.02663814353942871, 0.026955968856811525, 0.027265247344970704, 0.027225887298583985, 0.026892288208007813, 0.02686947250366211, 0.026881568908691405, 0.027237119674682616, 0.027242143630981444, 0.027117824554443358, 0.02703139114379883, 0.02725503921508789, 0.027025184631347655, 0.026965503692626954, 0.02962505531311035, 0.02813132858276367, 0.02719126319885254, 0.027043872833251954, 0.0272927360534668, 0.02707142448425293, 0.027052032470703126, 0.02698240089416504, 0.027075584411621095, 0.027077632904052733, 0.027037696838378908, 0.027461568832397462, 0.027110496520996095, 0.02699158477783203, 0.027028799057006836, 0.02695648002624512, 0.02700444793701172, 0.027074720382690428, 0.027144384384155274, 0.027084928512573242, 0.027084575653076173, 0.027000768661499024, 0.027355424880981444, 0.027043552398681642, 0.0270166072845459, 0.026995584487915038, 0.02709708786010742, 0.027027456283569336, 0.027045888900756834, 0.02736332893371582, 0.027382816314697266, 0.027278303146362304, 0.027121376037597657, 0.02712812805175781, 0.02707609558105469, 0.02704582405090332, 0.027056671142578124, 0.02692095947265625, 0.027262048721313478, 0.027204063415527342, 0.02692755126953125, 0.026885120391845704, 0.027022239685058593, 0.02687388801574707, 0.027037088394165038, 0.026886816024780272, 0.028002111434936524, 0.02699228858947754, 0.02695222473144531]",tokens/s,36.83975147939725,, 4bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,True,MB,858.341376,555.614208,0.0,153.092096,140.32384,s,1,9.18598046875,9.18598046875,0.0,9.18598046875,9.18598046875,9.18598046875,9.18598046875,[9.18598046875],,kWh,1.8138513370835105e-05,1.986694834938253e-06,5.395004315999867e-06,2.5520212521773223e-05,,MB,1307.46368,668.860416,0.0,253.755392,216.246784,s,11,0.22707766342163083,0.020643423947420983,0.00015219922100499224,0.020664991378784178,0.02075276756286621,0.020849984169006346,0.020927757453918457,"[0.020664991378784178, 0.020738239288330077, 0.02075276756286621, 0.020645280838012696, 0.0206180477142334, 0.020718719482421873, 0.020476160049438478, 0.020676544189453125, 0.020473535537719727, 0.020947200775146484, 0.02036617660522461]",tokens/s,12401.043579400137,kWh,5.955425058333355e-07,6.567810016246691e-08,3.9397763944489855e-07,1.055198245440701e-06,tokens/kWh,242608439.79425138,MB,1320.79616,691.929088,0.0,276.824064,216.249344,s,11,10.359646423339845,0.9417860384854403,0.0042437997441284675,0.9406495971679687,0.9457780151367188,0.948765380859375,0.9511552734375,"[0.9443898315429687, 0.9422705078125, 0.9388416137695312, 
0.9457780151367188, 0.9517527465820312, 0.9406495971679687, 0.9393660888671875, 0.9378463745117187, 0.9369231567382813, 0.9440220336914062, 0.9378064575195313]",tokens/s,66.89417492460944,kWh,2.7147347401363583e-05,2.993927637692737e-06,9.888808364555103e-06,4.003008340361142e-05,tokens/kWh,1573816.356183667,,s,693,10.35412147426606,0.014941012228378141,0.0004238476256444503,0.014854751586914062,0.015068704223632812,0.01525132122039795,0.01754168724060059,"[0.014524736404418946, 0.014915616035461425, 0.015202303886413575, 0.015013088226318359, 0.014828543663024902, 0.014810912132263184, 0.014790656089782715, 0.014874624252319337, 0.014827872276306152, 0.01487433624267578, 0.014845888137817382, 0.01493564796447754, 0.014831040382385254, 0.01485308837890625, 0.014845888137817382, 0.014816448211669922, 0.01491152000427246, 0.014773088455200195, 0.015754655838012697, 0.014801504135131836, 0.01476198387145996, 0.014784095764160156, 0.014920096397399902, 0.014987263679504394, 0.015343615531921387, 0.015069184303283691, 0.014999615669250487, 0.014779935836791993, 0.014776479721069337, 0.014743840217590332, 0.014704575538635254, 0.014748895645141601, 0.01477734375, 0.014751551628112793, 0.014771200180053711, 0.014846976280212403, 0.015032320022583008, 0.01488691234588623, 0.01479475212097168, 0.014756863594055175, 0.014700672149658204, 0.014764927864074706, 0.014737407684326171, 0.01472697639465332, 0.014694592475891114, 0.014735360145568848, 0.015150336265563965, 0.01652128028869629, 0.015051456451416015, 0.015005279541015625, 0.015036543846130372, 0.014871168136596679, 0.014997152328491211, 0.014962688446044922, 0.014929984092712402, 0.015003583908081054, 0.014991007804870605, 0.01900374412536621, 0.014982208251953125, 0.014889920234680176, 0.015034367561340332, 0.014876416206359863, 0.014944191932678222, 0.014807168006896973, 0.015029600143432617, 0.01489475154876709, 0.014865280151367188, 0.014757887840270996, 0.014882816314697265, 0.014776320457458495, 0.014872575759887695, 0.014720735549926757, 0.01478275203704834, 0.014827520370483398, 0.01479475212097168, 0.014729087829589843, 0.014791999816894531, 0.015007871627807617, 0.014826175689697265, 0.014862336158752442, 0.014880767822265625, 0.014876864433288573, 0.014839008331298828, 0.014859040260314942, 0.014855999946594239, 0.0148274564743042, 0.014960831642150878, 0.014851872444152831, 0.01483785629272461, 0.014834815979003907, 0.01486355209350586, 0.01486515235900879, 0.014809663772583009, 0.014803168296813964, 0.014878656387329102, 0.014966015815734863, 0.014805983543395997, 0.014870528221130372, 0.01485209560394287, 0.014892704010009765, 0.014776608467102052, 0.015190239906311034, 0.01495638370513916, 0.014841216087341309, 0.014774175643920898, 0.014918239593505859, 0.014837887763977051, 0.014776320457458495, 0.014756863594055175, 0.014957119941711425, 0.014813632011413573, 0.014777503967285156, 0.01487548828125, 0.014943519592285156, 0.014876768112182618, 0.015092351913452148, 0.014845952033996582, 0.014831007957458496, 0.014824031829833984, 0.014839743614196777, 0.014848064422607422, 0.014777791976928712, 0.015066783905029297, 0.015143327713012696, 0.017773151397705078, 0.01718671989440918, 0.01451750373840332, 0.014801568031311035, 0.014815456390380859, 0.014800191879272461, 0.014797056198120117, 0.014803168296813964, 0.0148274564743042, 0.014909503936767578, 0.01489020824432373, 0.014952256202697753, 0.014916576385498048, 0.01477836799621582, 0.01483897590637207, 0.014842687606811523, 0.014802623748779296, 0.014797120094299317, 
0.015041888236999512, 0.014832287788391114, 0.014745599746704101, 0.014835712432861328, 0.014835712432861328, 0.014858240127563477, 0.014923232078552246, 0.014796480178833008, 0.014825823783874512, 0.01477881622314453, 0.014823488235473633, 0.014864447593688965, 0.014849184036254882, 0.014846591949462891, 0.014928064346313476, 0.015036383628845214, 0.015230496406555176, 0.014894783973693848, 0.01507408046722412, 0.014888959884643555, 0.01481113624572754, 0.01502956771850586, 0.0149235200881958, 0.01488111972808838, 0.01486847972869873, 0.014858847618103027, 0.014919199943542481, 0.01487715244293213, 0.014807040214538575, 0.014902751922607422, 0.014850591659545899, 0.014827520370483398, 0.014982912063598633, 0.014923135757446289, 0.014974143981933594, 0.014922592163085937, 0.014944160461425781, 0.01500873565673828, 0.014960607528686524, 0.014928031921386718, 0.014915424346923828, 0.015125920295715332, 0.015030271530151367, 0.01490339183807373, 0.01489139175415039, 0.01501302433013916, 0.015250399589538574, 0.014749247550964356, 0.01518671989440918, 0.019537567138671875, 0.014952223777770996, 0.015035807609558105, 0.014846783638000488, 0.014835264205932617, 0.014928128242492676, 0.014776448249816894, 0.01478048038482666, 0.014885024070739746, 0.014790592193603515, 0.014732671737670898, 0.014950943946838378, 0.014809087753295898, 0.014796799659729003, 0.014823424339294433, 0.01480303955078125, 0.014725024223327637, 0.014694399833679199, 0.014770336151123047, 0.014775168418884278, 0.014779359817504882, 0.015117631912231446, 0.014802847862243652, 0.0147893123626709, 0.014848383903503418, 0.014784255981445312, 0.014813344001770019, 0.014679936408996582, 0.014878560066223144, 0.014764127731323242, 0.014756159782409668, 0.014742719650268554, 0.014866751670837402, 0.01481769561767578, 0.015031071662902832, 0.014740096092224122, 0.01476364803314209, 0.014919424057006836, 0.014904319763183594, 0.014907391548156738, 0.014837759971618653, 0.01520639991760254, 0.01501743984222412, 0.014792896270751954, 0.014799455642700195, 0.01591427230834961, 0.018300960540771485, 0.014875231742858886, 0.014853919982910156, 0.015145248413085937, 0.014859904289245605, 0.015410752296447753, 0.014889663696289062, 0.014821023941040038, 0.014841983795166016, 0.014778495788574219, 0.0148602876663208, 0.014798944473266601, 0.014865823745727539, 0.014727680206298829, 0.014798848152160645, 0.014741600036621094, 0.014983424186706543, 0.014981472015380859, 0.014919103622436524, 0.015289088249206544, 0.015299648284912109, 0.017950912475585938, 0.016562335968017577, 0.015321503639221192, 0.01571382427215576, 0.01640451240539551, 0.015047103881835937, 0.015132672309875488, 0.014964384078979492, 0.015020671844482423, 0.015021984100341796, 0.0150382719039917, 0.014938336372375488, 0.014929696083068848, 0.014989312171936036, 0.014905344009399414, 0.014984767913818359, 0.015028351783752442, 0.014999872207641601, 0.014962688446044922, 0.015319040298461914, 0.014976767539978028, 0.015015168190002441, 0.014945280075073243, 0.014988479614257812, 0.014956992149353027, 0.014993791580200195, 0.014849696159362793, 0.01494598388671875, 0.01483779239654541, 0.015022496223449706, 0.014905823707580566, 0.014909184455871582, 0.01487183952331543, 0.014941023826599122, 0.01491750431060791, 0.014892095565795899, 0.014952383995056152, 0.014982175827026368, 0.014938048362731934, 0.01492790412902832, 0.014980159759521484, 0.015022144317626954, 0.015013824462890625, 0.014930335998535157, 0.0151079683303833, 0.01493824005126953, 0.014950943946838378, 
0.014899200439453125, 0.014931967735290527, 0.014956543922424317, 0.015058943748474121, 0.014905344009399414, 0.01501366424560547, 0.014982879638671875, 0.015079872131347656, 0.015301792144775391, 0.01496947193145752, 0.014634623527526855, 0.015052448272705079, 0.01490169620513916, 0.014880831718444825, 0.014891072273254395, 0.014876064300537109, 0.014955264091491699, 0.014901439666748046, 0.01487065601348877, 0.015027551651000977, 0.017536928176879883, 0.01638755226135254, 0.01491862392425537, 0.014923616409301758, 0.01501814365386963, 0.014925824165344239, 0.014977024078369141, 0.015044608116149903, 0.014827520370483398, 0.014761856079101562, 0.014874912261962891, 0.014761887550354003, 0.014790176391601562, 0.014942624092102052, 0.014835616111755372, 0.014798944473266601, 0.014847328186035156, 0.01480076789855957, 0.014863136291503907, 0.014773695945739747, 0.01480303955078125, 0.014826208114624024, 0.014764927864074706, 0.014784992218017578, 0.014781951904296875, 0.015141887664794922, 0.014861920356750487, 0.01484217643737793, 0.01489094352722168, 0.014772064208984375, 0.014820704460144043, 0.014869471549987794, 0.01483353614807129, 0.014825695991516114, 0.014851360321044922, 0.01476483154296875, 0.014751487731933594, 0.014999551773071289, 0.014911487579345703, 0.014751744270324708, 0.014798368453979492, 0.014968544006347657, 0.014904319763183594, 0.014982399940490722, 0.014829407691955567, 0.01482819175720215, 0.014774271965026856, 0.014734399795532227, 0.014793663978576661, 0.014741503715515136, 0.014767104148864747, 0.014789728164672852, 0.014790559768676758, 0.01452774429321289, 0.014873567581176758, 0.014819104194641114, 0.014866687774658204, 0.015002911567687988, 0.014916223526000976, 0.014902560234069825, 0.014901823997497558, 0.014775327682495117, 0.014826623916625977, 0.014704480171203613, 0.014751744270324708, 0.014780415534973144, 0.014884160041809083, 0.015104703903198242, 0.017596416473388672, 0.01639360046386719, 0.01505065631866455, 0.014903231620788574, 0.014913760185241699, 0.014817888259887696, 0.014849311828613281, 0.015649439811706543, 0.01483193588256836, 0.014833375930786133, 0.014825471878051758, 0.014764127731323242, 0.014780287742614745, 0.015190239906311034, 0.014873503684997558, 0.014854751586914062, 0.015032480239868164, 0.014841695785522462, 0.014780735969543457, 0.014780415534973144, 0.015433631896972656, 0.014714976310729981, 0.014690303802490234, 0.01471183967590332, 0.014712960243225097, 0.014689120292663574, 0.014741503715515136, 0.014936063766479492, 0.014806879997253417, 0.014737567901611327, 0.014781855583190917, 0.014686911582946778, 0.014722496032714845, 0.014703071594238282, 0.014737407684326171, 0.014939359664916992, 0.014817919731140137, 0.014732576370239258, 0.014685055732727052, 0.014680095672607422, 0.01486844825744629, 0.014768287658691407, 0.01470787239074707, 0.014773119926452636, 0.014732416152954102, 0.014713279724121094, 0.01473971176147461, 0.014693504333496093, 0.014620672225952149, 0.014823424339294433, 0.015120384216308593, 0.01478656005859375, 0.014804991722106933, 0.015892479896545412, 0.01494758415222168, 0.015054847717285156, 0.014956671714782715, 0.014871168136596679, 0.014911487579345703, 0.015106271743774415, 0.014997023582458496, 0.014860511779785156, 0.01488931179046631, 0.015046272277832031, 0.014882880210876465, 0.01481113624572754, 0.014784511566162109, 0.014851296424865722, 0.014891327857971192, 0.014915871620178222, 0.014999744415283203, 0.015119647979736328, 0.014800959587097168, 0.014901599884033202, 
0.014933407783508301, 0.014922656059265137, 0.014919679641723632, 0.014854240417480468, 0.014739423751831056, 0.014779583930969238, 0.014852864265441895, 0.014809087753295898, 0.014937408447265625, 0.014952287673950196, 0.014813952445983887, 0.014697855949401855, 0.014820063591003418, 0.014749695777893066, 0.014829216003417968, 0.014678272247314453, 0.014952192306518555, 0.01474390411376953, 0.014753087997436524, 0.014834367752075195, 0.014761568069458008, 0.014756159782409668, 0.014729280471801758, 0.014911359786987304, 0.014781855583190917, 0.014786623954772949, 0.01477017593383789, 0.015573216438293456, 0.014922207832336425, 0.014788415908813477, 0.014831839561462402, 0.014746591567993165, 0.014786911964416503, 0.014729887962341308, 0.014798848152160645, 0.01471628761291504, 0.014739935874938964, 0.014439328193664551, 0.01478656005859375, 0.014747296333312988, 0.014791007995605468, 0.014796704292297362, 0.014851967811584472, 0.014739616394042969, 0.014770048141479492, 0.015047200202941894, 0.015183520317077637, 0.015171839714050292, 0.014888671875, 0.014958751678466798, 0.014857248306274414, 0.01485091209411621, 0.014854144096374512, 0.014835040092468261, 0.01470521640777588, 0.014968928337097167, 0.01490329647064209, 0.01478559970855713, 0.014728128433227539, 0.015176735877990722, 0.014880895614624023, 0.014728063583374024, 0.014949536323547364, 0.01491641616821289, 0.01484335994720459, 0.014801247596740723, 0.014811231613159179, 0.01477836799621582, 0.014788703918457031, 0.014997504234313964, 0.014831616401672363, 0.014816864013671875, 0.014843487739562988, 0.01477881622314453, 0.0148156156539917, 0.01484390354156494, 0.01496678352355957, 0.014989312171936036, 0.014837247848510742, 0.014761599540710449, 0.01485091209411621, 0.015081503868103027, 0.014875776290893554, 0.01489795207977295, 0.014841376304626465, 0.014844191551208497, 0.014816672325134277, 0.015105024337768554, 0.014947903633117677, 0.014821439743041993, 0.014801152229309082, 0.014801983833312988, 0.014785408020019531, 0.014803008079528808, 0.014964384078979492, 0.014968607902526855, 0.014817855834960937, 0.014822463989257812, 0.014906496047973632, 0.01484505558013916, 0.015564959526062011, 0.016047935485839843, 0.015527423858642577, 0.017846784591674804, 0.01491763210296631, 0.01496895980834961, 0.014794400215148926, 0.014995552062988282, 0.015252703666687012, 0.01495132827758789, 0.014782336235046387, 0.014807168006896973, 0.015022080421447754, 0.014775391578674316, 0.01475603199005127, 0.015021023750305175, 0.014950143814086913, 0.014792703628540039, 0.014827679634094239, 0.014853983879089356, 0.01481113624572754, 0.014784511566162109, 0.014899200439453125, 0.015009247779846191, 0.014815168380737305, 0.01489686393737793, 0.014822272300720215, 0.014780415534973144, 0.015048416137695312, 0.014760064125061036, 0.014942367553710938, 0.014802528381347655, 0.01489094352722168, 0.014809632301330566, 0.014831551551818848, 0.014761024475097657, 0.014870816230773926, 0.014825440406799317, 0.015284928321838379, 0.015112192153930663, 0.015081472396850586, 0.014960639953613282, 0.014888192176818848, 0.014959360122680664, 0.015146944046020508, 0.014827168464660645, 0.014725536346435546, 0.014916704177856446, 0.014782560348510743, 0.01480787181854248, 0.014763487815856933, 0.01499561595916748, 0.014794816017150879, 0.014694496154785157, 0.014790559768676758, 0.014922271728515625, 0.014858016014099121, 0.014734944343566895, 0.015311264038085937, 0.01494547176361084, 0.014791487693786621, 0.014823712348937989, 0.014787551879882812, 
0.014642720222473144, 0.014803423881530761, 0.014833663940429688, 0.014972224235534667, 0.014952256202697753, 0.0150250244140625, 0.015003647804260254, 0.0149071683883667, 0.014842399597167969, 0.014823103904724121, 0.014834848403930664, 0.015534943580627441, 0.015147007942199708, 0.01488691234588623, 0.014899200439453125, 0.015131999969482422, 0.014936384201049804, 0.014822815895080567, 0.015242176055908203, 0.015089376449584961, 0.014843328475952149, 0.014959551811218262, 0.01483078384399414, 0.014744288444519043, 0.01488691234588623, 0.014706463813781738, 0.014754176139831543, 0.014800736427307129, 0.014882399559020995, 0.014946720123291016, 0.014921728134155274, 0.014899295806884766, 0.014882719993591309, 0.01487174415588379, 0.014795583724975586, 0.014897151947021485, 0.014921728134155274, 0.01489027214050293, 0.014812895774841309, 0.014771200180053711, 0.01478163242340088, 0.014893823623657227, 0.014782527923583984, 0.0148536958694458, 0.014763680458068848, 0.014737248420715332, 0.01490329647064209, 0.014797760009765624, 0.014782624244689941, 0.014888319969177246, 0.015047136306762696, 0.014923775672912597, 0.014829567909240723, 0.01485209560394287, 0.014831328392028808, 0.014784992218017578, 0.014856160163879395, 0.014772064208984375, 0.014723072052001953, 0.014710463523864747, 0.01480355167388916, 0.014811936378479005, 0.014801856040954589]",tokens/s,66.92986959080689,, 8bit-bnb-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 170, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,15873.953792,9166.52032,0.0,8763.998208,8763.843072,s,1,33.5512734375,33.5512734375,0.0,33.5512734375,33.5512734375,33.5512734375,33.5512734375,[33.5512734375],,kWh,0.0007725246796749768,8.520802042019493e-05,0.00026022993040600295,0.0011179626305011746,,MB,1357.152256,9818.734592,0.0,9395.24096,9192.912384,s,10,0.8527321319580078,0.08527321319580078,0.00025484115922508256,0.08521763229370118,0.08556246795654296,0.08564729614257813,0.08571515869140625,"[0.0849985580444336, 0.085135009765625, 0.08573212432861328, 0.08506221008300781, 0.08495436859130859, 0.08530025482177735, 0.08554361724853515, 0.08549779510498047, 0.08506291198730469, 0.08544528198242188]",tokens/s,3002.115088734648,kWh,2.5318161096264308e-06,2.7921451127183604e-07,1.6801306544481804e-06,4.491161275346446e-06,tokens/kWh,57000847.73291787,MB,1401.020416,9818.734592,0.0,9395.24096,9041.92,s,10,24.1709306640625,2.41709306640625,0.004231442906308345,2.4154610595703128,2.42240791015625,2.4234008544921872,2.424195209960937,"[2.41252978515625, 2.413452880859375, 2.420347412109375, 2.422187255859375, 2.4131962890625, 2.415519287109375, 2.412792724609375, 2.4211083984375, 2.424393798828125, 2.41540283203125]",tokens/s,26.06436668724089,kWh,7.011424271370451e-05,7.733449428236104e-06,4.657476906055328e-05,0.0001244224612024939,tokens/kWh,506339.44539538847,,s,630,24.169080314636215,0.03836361954704164,0.0004283651808112143,0.03825161552429199,0.03868288726806641,0.03896333694458008,0.040279085464477546,"[0.03837535858154297, 0.03824563217163086, 0.03812364959716797, 0.03812019348144531, 0.03803478240966797, 0.03804207992553711, 0.03827248001098633, 0.03843289566040039, 0.03812201690673828, 0.038180801391601564, 0.038221023559570313, 0.03835919952392578, 0.03875910568237305, 0.03861897659301758, 0.038481311798095705, 0.038862655639648434, 0.03970044708251953, 0.038650177001953126, 0.03836380767822266, 0.03834620666503906, 0.038367198944091796, 0.03834051132202149, 0.03816835021972656, 0.03816742324829102, 0.038304862976074217, 0.03956009674072265, 0.038725631713867184, 0.03840323257446289, 0.038222686767578125, 0.03826454544067383, 0.03803507232666015, 0.03809785461425781, 0.03807551956176758, 0.0385068473815918, 0.038035327911376954, 0.0380972785949707, 0.037976062774658204, 0.038147903442382815, 0.038297504425048826, 0.038458751678466796, 0.0381756477355957, 0.03824025726318359, 0.03818288040161133, 0.038115360260009765, 0.038061695098876955, 0.03811366271972656, 0.03803059387207031, 0.03809356689453125, 0.03804502487182617, 0.03806697463989258, 0.038209407806396485, 0.038508544921875, 0.03811529541015625, 0.03810921478271485, 
0.038193153381347655, 0.0382665901184082, 0.03832787322998047, 0.03822256088256836, 0.03812351989746094, 0.03837760162353516, 0.038020992279052736, 0.03806003189086914, 0.038125694274902346, 0.038889278411865236, 0.03839347076416016, 0.038152767181396485, 0.03803340911865234, 0.038042625427246096, 0.03810611343383789, 0.03824582290649414, 0.03803020858764648, 0.03804150390625, 0.03799955368041992, 0.03800332641601562, 0.03806844711303711, 0.03867801666259765, 0.03833087921142578, 0.03805404663085937, 0.0381561279296875, 0.03816022491455078, 0.038180545806884764, 0.03808694458007812, 0.038185150146484374, 0.03814940643310547, 0.038013248443603515, 0.038101249694824216, 0.038058143615722656, 0.03803347015380859, 0.03809600067138672, 0.03836380767822266, 0.038484127044677734, 0.038723552703857425, 0.03857206344604492, 0.0384450569152832, 0.03852860641479492, 0.04195481491088867, 0.03858319854736328, 0.03835014343261719, 0.03825120162963867, 0.038084606170654296, 0.038125568389892575, 0.03807846450805664, 0.03810435104370117, 0.03808124923706055, 0.03819504165649414, 0.03844316864013672, 0.038354270935058596, 0.038427295684814455, 0.038559326171875, 0.038320545196533204, 0.038201343536376955, 0.038436065673828124, 0.03858902359008789, 0.038064319610595705, 0.03808787155151367, 0.0380918083190918, 0.038182689666748044, 0.03808243179321289, 0.03826201629638672, 0.038206336975097656, 0.03818272018432617, 0.03820284652709961, 0.03822585678100586, 0.03809987258911133, 0.03888105773925781, 0.038160350799560545, 0.03865923309326172, 0.03823494338989258, 0.03821068954467773, 0.03816960144042969, 0.038087936401367185, 0.038167488098144534, 0.03810102462768555, 0.03822108840942383, 0.03816230392456055, 0.03817740631103515, 0.0380948486328125, 0.038241695404052735, 0.038277183532714844, 0.03901065444946289, 0.03986191940307617, 0.03843945693969727, 0.03871539306640625, 0.038950431823730466, 0.038947105407714844, 0.03874425506591797, 0.03874006271362305, 0.03900406265258789, 0.038657760620117186, 0.03855593490600586, 0.03867567825317383, 0.03875715255737305, 0.03852006530761719, 0.038480384826660156, 0.038471935272216796, 0.038434814453125, 0.0384529914855957, 0.03839411163330078, 0.038266433715820315, 0.03847129440307617, 0.038210369110107424, 0.0388587532043457, 0.038332416534423826, 0.03814809417724609, 0.038184959411621096, 0.03816204833984375, 0.03825600051879883, 0.038171646118164065, 0.038088287353515625, 0.03821110534667969, 0.03840512084960938, 0.03850636672973633, 0.038510238647460934, 0.03855152130126953, 0.03850073623657226, 0.03850035095214844, 0.038407806396484376, 0.03868051147460937, 0.03806252670288086, 0.03831014251708984, 0.038169502258300785, 0.0381357421875, 0.038039745330810545, 0.038249183654785156, 0.03812255859375, 0.03823097610473633, 0.03829132843017578, 0.03821376037597656, 0.03828736114501953, 0.03857571029663086, 0.038435230255126955, 0.038266048431396485, 0.03831036758422852, 0.0381300163269043, 0.038297409057617186, 0.038426815032958986, 0.0384546890258789, 0.03835964965820313, 0.038629375457763675, 0.038242305755615234, 0.038645759582519534, 0.038338558197021484, 0.03857123184204102, 0.038518558502197264, 0.0385054702758789, 0.03855356979370117, 0.038356639862060546, 0.03851712036132812, 0.038286945343017575, 0.03845571136474609, 0.03832342529296875, 0.03834755325317383, 0.038116542816162106, 0.03839468765258789, 0.03828531265258789, 0.03817782211303711, 0.03831702423095703, 0.038606910705566405, 0.038721153259277344, 0.03906092834472656, 0.0387193603515625, 
0.03848112106323242, 0.03825337600708008, 0.03818000030517578, 0.03827414321899414, 0.038010623931884764, 0.038042110443115236, 0.03822457504272461, 0.03805759811401367, 0.03804694366455078, 0.038126529693603514, 0.04012227249145508, 0.03898508834838867, 0.03819113540649414, 0.04043027114868164, 0.03999948883056641, 0.038504447937011715, 0.03840563201904297, 0.03845772933959961, 0.03822809600830078, 0.03821977615356445, 0.03826230239868164, 0.03819580841064453, 0.03832204818725586, 0.03824764633178711, 0.038238815307617184, 0.03818048095703125, 0.03821968078613281, 0.038316864013671875, 0.038309600830078124, 0.03826496124267578, 0.038258689880371094, 0.0398131217956543, 0.03877478408813476, 0.03824784088134765, 0.037992225646972654, 0.03808953475952148, 0.03812351989746094, 0.0380211181640625, 0.038150142669677735, 0.03791872024536133, 0.03802511978149414, 0.0380928955078125, 0.03803855895996094, 0.03807932662963867, 0.03798028945922852, 0.038041057586669924, 0.03886953735351562, 0.03853311920166016, 0.03828940963745117, 0.03809257507324219, 0.038019294738769534, 0.03806617736816406, 0.03835030364990234, 0.038096446990966794, 0.03811203384399414, 0.03822732925415039, 0.038574527740478516, 0.03847129440307617, 0.03860985565185547, 0.038098751068115236, 0.03823334503173828, 0.03808742523193359, 0.03809075164794922, 0.037967422485351565, 0.03808505630493164, 0.03812761688232422, 0.038085662841796875, 0.03810198211669922, 0.038047584533691406, 0.03809500885009766, 0.03813359832763672, 0.03816668701171875, 0.038262496948242186, 0.03824867248535156, 0.03827705764770508, 0.03824188613891601, 0.038162975311279296, 0.03826278305053711, 0.038193153381347655, 0.038434814453125, 0.038694911956787106, 0.03935232162475586, 0.03951180648803711, 0.038295806884765624, 0.03826483154296875, 0.0387665901184082, 0.03857561492919922, 0.039061729431152346, 0.038123199462890625, 0.03813542556762695, 0.038253089904785154, 0.03840367889404297, 0.03822611236572265, 0.038245025634765624, 0.03869375991821289, 0.038234111785888675, 0.0382437744140625, 0.03848166275024414, 0.03842745590209961, 0.03833155059814453, 0.03863843154907227, 0.038500064849853514, 0.03828275299072265, 0.03829840087890625, 0.0384901123046875, 0.038596607208251955, 0.038518241882324215, 0.03816080093383789, 0.03805180740356445, 0.03815164947509766, 0.03826144027709961, 0.038071903228759765, 0.03812188720703125, 0.03833651351928711, 0.03808665466308594, 0.03805593490600586, 0.03807846450805664, 0.038174400329589846, 0.038166046142578125, 0.03825539016723633, 0.03817881774902344, 0.038307838439941407, 0.03849625778198242, 0.03855155181884766, 0.0385904655456543, 0.03864495849609375, 0.038865886688232425, 0.039976383209228514, 0.03820582580566406, 0.038193153381347655, 0.037994495391845705, 0.03837747192382813, 0.03826073455810547, 0.03810713577270508, 0.03811942291259766, 0.0384450569152832, 0.038416385650634766, 0.03844233703613281, 0.03852355194091797, 0.0385780143737793, 0.038246463775634766, 0.03844720077514648, 0.03814809417724609, 0.03824435043334961, 0.03831808090209961, 0.03818086242675781, 0.03828838348388672, 0.03823231887817383, 0.0382490234375, 0.03820307159423828, 0.03883673477172851, 0.038217727661132815, 0.038182945251464845, 0.038096607208251955, 0.038066207885742186, 0.03833878326416015, 0.03805545425415039, 0.03910921478271484, 0.03818921661376953, 0.038067745208740233, 0.037945087432861326, 0.03812015914916992, 0.03798758316040039, 0.03797068786621094, 0.037959617614746095, 0.03808265686035156, 0.03804742431640625, 
0.03800201416015625, 0.038037696838378904, 0.03797683334350586, 0.03793305587768555, 0.03798223876953125, 0.03794736099243164, 0.03809257507324219, 0.03834492874145508, 0.038614974975585935, 0.03853033447265625, 0.03883292770385742, 0.03885036849975586, 0.038491455078125, 0.03835366439819336, 0.03855542373657227, 0.03832457733154297, 0.0383631362915039, 0.03813132858276367, 0.03849049758911133, 0.038026561737060545, 0.03807904052734375, 0.03824652862548828, 0.03815423965454102, 0.03807231903076172, 0.03831398391723633, 0.03815423965454102, 0.03814390563964844, 0.03841443252563476, 0.03803955078125, 0.03827302551269531, 0.03815983963012695, 0.037945953369140625, 0.038031295776367186, 0.038182910919189454, 0.03820473480224609, 0.03850297546386719, 0.03840335845947265, 0.03925078582763672, 0.038594558715820314, 0.03859251022338867, 0.03829145431518555, 0.03891151809692383, 0.03822819137573242, 0.03834291076660156, 0.038342529296875, 0.038338687896728514, 0.03846089553833008, 0.03843878555297851, 0.038214305877685546, 0.03827916717529297, 0.03868262481689453, 0.03851264190673828, 0.038432769775390625, 0.03924991989135742, 0.03851878356933594, 0.03831193542480469, 0.03858179092407227, 0.038571552276611326, 0.03855660629272461, 0.03860671997070313, 0.03838300704956055, 0.038570720672607424, 0.03834265518188477, 0.038274112701416015, 0.03834268951416016, 0.03851971054077148, 0.038077823638916014, 0.038115230560302735, 0.03839459228515625, 0.03814976119995117, 0.0381976318359375, 0.038168575286865236, 0.03807017517089844, 0.03815843200683594, 0.03841788864135742, 0.03810323333740234, 0.03794886398315429, 0.0381030387878418, 0.038085216522216796, 0.038113601684570314, 0.038298816680908204, 0.03857609558105469, 0.03844739151000977, 0.03868524932861328, 0.03849417495727539, 0.038354625701904295, 0.03808086395263672, 0.03808982467651367, 0.038047775268554684, 0.03808345413208008, 0.037989505767822264, 0.038478721618652345, 0.03808451080322266, 0.03809689712524414, 0.038004833221435545, 0.04050534439086914, 0.03872915267944336, 0.03850067138671875, 0.04034313583374023, 0.039686206817626954, 0.03917094421386719, 0.03872943878173828, 0.038193153381347655, 0.03825664138793945, 0.03823308944702149, 0.038246849060058596, 0.039720767974853514, 0.03818963241577149, 0.038451393127441405, 0.038215679168701173, 0.03816243362426758, 0.038165824890136715, 0.03812732696533203, 0.03825148773193359, 0.038158336639404294, 0.03814012908935547, 0.040613502502441404, 0.03837699127197266, 0.03828003311157226, 0.03857612609863281, 0.03842399978637695, 0.038307838439941407, 0.03843129730224609, 0.03830374526977539, 0.03898543930053711, 0.03852236938476562, 0.0383210563659668, 0.038391681671142576, 0.03846748733520508, 0.03867657470703125, 0.03852246475219727, 0.04217897415161133, 0.03879494476318359, 0.0386192626953125, 0.03854083251953125, 0.038414241790771485, 0.03845606231689453, 0.03851993560791016, 0.038255489349365235, 0.03841766357421875, 0.03896556854248047, 0.03931999969482422, 0.03896060943603516, 0.038545345306396486, 0.03826563262939453, 0.038243488311767576, 0.038400257110595706, 0.03831219100952148, 0.03820969772338867, 0.038481182098388675, 0.03866681671142578, 0.03865353775024414, 0.038086624145507814, 0.03838422393798828, 0.0381317138671875, 0.038104095458984376, 0.03813587188720703, 0.03820569610595703, 0.03798406219482422, 0.038157150268554686, 0.03820748901367187, 0.038152191162109376, 0.03812707138061523, 0.038188865661621094, 0.03816316986083984, 0.03806412887573242, 0.03816377639770508, 
0.03842540740966797, 0.03802864074707031, 0.03819987106323242, 0.038105056762695315, 0.03810713577270508, 0.03808816146850586, 0.03923212814331055, 0.03836099243164062, 0.03831718444824219, 0.038271617889404294, 0.03818521499633789, 0.0381952018737793, 0.03874012756347656, 0.03830204772949219, 0.03815359878540039, 0.03810748672485351, 0.03819948959350586, 0.03818870544433594, 0.03817311859130859, 0.038307968139648436, 0.03825174331665039, 0.038104736328125, 0.038091777801513675, 0.03843449783325195, 0.03836092758178711, 0.03814246368408203, 0.03807379150390625, 0.03810748672485351, 0.038116542816162106, 0.03822079849243164, 0.03812156677246094, 0.03836918258666992, 0.03822134399414062, 0.03823270416259766, 0.03823164749145508, 0.038244449615478515, 0.03809830474853516, 0.038236961364746094, 0.03813523101806641, 0.03844563293457031, 0.038432769775390625, 0.03846963119506836, 0.03853433609008789, 0.038417217254638675, 0.038573089599609374, 0.04164217758178711, 0.0385849609375, 0.03908553695678711, 0.038181598663330076, 0.03809276962280273, 0.03823936080932617, 0.03801177597045898, 0.038241695404052735, 0.03843337631225586, 0.03840963363647461, 0.0384284782409668, 0.03860771179199219, 0.03833750534057617, 0.038388702392578126, 0.038215679168701173, 0.03868057632446289, 0.03814303970336914, 0.03832928085327148, 0.03824025726318359, 0.03832841491699219, 0.03815209579467774, 0.03815628814697265, 0.0380497932434082, 0.03815423965454102, 0.038117599487304685, 0.03821456146240235, 0.03807897567749023, 0.038183296203613284, 0.03830579376220703, 0.0383421745300293]",tokens/s,26.066362137018785,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,13838.614528,7509.77024,0.0,7107.248128,7106.945536,s,1,31.414798828125,31.414798828125,0.0,31.414798828125,31.414798828125,31.414798828125,31.414798828125,[31.414798828125],,kWh,0.0007074765982958448,7.80267142416079e-05,0.0002291082388419935,0.0010146115513794461,,MB,1329.29536,7786.594304,0.0,7363.100672,7335.695872,s,10,1.1200724487304687,0.11200724487304688,0.000619677961036989,0.11208081436157227,0.11283147048950196,0.11290180015563965,0.1129580638885498,"[0.11136137390136719, 0.11297212982177735, 0.11135596466064453, 0.11260697937011718, 0.11143452453613281, 0.1121269760131836, 0.11203465270996094, 0.11118224334716797, 0.11281584167480468, 0.1121817626953125]",tokens/s,2285.5664407258637,kWh,3.2874109153089417e-06,3.623553600884886e-07,2.173650303213409e-06,5.823416578610839e-06,tokens/kWh,43960447.71041747,MB,1352.732672,7786.594304,0.0,7363.100672,7289.561088,s,10,70.5914638671875,7.05914638671875,0.027809606252253823,7.07342333984375,7.09086669921875,7.090956787109375,7.091028857421875,"[7.091046875, 7.08046533203125, 7.02540087890625, 7.022373046875, 7.0264228515625, 7.07876171875, 7.0292998046875, 7.0908466796875, 7.075830078125, 
7.0710166015625]",tokens/s,8.924591805962509,kWh,0.00020697224591219088,2.2830160137309303e-05,9.167623033218592e-05,0.0003214786363816861,tokens/kWh,195969.4762584509,,s,630,70.58929953765865,0.11204650720263284,0.0012427698219394643,0.11175414276123047,0.11325244903564453,0.114849654006958,0.11697949172973635,"[0.11088070678710937, 0.11245164489746094, 0.11099750518798829, 0.11200511932373047, 0.11422937774658203, 0.11157917022705079, 0.11538384246826172, 0.1111370849609375, 0.11295136260986328, 0.11173056030273437, 0.11189183807373047, 0.11186761474609375, 0.11200176239013672, 0.11157324981689454, 0.11092377471923828, 0.115884033203125, 0.11151052856445312, 0.11093094635009766, 0.11067401885986328, 0.11091072082519532, 0.11055375671386719, 0.11149862670898437, 0.11093260955810547, 0.11064908599853515, 0.1157995834350586, 0.11144454193115234, 0.110968994140625, 0.11165049743652344, 0.11097325134277344, 0.11062403106689453, 0.11085804748535157, 0.11136707305908203, 0.11198576354980469, 0.11551152038574218, 0.11196896362304687, 0.11142774200439454, 0.11483888244628906, 0.11271628570556641, 0.11193958282470703, 0.11233280181884765, 0.11353667449951171, 0.11262601470947266, 0.11453353881835937, 0.11509232330322265, 0.11188543701171876, 0.11195481872558594, 0.11199295806884765, 0.11239158630371093, 0.1125728988647461, 0.11345954895019532, 0.11289564514160157, 0.11257241821289063, 0.11710873413085937, 0.1174814682006836, 0.11264205169677734, 0.11230361938476563, 0.11233087921142579, 0.1128042221069336, 0.11212550354003906, 0.11274598693847657, 0.11442066955566406, 0.11743421173095703, 0.1123463363647461, 0.11461558532714844, 0.11309731292724609, 0.11197420501708985, 0.11166751861572266, 0.11115692901611328, 0.1138314208984375, 0.11666307067871094, 0.11130419158935546, 0.1113927001953125, 0.11097119903564454, 0.11123741149902344, 0.11126579284667969, 0.11210723114013672, 0.11406364440917968, 0.11149507141113281, 0.117347900390625, 0.11235382080078125, 0.11313536071777344, 0.11199657440185547, 0.11204879760742187, 0.11232621002197266, 0.11244191741943359, 0.11214396667480468, 0.11252349090576172, 0.1122195816040039, 0.11255865478515625, 0.11280384063720703, 0.11286243438720703, 0.1129090576171875, 0.11429862213134766, 0.11454598236083985, 0.11231881713867188, 0.11382201385498047, 0.1130950698852539, 0.11261302185058594, 0.11243545532226562, 0.11211775970458984, 0.11235529327392578, 0.11314998626708984, 0.11454895782470703, 0.11434569549560547, 0.11305554962158203, 0.1121155548095703, 0.11222579193115234, 0.11176438140869141, 0.11184537506103516, 0.11195951843261719, 0.11150313568115235, 0.1114562225341797, 0.11131542205810546, 0.11102598571777343, 0.111280029296875, 0.11123567962646484, 0.11103641510009765, 0.111674560546875, 0.11125135803222656, 0.11140188598632812, 0.111857666015625, 0.11098265838623046, 0.11079936218261718, 0.11145216369628906, 0.11156479644775391, 0.1112985610961914, 0.11192642974853516, 0.11114582061767578, 0.1113885726928711, 0.11124336242675781, 0.11119411468505859, 0.11092502593994141, 0.11091238403320312, 0.11097634887695312, 0.11115907287597657, 0.11116944122314452, 0.11227164459228516, 0.11096288299560547, 0.11081970977783204, 0.11076428985595703, 0.11430844879150391, 0.11182476806640625, 0.1117988510131836, 0.11165219116210938, 0.11185014343261719, 0.11145830535888672, 0.11124877166748047, 0.11311577606201172, 0.11248355102539062, 0.11118800354003906, 0.11166156768798828, 0.11137254333496094, 0.11105999755859375, 0.11086275482177735, 0.11106771087646485, 
0.11089491271972657, 0.11111392211914063, 0.11113113403320313, 0.11078006744384766, 0.11168598175048829, 0.1113702392578125, 0.11107532501220703, 0.11206358337402343, 0.11156982421875, 0.11125759887695312, 0.111388671875, 0.11132927703857422, 0.11214659118652344, 0.1111695327758789, 0.11190402984619141, 0.11151414489746093, 0.11116876983642578, 0.11116038513183593, 0.11125081634521485, 0.11091500854492188, 0.11091868591308594, 0.11078438568115234, 0.11104460906982422, 0.11057151794433594, 0.1112017593383789, 0.11125920104980469, 0.11566384124755859, 0.1113333740234375, 0.11094758605957031, 0.1123315200805664, 0.11221337890625, 0.11247046661376953, 0.11208089447021484, 0.11165907287597657, 0.11177779388427735, 0.11150745391845703, 0.11148032379150391, 0.11272243499755859, 0.1119815673828125, 0.1128182373046875, 0.11140406036376953, 0.11188204956054687, 0.11132736206054687, 0.11080016326904298, 0.11087232208251953, 0.11102031707763672, 0.1107169952392578, 0.1106909408569336, 0.11051942443847657, 0.11064022064208984, 0.11065251159667969, 0.11522882843017578, 0.11228006744384765, 0.11133952331542969, 0.111193603515625, 0.11074816131591797, 0.11082685089111328, 0.111005859375, 0.110890625, 0.11090201568603515, 0.11141951751708984, 0.11121663665771485, 0.11119382476806641, 0.11126947021484375, 0.11112313842773437, 0.11086585235595703, 0.11088748931884766, 0.11088896179199219, 0.1113780517578125, 0.11121049499511719, 0.1126292495727539, 0.11116979217529296, 0.11123519897460937, 0.11098368072509765, 0.11508326721191406, 0.1115195541381836, 0.1110263671875, 0.11122492980957031, 0.1113885726928711, 0.11120777893066407, 0.11079103851318359, 0.11301507568359374, 0.11135523223876953, 0.11198287963867187, 0.11141977691650391, 0.1114746856689453, 0.11165900421142579, 0.11145539093017579, 0.1113260498046875, 0.11105609893798828, 0.1119644775390625, 0.11111062622070313, 0.11119577789306641, 0.11170591735839844, 0.1110206069946289, 0.11112857818603515, 0.11235327911376954, 0.11171017456054687, 0.11133955383300781, 0.11224063873291015, 0.11165081787109375, 0.11115682983398438, 0.1110836181640625, 0.110797119140625, 0.11098111724853515, 0.11113881683349609, 0.11291395568847656, 0.11182128143310546, 0.11107942199707031, 0.11130470275878906, 0.11188633728027343, 0.11066368103027344, 0.11133062744140625, 0.11127238464355468, 0.11608294677734375, 0.11156294250488281, 0.11141916656494141, 0.11363536071777344, 0.11190198516845704, 0.11191506958007813, 0.11127391815185547, 0.11289488220214844, 0.11122463989257812, 0.11140620422363282, 0.11121574401855469, 0.11160160064697265, 0.11138137817382812, 0.11154499053955078, 0.11168726348876953, 0.11094086456298828, 0.1119908447265625, 0.11168310546875, 0.11120489501953125, 0.11137420654296876, 0.11146803283691406, 0.11091311645507812, 0.11118067169189454, 0.11126729583740234, 0.11173728179931641, 0.11107158660888672, 0.1112119369506836, 0.11090118408203126, 0.11219766235351562, 0.11157263946533204, 0.11099827575683593, 0.11110562896728515, 0.11207721710205078, 0.11140505981445313, 0.11164262390136719, 0.11130675506591797, 0.11114291381835938, 0.110525634765625, 0.11111897277832031, 0.11118323516845703, 0.11112735748291015, 0.11121385955810546, 0.1107786865234375, 0.11162473297119141, 0.11205846405029297, 0.11104179382324218, 0.11181465911865235, 0.11251449584960938, 0.11291935729980469, 0.11165670776367187, 0.11125254058837891, 0.11165586853027344, 0.11139276885986328, 0.11095244598388672, 0.11174912261962891, 0.11295254516601562, 0.1115307846069336, 0.11100764465332032, 
0.11177378845214844, 0.11086653137207031, 0.1111387176513672, 0.11207270050048829, 0.11178591918945313, 0.1116324462890625, 0.11201741027832031, 0.11242082977294922, 0.11254342651367187, 0.11277961730957031, 0.11205859375, 0.11241244506835937, 0.11401020812988281, 0.11283353424072265, 0.11265321350097657, 0.11196729278564453, 0.11282454681396484, 0.11284732818603516, 0.11268940734863281, 0.1123737564086914, 0.1122980499267578, 0.11241260528564453, 0.11280783843994141, 0.11274454498291016, 0.11251113891601562, 0.112598876953125, 0.11277721405029296, 0.11280147552490234, 0.11276921844482422, 0.11306185913085938, 0.1137413101196289, 0.11580073547363282, 0.11263302612304688, 0.1151844482421875, 0.11248435211181641, 0.11276604461669922, 0.11242521667480469, 0.11209510040283203, 0.11148777770996093, 0.11169324493408203, 0.1115059814453125, 0.11157708740234375, 0.11129017639160156, 0.11245779418945312, 0.11324838256835937, 0.11248617553710938, 0.11299862670898438, 0.11243452453613281, 0.11223312377929688, 0.11204402923583984, 0.11206793975830077, 0.1125533447265625, 0.11268466949462891, 0.11183190155029298, 0.11209894561767578, 0.11175542449951172, 0.11296998596191406, 0.11149922943115234, 0.11092313385009765, 0.11086707305908203, 0.11090739440917968, 0.11328733062744141, 0.11169757080078126, 0.11125574493408204, 0.11098111724853515, 0.11157062530517578, 0.11117810821533203, 0.11086211395263672, 0.11067817687988281, 0.11071695709228516, 0.11160553741455079, 0.11111036682128907, 0.11141887664794922, 0.1110956802368164, 0.11240303802490234, 0.11079853057861327, 0.11079027557373047, 0.11099622344970703, 0.11534051513671875, 0.11193827056884766, 0.11180953979492188, 0.11248947143554687, 0.11129827117919922, 0.11138690948486328, 0.11115878295898438, 0.11503190612792968, 0.11125212860107422, 0.11136614227294922, 0.11120950317382812, 0.11144435119628907, 0.11127587127685547, 0.11205471801757813, 0.11112214660644532, 0.11075247955322266, 0.11099769592285157, 0.11077190399169921, 0.11128803253173829, 0.11186934661865235, 0.11156979370117187, 0.11212960052490234, 0.11103040313720704, 0.1111813735961914, 0.11096959686279297, 0.11105689239501954, 0.11191267395019532, 0.11176976013183594, 0.11187177276611328, 0.1120423355102539, 0.11153817749023437, 0.11133071899414063, 0.11138495635986329, 0.11128230285644532, 0.11076412963867187, 0.11086608123779297, 0.11196873474121094, 0.112189697265625, 0.11226850891113281, 0.11210348510742188, 0.11312732696533204, 0.11300054168701172, 0.11485846710205078, 0.11310899353027344, 0.11219535827636719, 0.11195823669433594, 0.11180217742919922, 0.11241260528564453, 0.11168943786621094, 0.11162630462646485, 0.1122083511352539, 0.11151785278320313, 0.11153372955322266, 0.11187744140625, 0.11213839721679687, 0.11216969299316407, 0.11271577453613281, 0.11260652923583984, 0.11286188507080078, 0.11175286102294922, 0.11220207977294921, 0.11189247894287109, 0.11220377349853515, 0.1157093734741211, 0.11336579132080078, 0.11367743682861328, 0.1130869140625, 0.11268732452392578, 0.11238710021972656, 0.11251165008544922, 0.11267472076416016, 0.11293328094482422, 0.11252735900878906, 0.1132965087890625, 0.11296428680419922, 0.11368790435791015, 0.11501859283447266, 0.1136537628173828, 0.11235852813720704, 0.11191007995605469, 0.11179199981689453, 0.11227903747558594, 0.11205638122558594, 0.11187193298339844, 0.11324857330322266, 0.11179122924804688, 0.11131788635253906, 0.11163404846191406, 0.11269471740722656, 0.11179103851318359, 0.111783935546875, 0.11278131103515625, 0.11209228515625, 
0.11283135986328124, 0.11223369598388672, 0.1145864028930664, 0.11240025329589844, 0.1123897933959961, 0.11259935760498047, 0.11331414031982422, 0.11332198333740234, 0.11321334075927734, 0.11250083160400391, 0.11246185302734375, 0.11205248260498046, 0.11185513305664062, 0.11308255767822266, 0.11213619232177735, 0.11286118316650391, 0.11202326202392578, 0.11710902404785156, 0.11259715270996094, 0.1123817596435547, 0.11230178833007813, 0.11305923461914062, 0.11501414489746094, 0.11314755249023438, 0.11267343902587891, 0.11198818969726562, 0.11578227233886719, 0.11157094573974609, 0.11109776306152344, 0.11090544128417969, 0.11085968017578125, 0.1115816650390625, 0.11170028686523438, 0.11168470764160156, 0.11113750457763671, 0.11362918090820312, 0.12051622772216797, 0.11454236602783203, 0.11214934539794921, 0.11226441955566406, 0.11223709106445312, 0.11196739196777344, 0.11201017761230468, 0.11168943786621094, 0.11101407623291015, 0.11527769470214844, 0.11116492462158203, 0.11068070220947265, 0.1110302734375, 0.11077426910400391, 0.11080518341064453, 0.11136329650878907, 0.11086908721923829, 0.11117897796630859, 0.1155692138671875, 0.11085609436035156, 0.11080531311035156, 0.11092582702636719, 0.11110195159912109, 0.11081494140625, 0.11122822570800782, 0.11145024108886718, 0.11111302185058594, 0.1153638687133789, 0.11128627014160156, 0.11075379180908203, 0.11142124938964844, 0.11106527709960938, 0.11128729248046874, 0.11213990020751953, 0.11244316864013672, 0.11635158538818359, 0.11181980895996094, 0.11104354858398438, 0.11130470275878906, 0.11094028472900391, 0.11148070526123047, 0.11165081787109375, 0.11229296112060547, 0.11114771270751952, 0.11110012817382812, 0.11486822509765625, 0.11302694702148437, 0.11185587310791016, 0.11161116790771484, 0.11205897521972656, 0.1124163818359375, 0.1119169921875, 0.11232412719726563, 0.11299727630615235, 0.11632025909423828, 0.11169996643066406, 0.11132479858398438, 0.11118425750732422, 0.1113538589477539, 0.11188838195800781, 0.11160797119140625, 0.1113966064453125, 0.11433689880371094, 0.11710892486572265, 0.11168962860107422, 0.11179097747802734, 0.111710205078125, 0.11197618865966796, 0.1121727066040039, 0.11299024200439453, 0.11213990020751953, 0.11129952239990235, 0.11231027221679687, 0.11205836486816406, 0.11196825408935547, 0.11167056274414063, 0.11192998504638672, 0.11214374542236329, 0.11212630462646485, 0.11204646301269532, 0.11142758178710938, 0.11198464202880859, 0.11248393249511719, 0.11207843017578124, 0.1115964813232422, 0.112123779296875, 0.11240179443359374, 0.111538818359375, 0.11415481567382812, 0.1130052490234375, 0.11271561431884766, 0.11244083404541015, 0.11156956481933594, 0.11145830535888672, 0.11114915466308593, 0.11164867401123046]",tokens/s,8.924865441735985,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", 
line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,9917.075456,6193.872896,0.0,5798.62528,5404.427264,s,1,21.5315625,21.5315625,0.0,21.5315625,21.5315625,21.5315625,21.5315625,[21.5315625],,kWh,0.0004143135149416745,4.569452913317382e-05,0.0001556220689419574,0.0006156301130168057,,MB,5705.5232,6493.765632,0.0,6077.546496,5755.124736,s,10,1.47804443359375,0.14780444335937498,0.001349333772418473,0.14716375732421877,0.15014812927246093,0.15027644958496092,0.15037910583496092,"[0.14720985412597656, 0.14711766052246095, 0.14669081115722657, 0.14700889587402344, 0.15040476989746093, 0.15011961364746093, 0.14687986755371094, 0.14865615844726562, 0.14744854736328125, 0.14650825500488282]",tokens/s,1732.0182951303836,kWh,4.3325030819245035e-06,4.777992596634437e-07,2.6351655068235386e-06,7.4454678484114864e-06,tokens/kWh,34383332.94993926,MB,5709.623296,6495.862784,0.0,6079.643648,5755.127296,s,10,89.78707128906251,8.978707128906251,0.01121965840479262,8.97895068359375,8.993672851562499,8.9947978515625,8.9956978515625,"[8.971361328125, 8.9959228515625, 8.9616923828125, 8.972267578125, 8.963349609375, 8.975248046875, 8.9826533203125, 8.9934228515625, 8.9868359375, 8.9843173828125]",tokens/s,7.016600396417474,kWh,0.00026102251405099374,2.8792178177252373e-05,0.00011523787323597876,0.00040505256546422485,tokens/kWh,155535.36842260612,,s,630,89.78394540405283,0.14251419905405197,0.0011855416921898443,0.1422651824951172,0.1436530029296875,0.1447293182373047,0.14692891906738284,"[0.14134521484375, 0.14241334533691405, 0.14179385375976564, 0.14496809387207032, 0.142129150390625, 0.1414185028076172, 0.14166835021972657, 0.14118655395507812, 0.14231398010253907, 0.14118655395507812, 0.14086604309082032, 0.1428683776855469, 0.14226220703125, 0.14250408935546874, 0.14135200500488282, 0.14323155212402344, 0.14167266845703125, 0.14166207885742188, 0.14345449829101561, 0.1424199676513672, 0.14226226806640624, 0.14183180236816406, 0.1427439422607422, 0.14236262512207032, 0.143067138671875, 0.1419325408935547, 0.1421859130859375, 0.1416730194091797, 0.14135891723632812, 0.14240077209472657, 0.14203298950195312, 0.14160368347167968, 0.14267964172363282, 0.14249002075195313, 0.14245887756347655, 0.14223490905761718, 0.14221151733398438, 0.14314271545410157, 0.14704832458496095, 0.1432493438720703, 0.14182371520996093, 0.14184364318847656, 0.14243113708496094, 0.143300537109375, 0.14393437194824218, 0.14280908203125, 0.14276991271972655, 0.1419983367919922, 0.14220611572265626, 0.144903076171875, 0.14223149108886718, 0.14202470397949218, 0.14189523315429686, 0.14284841918945312, 0.14200218200683593, 0.14193238830566407, 0.14225961303710938, 0.14237965393066407, 0.1421600341796875, 0.14207708740234376, 
0.14354515075683594, 0.14217219543457033, 0.1418050231933594, 0.1427392578125, 0.14313221740722656, 0.14239590454101564, 0.14365855407714845, 0.14242247009277345, 0.14196531677246094, 0.1421332550048828, 0.14274124145507813, 0.14196762084960937, 0.1436708221435547, 0.14298570251464843, 0.1423155212402344, 0.14266983032226563, 0.14352102661132812, 0.14223231506347656, 0.14221693420410156, 0.14225392150878907, 0.14312867736816406, 0.14316989135742186, 0.15363072204589845, 0.14231884765625, 0.14323583984375, 0.14264643859863282, 0.14320025634765626, 0.144763427734375, 0.14331111145019532, 0.14322285461425782, 0.14287443542480469, 0.14313285827636718, 0.14240765380859374, 0.14236265563964845, 0.1430241241455078, 0.14288076782226564, 0.1428225555419922, 0.14268208312988281, 0.14255174255371095, 0.14218873596191406, 0.14238674926757813, 0.14303890991210938, 0.14195916748046875, 0.14212506103515626, 0.14212506103515626, 0.1416663055419922, 0.14186700439453126, 0.144787109375, 0.142420166015625, 0.14173977661132814, 0.14163980102539062, 0.14178536987304688, 0.14258586120605468, 0.14341325378417968, 0.14264524841308593, 0.14243161010742186, 0.1411405487060547, 0.14188064575195314, 0.14663658142089844, 0.14173898315429687, 0.1419939880371094, 0.14185061645507813, 0.14236854553222655, 0.1415554504394531, 0.1417732849121094, 0.14147975158691406, 0.14230323791503907, 0.14223974609375, 0.1411558074951172, 0.14223004150390625, 0.14207327270507814, 0.14216560363769531, 0.14205027770996093, 0.14237490844726564, 0.142781982421875, 0.1424054718017578, 0.1421334686279297, 0.1420413818359375, 0.14189785766601562, 0.1416841278076172, 0.14102383422851564, 0.1437071990966797, 0.14348793029785156, 0.142556640625, 0.14186732482910155, 0.14165373229980469, 0.141338623046875, 0.1417150421142578, 0.14203776550292968, 0.14275935363769532, 0.14355506896972656, 0.1422025604248047, 0.14196534729003907, 0.14192076110839844, 0.1418419189453125, 0.14187776184082032, 0.1422143096923828, 0.1418411865234375, 0.14212921142578125, 0.1414776611328125, 0.141650146484375, 0.14189292907714843, 0.142072509765625, 0.14228866577148438, 0.1423784942626953, 0.1422587890625, 0.142084228515625, 0.1422274627685547, 0.1416273956298828, 0.14178028869628906, 0.14337094116210938, 0.14312200927734375, 0.14254531860351563, 0.14219378662109375, 0.1413207092285156, 0.1417322235107422, 0.14160018920898437, 0.145574462890625, 0.14149171447753905, 0.1413695068359375, 0.14125244140625, 0.141366943359375, 0.14245269775390626, 0.1428942108154297, 0.14099542236328125, 0.14244137573242188, 0.14128128051757813, 0.14891183471679686, 0.1424878387451172, 0.14263066101074218, 0.14265171813964844, 0.14244645690917968, 0.14208428955078126, 0.1419489288330078, 0.14205337524414063, 0.14207180786132811, 0.14155282592773438, 0.14164012145996094, 0.14220892333984375, 0.1420042266845703, 0.1416053466796875, 0.14143693542480468, 0.14138291931152344, 0.14154733276367187, 0.1414606475830078, 0.14250985717773437, 0.14267596435546875, 0.14213909912109374, 0.14168707275390624, 0.1415509490966797, 0.14199197387695311, 0.14380633544921875, 0.1418798370361328, 0.1418918151855469, 0.14246633911132814, 0.14128201293945314, 0.14181283569335937, 0.14135081481933592, 0.14136627197265625, 0.1426507568359375, 0.14224038696289062, 0.1422704620361328, 0.14194688415527343, 0.14238021850585938, 0.14143161010742186, 0.1414817352294922, 0.14322099304199218, 0.14174412536621095, 0.14233798217773438, 0.142291015625, 0.14206259155273437, 0.14246194458007813, 0.1421946258544922, 
0.14306918334960939, 0.14274771118164062, 0.14322006225585937, 0.14243612670898437, 0.1448536376953125, 0.1428625946044922, 0.14649754333496093, 0.1442586212158203, 0.14246524047851564, 0.14287484741210937, 0.14259814453125, 0.1425919952392578, 0.14226809692382814, 0.14502085876464843, 0.14365528869628907, 0.14260018920898437, 0.1421721649169922, 0.14199562072753907, 0.1439129638671875, 0.14212300109863282, 0.14336614990234375, 0.1419502410888672, 0.1436021728515625, 0.1432674560546875, 0.14241670227050782, 0.14204617309570314, 0.14170195007324218, 0.14311013793945312, 0.14215081787109374, 0.14433775329589843, 0.14168006896972657, 0.14392787170410157, 0.141653564453125, 0.141666748046875, 0.1435299835205078, 0.14171955871582032, 0.14267391967773438, 0.14171136474609375, 0.14183424377441406, 0.14147378540039063, 0.14132211303710937, 0.14205349731445313, 0.14193814086914064, 0.14225836181640625, 0.14151100158691407, 0.1417072296142578, 0.141343994140625, 0.1444639434814453, 0.144005859375, 0.14193804931640625, 0.14243084716796875, 0.14164991760253906, 0.14239334106445312, 0.1416417236328125, 0.14236058044433594, 0.14315110778808593, 0.14299545288085938, 0.14764031982421874, 0.14202879333496093, 0.14308665466308593, 0.14206396484375, 0.14126345825195313, 0.14206108093261718, 0.14197398376464843, 0.14237008666992187, 0.14147625732421876, 0.14181581115722655, 0.14120787048339845, 0.14112553405761719, 0.14191421508789062, 0.14099623107910156, 0.14472434997558595, 0.14134681701660157, 0.14157414245605468, 0.14102528381347657, 0.14148786926269533, 0.14175975036621094, 0.1421068115234375, 0.14250425720214843, 0.14130227661132813, 0.1414430694580078, 0.14163148498535155, 0.1438756103515625, 0.14266438293457032, 0.14169929504394532, 0.1417431640625, 0.14346949768066405, 0.1440498809814453, 0.1420572509765625, 0.14242367553710938, 0.14241007995605467, 0.14312620544433594, 0.14517744445800781, 0.14146070861816407, 0.14273411560058594, 0.14152088928222656, 0.142077880859375, 0.14199404907226562, 0.14252032470703124, 0.14284384155273439, 0.1423289337158203, 0.1427404479980469, 0.14152499389648437, 0.14282281494140625, 0.14268048095703126, 0.14308348083496095, 0.1432855987548828, 0.1421762237548828, 0.14215606689453125, 0.14236735534667969, 0.14269850158691405, 0.14261453247070313, 0.14257151794433592, 0.14317945861816406, 0.141998046875, 0.14250172424316407, 0.14495989990234376, 0.14258367919921874, 0.14276016235351563, 0.1430546875, 0.14315740966796875, 0.14191194152832032, 0.14145138549804687, 0.1420185546875, 0.14193049621582032, 0.142376953125, 0.14195712280273437, 0.1430768280029297, 0.1422380828857422, 0.14247100830078124, 0.14123992919921874, 0.14183084106445312, 0.14171749877929687, 0.14185430908203125, 0.1422683563232422, 0.14164373779296874, 0.14119168090820314, 0.14080613708496093, 0.14169491577148438, 0.1418363494873047, 0.14173590087890625, 0.14792643737792968, 0.1417628173828125, 0.14161456298828126, 0.14129244995117188, 0.14224461364746094, 0.14185894775390626, 0.1425569305419922, 0.14178317260742188, 0.1418997802734375, 0.14170317077636718, 0.14523187255859374, 0.14171955871582032, 0.14127923583984375, 0.14205746459960938, 0.1426321258544922, 0.1449451446533203, 0.14163848876953125, 0.1418014373779297, 0.1417523193359375, 0.1434066925048828, 0.14441696166992188, 0.14159062194824218, 0.14126092529296874, 0.14123353576660155, 0.1417222442626953, 0.1420221405029297, 0.14159635925292968, 0.14280111694335937, 0.14160751342773437, 0.1418318328857422, 0.14127488708496094, 0.14254893493652343, 
0.14394026184082032, 0.14210588073730468, 0.14262109375, 0.14141676330566405, 0.14134413146972657, 0.1411273956298828, 0.1422181396484375, 0.1421721649169922, 0.14231756591796876, 0.14349241638183594, 0.14252703857421875, 0.14211903381347657, 0.14507615661621093, 0.14330201721191407, 0.1433504638671875, 0.1430160675048828, 0.14413792419433594, 0.1425531768798828, 0.1422514953613281, 0.14230348205566407, 0.14301011657714843, 0.1456681671142578, 0.14359539794921874, 0.14436979675292969, 0.14271078491210937, 0.1429679412841797, 0.14217100524902343, 0.14280455017089844, 0.14206108093261718, 0.1425396728515625, 0.14342710876464843, 0.14219644165039064, 0.142629638671875, 0.14272306823730468, 0.14365866088867188, 0.14273638916015624, 0.14347775268554688, 0.1431183319091797, 0.14240972900390625, 0.14194793701171876, 0.14212556457519532, 0.1420333709716797, 0.14244224548339843, 0.1428372802734375, 0.1423585205078125, 0.14193539428710938, 0.14226988220214845, 0.1452999725341797, 0.14248098754882813, 0.14214793395996095, 0.143931396484375, 0.14421612548828125, 0.14473338317871093, 0.14275616455078124, 0.14187161254882813, 0.14225340270996092, 0.14242422485351564, 0.14493717956542967, 0.14243458557128907, 0.14249481201171876, 0.14249871826171875, 0.14243983459472656, 0.14213711547851562, 0.1430187225341797, 0.14419363403320312, 0.1422142791748047, 0.1430414123535156, 0.14248089599609376, 0.14250137329101562, 0.14358377075195314, 0.14272149658203126, 0.14343782043457032, 0.14219059753417967, 0.14213046264648438, 0.1421543426513672, 0.1423538818359375, 0.14239811706542968, 0.142202880859375, 0.14313641357421875, 0.14263536071777344, 0.14257122802734376, 0.14627049255371094, 0.14284092712402344, 0.1420914306640625, 0.14254464721679688, 0.14304013061523438, 0.14215000915527343, 0.14156390380859374, 0.141844482421875, 0.14197760009765625, 0.14252236938476562, 0.14577186584472657, 0.14148880004882813, 0.14272848510742187, 0.14234083557128907, 0.14184640502929688, 0.1421510009765625, 0.14221597290039062, 0.14287411499023436, 0.143077880859375, 0.14142416381835937, 0.14149014282226563, 0.14245120239257814, 0.14240339660644533, 0.14234828186035156, 0.14223744201660157, 0.14307801818847657, 0.14231446838378906, 0.14207472229003906, 0.1434582061767578, 0.14251596069335937, 0.14241993713378906, 0.14239578247070311, 0.1429811248779297, 0.14165402221679688, 0.1421917724609375, 0.14426109313964844, 0.14200694274902342, 0.14221539306640624, 0.1429667205810547, 0.144872802734375, 0.14549066162109375, 0.14206565856933595, 0.1434207305908203, 0.14279750061035157, 0.14275788879394533, 0.14442425537109374, 0.14344053649902344, 0.14227459716796875, 0.14186087036132813, 0.14254197692871093, 0.14316566467285155, 0.14255783081054688, 0.1426117706298828, 0.14366586303710938, 0.14260223388671875, 0.14171676635742186, 0.14248419189453124, 0.14324327087402344, 0.14216397094726563, 0.1426241912841797, 0.14310588073730468, 0.14214620971679687, 0.14230514526367188, 0.1423771514892578, 0.1427671661376953, 0.14179014587402344, 0.1420977325439453, 0.14319276428222658, 0.1428868865966797, 0.1450449523925781, 0.1423797149658203, 0.14217613220214845, 0.14191346740722657, 0.14156454467773438, 0.14365286254882811, 0.14179122924804688, 0.1415557098388672, 0.14189567565917968, 0.14240562438964843, 0.14185218811035155, 0.14373001098632812, 0.1425408935546875, 0.1422689971923828, 0.14194464111328126, 0.1416992645263672, 0.14249903869628905, 0.14230335998535157, 0.1432930908203125, 0.1416110076904297, 0.14160076904296875, 0.14162944030761718, 
0.14145436096191405, 0.14149935913085937, 0.14232984924316405, 0.14313827514648436, 0.14136358642578126, 0.14129168701171874, 0.14291763305664062, 0.14163740539550781, 0.14185084533691406, 0.14154054260253907, 0.1426164855957031, 0.14229530334472656, 0.1419204406738281, 0.142018310546875, 0.14759738159179686, 0.1419639434814453, 0.14228274536132812, 0.14365426635742187, 0.1430946807861328, 0.14268821716308594, 0.14197673034667968, 0.1414617919921875, 0.14543228149414061, 0.15279983520507812, 0.14431996154785157, 0.14248194885253906, 0.1415017547607422, 0.141935302734375, 0.14375730895996094, 0.14200218200683593, 0.14151632690429689, 0.14208000183105468, 0.14178761291503905, 0.1415302734375, 0.1416343994140625, 0.14259365844726563, 0.14243247985839844, 0.14178466796875, 0.14330857849121093, 0.14184938049316406, 0.14194073486328124, 0.14270054626464843, 0.14197555541992188, 0.14163699340820313, 0.14190861511230468, 0.1428152618408203, 0.142034912109375, 0.1454571533203125, 0.14314227294921875, 0.14184512329101562, 0.1420384979248047, 0.144046630859375]",tokens/s,7.01684468381095,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, 
model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. 
Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,7544.397824,3657.302016,0.0,3254.779904,3057.54368,s,1,17.309408203125,17.309408203125,0.0,17.309408203125,17.309408203125,17.309408203125,17.309408203125,[17.309408203125],,kWh,0.0002934640462833007,3.2363855755023655e-05,9.420535314200174e-05,0.0004200332551803261,,MB,4168.515584,3743.285248,0.0,3319.791616,3198.096896,s,10,1.099827651977539,0.10998276519775389,0.0007807287458452681,0.1097735824584961,0.1108468635559082,0.11106552925109864,0.11124046180725097,"[0.10900691223144532, 0.1091518096923828, 0.10960633850097656, 0.11079827117919921, 0.10975033569335937, 0.10905648040771485, 0.11061702728271484, 0.10979682922363282, 0.11128419494628906, 0.11075945281982422]",tokens/s,2327.6374215514643,kWh,3.22675835480671e-06,3.5581153217091236e-07,1.60647564415425e-06,5.189045531131872e-06,tokens/kWh,49334699.12031384,MB,4172.746752,3785.228288,0.0,3361.734656,3180.01152,s,10,69.50304150390625,6.950304150390624,0.010526177298898699,6.950729736328125,6.961805029296874,6.962716235351563,6.963445200195313,"[6.9567216796875, 6.945984375, 6.96063623046875, 6.95370751953125, 6.9434599609375, 6.96362744140625, 6.9616025390625, 6.9414453125, 6.9281044921875, 6.947751953125]",tokens/s,9.064351521430792,kWh,0.00020273308084102913,2.2362414133234935e-05,7.535672481524416e-05,0.0003004522197895082,tokens/kWh,209683.92260219195,,s,630,69.50068821716307,0.11031855272565569,0.0009821173781436585,0.11013214492797851,0.11135411758422852,0.11198053932189941,0.11491720962524415,"[0.10996892547607422, 0.11093222045898438, 0.11029055786132813, 0.11000077056884766, 0.1098891830444336, 0.10971695709228516, 0.11048576354980469, 0.1099024658203125, 0.10948329925537109, 0.11036454772949218, 0.11073442840576173, 0.11461196899414063, 0.11108966064453125, 0.11063455963134766, 0.11018899536132812, 0.11037872314453125, 0.10977257537841797, 0.10960521697998046, 0.11119222259521484, 0.10990175628662109, 0.11024800109863281, 0.11058380889892579, 0.11105280303955078, 0.1102581787109375, 0.1101108169555664, 0.11072092437744141, 0.1106855697631836, 0.11074934387207032, 0.11083980560302735, 0.11413961791992187, 0.11074406433105469, 0.11002435302734374, 0.10977519989013672, 0.11053654479980468, 0.11024400329589844, 0.11034623718261719, 0.11144198608398438, 0.10961420440673827, 0.10976338958740234, 0.11514492797851562, 0.11014326477050781, 0.1098828125, 0.10966790771484375, 
0.10984550476074219, 0.11004927825927735, 0.10988543701171875, 0.10957823944091796, 0.10950978851318359, 0.11027951812744141, 0.11081721496582031, 0.10929567718505859, 0.10951052856445312, 0.11020301055908203, 0.10931954956054687, 0.10935769653320312, 0.10920550537109375, 0.10923757171630859, 0.11161465454101563, 0.11100978851318359, 0.1104649887084961, 0.10998582458496094, 0.11137229156494141, 0.11009622192382812, 0.10913430023193359, 0.1113564453125, 0.1096192626953125, 0.1101805419921875, 0.11019174194335937, 0.10999241638183593, 0.11023107147216797, 0.11001100921630859, 0.10965196990966797, 0.10974166107177734, 0.1096278076171875, 0.10994687652587891, 0.11019878387451172, 0.11080665588378906, 0.11007654571533203, 0.11087846374511719, 0.11000627136230469, 0.11043740844726563, 0.11018544006347657, 0.11012300872802734, 0.11027372741699219, 0.11063174438476563, 0.1105633316040039, 0.11004108428955078, 0.1100838394165039, 0.1101817626953125, 0.11010336303710938, 0.11033106994628906, 0.10999385833740234, 0.11008230590820313, 0.11168627166748046, 0.1118414077758789, 0.11244070434570312, 0.11160995483398438, 0.1103529281616211, 0.10984239959716798, 0.11038313293457032, 0.11038508605957031, 0.11545174407958984, 0.11214627075195313, 0.11085836791992187, 0.11093001556396484, 0.11013340759277344, 0.10991001892089844, 0.10989552307128907, 0.10928758239746093, 0.10961510467529297, 0.10915634918212891, 0.11022541046142578, 0.10943897247314453, 0.10942873382568359, 0.10934067535400391, 0.10926646423339843, 0.10960345458984375, 0.10945676422119141, 0.10991824340820312, 0.10974662780761718, 0.10981321716308594, 0.10986662292480469, 0.10945219421386719, 0.10959667205810547, 0.1095613784790039, 0.11041795349121093, 0.10910720062255859, 0.10929357147216796, 0.11329708862304687, 0.11037741088867188, 0.11027033233642577, 0.11036262512207032, 0.11016985321044923, 0.10984678649902344, 0.10986441802978515, 0.10941043090820313, 0.109916259765625, 0.11082784271240234, 0.11177996826171875, 0.11153395080566406, 0.11245938873291016, 0.1105692138671875, 0.11092031860351563, 0.11100486755371093, 0.11066655731201172, 0.10985001373291016, 0.11266918182373047, 0.11141539001464844, 0.1103439712524414, 0.1100362548828125, 0.11194367980957032, 0.11029395294189454, 0.10984172821044921, 0.11129923248291015, 0.11111017608642579, 0.11009638214111328, 0.11016989135742188, 0.10931613159179687, 0.1093834228515625, 0.11039734649658203, 0.10967622375488281, 0.10982278442382812, 0.1133691177368164, 0.10985881805419923, 0.10997904205322266, 0.11157974243164062, 0.11125350189208984, 0.11049346923828125, 0.11102230072021485, 0.11026179504394532, 0.1131463394165039, 0.11010393524169922, 0.10957068634033203, 0.11064217376708985, 0.11040032196044922, 0.11021715545654297, 0.11051033782958984, 0.11027776336669921, 0.11033074951171876, 0.11002223968505859, 0.10965404510498047, 0.10980390167236329, 0.10951065826416016, 0.10994070434570312, 0.10998345947265625, 0.10983660888671876, 0.10963046264648438, 0.10993670654296875, 0.1097061767578125, 0.11562496185302734, 0.11129167938232422, 0.11119161224365234, 0.11061875152587891, 0.10991004943847656, 0.10988339233398438, 0.10992230224609376, 0.11009548950195312, 0.11020582580566406, 0.1095244140625, 0.10932838439941406, 0.11031136322021484, 0.11049433898925781, 0.11026636505126954, 0.11021635437011719, 0.10995184326171875, 0.11072102355957031, 0.11058380889892579, 0.11113062286376953, 0.11078451538085937, 0.10968057250976562, 0.11027833557128906, 0.11131488037109374, 0.11069894409179687, 
0.11008576202392578, 0.11143154907226563, 0.11023206329345703, 0.10997555541992188, 0.11318873596191406, 0.11204621124267578, 0.11139481353759766, 0.1113538589477539, 0.11033805084228515, 0.10985266876220703, 0.11077222442626954, 0.11014348602294922, 0.10971100616455078, 0.10983254241943359, 0.11000438690185547, 0.11014313507080079, 0.10963372802734375, 0.11031247711181641, 0.10934166717529296, 0.1098988800048828, 0.11013209533691407, 0.10988339233398438, 0.10997964477539063, 0.11133888244628906, 0.11033647918701171, 0.10996701049804687, 0.11125794982910156, 0.10956166076660157, 0.10976905822753906, 0.10986905670166015, 0.1115525131225586, 0.10982953643798828, 0.10951683044433594, 0.10952761840820313, 0.10960195159912109, 0.10980233764648438, 0.10924441528320313, 0.10945878601074219, 0.10913155364990235, 0.10914256286621093, 0.10944306945800782, 0.11314723205566406, 0.11058067321777344, 0.11019235229492187, 0.11030528259277343, 0.10994073486328125, 0.109770751953125, 0.10957360076904298, 0.10966595458984375, 0.1099109115600586, 0.10946908569335938, 0.10978364562988281, 0.10979942321777343, 0.1092894744873047, 0.10934262084960937, 0.10904790496826172, 0.1091809310913086, 0.10956169891357422, 0.10949584197998047, 0.11085858917236328, 0.11095871734619141, 0.11085955047607422, 0.11053718566894531, 0.10967696380615234, 0.10993459320068359, 0.11035443115234375, 0.11242604827880859, 0.11021612548828125, 0.11024588775634765, 0.110388671875, 0.11044624328613281, 0.10980445098876954, 0.11006313323974609, 0.10994451141357423, 0.10907113647460938, 0.10987110137939453, 0.10993561553955078, 0.11015065765380859, 0.1109749755859375, 0.11040761566162109, 0.11117282867431641, 0.10989654541015625, 0.10993663787841797, 0.11516249847412109, 0.11045737457275391, 0.11047126770019532, 0.11024297332763672, 0.1104085464477539, 0.11095145416259766, 0.11013219451904296, 0.11085161590576172, 0.10985910034179687, 0.109442626953125, 0.10915904235839843, 0.11100281524658204, 0.1119649887084961, 0.10930124664306641, 0.10970162963867187, 0.10972364807128906, 0.1100206069946289, 0.11011686706542968, 0.10949356842041015, 0.11010454559326172, 0.11019245147705078, 0.109716796875, 0.11027526092529297, 0.10954752349853515, 0.10944908905029296, 0.10981183624267578, 0.1097523193359375, 0.10907852935791015, 0.11091251373291015, 0.11090431976318359, 0.11069840240478515, 0.11021903991699218, 0.11135948944091797, 0.10992723083496093, 0.10980067443847656, 0.10937538909912109, 0.11198553466796875, 0.10979737854003906, 0.11067596435546875, 0.10984243011474609, 0.11033164978027343, 0.11041613006591797, 0.10977196502685548, 0.10980409240722656, 0.1094546890258789, 0.10946038055419922, 0.10928300476074218, 0.10935737609863282, 0.1112125473022461, 0.10934067535400391, 0.11049779510498046, 0.10974988555908204, 0.11027900695800781, 0.11049954986572266, 0.11191273498535156, 0.11105315399169922, 0.11072736358642578, 0.11063091278076172, 0.1101844482421875, 0.11067801666259766, 0.11034732818603515, 0.1109267807006836, 0.11286860656738282, 0.11052460479736329, 0.11093270111083985, 0.11074134063720703, 0.11096793365478516, 0.11093673706054688, 0.1116080322265625, 0.11027037048339844, 0.11115225219726563, 0.11141567993164063, 0.11222844696044922, 0.11315046691894531, 0.11070787048339843, 0.11020719909667968, 0.1104751968383789, 0.11139065551757812, 0.11099225616455079, 0.1110403823852539, 0.11109171295166016, 0.11133952331542969, 0.11110198211669922, 0.11081318664550781, 0.11074547576904296, 0.11175872039794922, 0.11153279876708984, 
0.1112125473022461, 0.11046297454833984, 0.1102213134765625, 0.10953113555908203, 0.10971663665771485, 0.1096174087524414, 0.10968924713134766, 0.11047545623779297, 0.10997760009765625, 0.11054898834228516, 0.10960253143310547, 0.10935939025878906, 0.10936934661865234, 0.11226930999755859, 0.1107022705078125, 0.11049606323242188, 0.11175113677978515, 0.11049116516113282, 0.11068262481689453, 0.1105059814453125, 0.11036172485351563, 0.10993046569824219, 0.10980649566650391, 0.10960486602783204, 0.10957791900634765, 0.11124358367919922, 0.11072847747802735, 0.11024022674560546, 0.10998928070068359, 0.11013206481933593, 0.10983567810058593, 0.111831298828125, 0.11055343627929687, 0.10994278717041016, 0.11057968139648437, 0.11197443389892578, 0.10972144317626953, 0.11024195098876953, 0.11703215789794921, 0.11047811126708984, 0.10983628845214843, 0.10937548828125, 0.10947583770751954, 0.1096553955078125, 0.11106787109375, 0.10980550384521484, 0.10994483184814453, 0.11239833831787109, 0.11050572967529297, 0.11018412780761719, 0.11043488311767578, 0.10930790710449219, 0.10941964721679688, 0.11130377960205078, 0.11145398712158203, 0.11007500457763672, 0.11008499145507812, 0.11057561492919922, 0.10987097930908203, 0.10954518127441407, 0.10974470520019532, 0.11043161773681641, 0.11068240356445312, 0.11000768280029297, 0.10945123291015625, 0.10976300811767578, 0.1094764175415039, 0.11025202941894531, 0.10927855682373047, 0.1091529312133789, 0.11032371520996094, 0.10985881805419923, 0.10967625427246094, 0.11049107360839844, 0.11211862182617187, 0.11087462615966796, 0.11103846740722656, 0.11099545288085938, 0.11018598175048828, 0.11046348571777344, 0.11051952362060546, 0.11042486572265625, 0.11164835357666016, 0.11148534393310547, 0.11088896179199219, 0.110671875, 0.10982809448242188, 0.11012854766845703, 0.11092642974853516, 0.11066979217529296, 0.11030326080322266, 0.10919219207763672, 0.10909184265136719, 0.1095956802368164, 0.10916572570800781, 0.10957974243164062, 0.10925929260253907, 0.11023750305175781, 0.11013734436035157, 0.1105792007446289, 0.10947430419921875, 0.10905804443359375, 0.10923779296875, 0.11297634887695313, 0.11067801666259766, 0.11019257354736328, 0.11074771118164063, 0.11060633850097656, 0.1103128662109375, 0.11039142608642578, 0.1098239974975586, 0.10992073822021485, 0.11117708587646484, 0.11057443237304687, 0.10940803527832031, 0.10974002838134765, 0.11008396911621093, 0.10980147552490234, 0.10974835205078125, 0.1093017578125, 0.1099530258178711, 0.10980831909179688, 0.10949584197998047, 0.11175369262695313, 0.10991372680664062, 0.10997571563720702, 0.11015737915039063, 0.1123144302368164, 0.11018710327148437, 0.1098239974975586, 0.10907158660888672, 0.109280029296875, 0.10963763427734376, 0.10965811157226563, 0.10987315368652344, 0.10893462371826172, 0.10994534301757812, 0.10946336364746094, 0.10964604949951172, 0.1107042236328125, 0.10934867095947266, 0.11052912139892578, 0.10950800323486329, 0.10998435211181641, 0.10967244720458984, 0.11005052947998047, 0.11023382568359374, 0.11025823974609375, 0.10984499359130859, 0.10940940856933594, 0.10956998443603516, 0.10972988891601562, 0.11048841857910156, 0.10976604461669921, 0.10965824127197266, 0.1098531494140625, 0.11021926116943359, 0.11090534210205077, 0.1094525146484375, 0.1097674560546875, 0.10951270294189454, 0.10956185913085938, 0.11046502685546875, 0.11048550415039063, 0.11327283477783204, 0.11050393676757812, 0.11018035125732421, 0.10940351867675781, 0.10950105285644532, 0.10935295867919922, 0.10946502685546874, 
0.1096844482421875, 0.10915516662597656, 0.10941827392578125, 0.1104936294555664, 0.10956419372558594, 0.10995507049560548, 0.10928905487060547, 0.10980924987792968, 0.10920223999023437, 0.1097154541015625, 0.10922803497314452, 0.11247341156005859, 0.11028675079345703, 0.10986704254150391, 0.11033634948730468, 0.1101579818725586, 0.11012924957275391, 0.11033715057373047, 0.11003343963623047, 0.10933229064941406, 0.11018489837646485, 0.11504188537597657, 0.11022172546386719, 0.11049574279785156, 0.11116690826416016, 0.11038777923583984, 0.1107550048828125, 0.11052845001220703, 0.10987200164794922, 0.10973203277587891, 0.11079660797119141, 0.1095741424560547, 0.1095946273803711, 0.10969190216064453, 0.10969907379150391, 0.11048162841796876, 0.1108314208984375, 0.11069538879394532, 0.11512397003173828, 0.1112578582763672, 0.11011686706542968, 0.1093773422241211, 0.1116648941040039, 0.10940665435791015, 0.10990386962890625, 0.11033599853515624, 0.1128547821044922, 0.11066329956054688, 0.11103091430664062, 0.11003286743164062, 0.10986294555664063, 0.10950822448730468, 0.10928940582275391, 0.10942918395996094, 0.10919321441650391, 0.1092894744873047, 0.10903756713867188, 0.10926898956298828, 0.11002873229980469, 0.10921580505371094, 0.10990592193603516, 0.10925247955322266, 0.10983846282958984, 0.11045273590087891, 0.11017215728759766, 0.11074559783935548, 0.11004723358154297, 0.11033190155029297, 0.10965948486328125, 0.10989635467529296, 0.109847900390625, 0.11032764434814453, 0.11012342071533203, 0.11008656311035156, 0.11025392150878906, 0.11072118377685547]",tokens/s,9.064658439517762,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,26319.060992,13989.96992,0.0,13587.447808,13583.186432,s,1,54.21411328125,54.21411328125,0.0,54.21411328125,54.21411328125,54.21411328125,54.21411328125,[54.21411328125],,kWh,0.001356683325958344,0.0001496455342829616,0.00043972479622399196,0.0019460536564652976,,MB,1309.782016,14382.137344,0.0,13958.643712,13923.876352,s,10,1.7943414154052735,0.17943414154052734,0.0003030628327130238,0.1794289093017578,0.17978821716308593,0.1798567169189453,0.17991151672363281,"[0.17907696533203124, 0.17967100524902344, 0.1791968994140625, 0.1790460205078125, 0.17963462829589844, 0.17959686279296874, 0.17977299499511717, 0.17915986633300782, 0.17992521667480468, 0.17926095581054688]",tokens/s,1426.7073022007874,kWh,5.305786908863797e-06,5.851338734743983e-07,3.5388816189817588e-06,9.429802401319953e-06,tokens/kWh,27147970.77446352,MB,1334.247424,14384.234496,0.0,13960.740864,13856.6528,s,10,91.61911230468749,9.16191123046875,0.021655065037618965,9.1602666015625,9.185929589843749,9.192367138671875,9.197517177734374,"[9.1495458984375, 9.1844990234375, 9.14040234375, 9.127884765625, 9.16946484375, 9.143287109375, 9.151068359375, 9.171455078125, 9.1827001953125, 
9.1988046875]",tokens/s,6.876294521440887,kWh,0.0002694503213115552,2.9721828309175995e-05,0.0001356964519914173,0.0004348686016121485,tokens/kWh,144871.34680785387,,s,630,91.61636875915524,0.14542280755421472,0.0010955793108377518,0.14517862701416015,0.1466825485229492,0.14766023025512695,0.1494262059020996,"[0.14538204956054687, 0.14609237670898437, 0.14502517700195314, 0.1453525390625, 0.14497744750976563, 0.14502499389648438, 0.14499002075195314, 0.14481471252441405, 0.14507980346679689, 0.14497190856933595, 0.144911865234375, 0.1453260498046875, 0.1444036102294922, 0.15041708374023438, 0.1451214141845703, 0.1452474822998047, 0.14553164672851562, 0.1477734375, 0.14539581298828125, 0.14483036804199217, 0.14550221252441406, 0.14485093688964842, 0.1444106903076172, 0.14762620544433594, 0.14588710021972656, 0.14583587646484375, 0.14654054260253907, 0.14505778503417968, 0.1442523193359375, 0.1450194854736328, 0.14526258850097656, 0.145649658203125, 0.14526824951171874, 0.1450193634033203, 0.1452088317871094, 0.14450303649902344, 0.14442521667480468, 0.14488986206054688, 0.14442105102539063, 0.1442928924560547, 0.14444403076171874, 0.14451318359375, 0.14384947204589843, 0.14509260559082032, 0.14445881652832032, 0.14509382629394532, 0.14454888916015626, 0.14679933166503906, 0.14424240112304687, 0.14480601501464843, 0.1444042510986328, 0.14744204711914063, 0.14657504272460936, 0.14554348754882812, 0.144932861328125, 0.14487551879882812, 0.1443754577636719, 0.14458621215820314, 0.14422642517089843, 0.14550230407714843, 0.14475718688964845, 0.14494924926757813, 0.1446525115966797, 0.1462057647705078, 0.14580422973632812, 0.14864816284179688, 0.14559957885742186, 0.1447940216064453, 0.1468862762451172, 0.14563189697265624, 0.1460001220703125, 0.1455715789794922, 0.14751055908203126, 0.1452612762451172, 0.1464967041015625, 0.14588307189941407, 0.1469009246826172, 0.14544720458984375, 0.1463212127685547, 0.14587382507324217, 0.14731240844726562, 0.14872149658203124, 0.14563565063476563, 0.14467481994628906, 0.14494023132324219, 0.14764070129394533, 0.14554476928710938, 0.1468152618408203, 0.1479943084716797, 0.1447596435546875, 0.14736553955078124, 0.14473225402832032, 0.1488173370361328, 0.1457057647705078, 0.14614743041992187, 0.14516387939453124, 0.14540223693847656, 0.14478338623046874, 0.14527413940429687, 0.14500527954101564, 0.14540594482421876, 0.14526976013183593, 0.14533488464355468, 0.1452445373535156, 0.14534150695800782, 0.14436805725097657, 0.14524674987792968, 0.1449674835205078, 0.14519631958007811, 0.14448899841308593, 0.1490020751953125, 0.14664349365234375, 0.14541619873046874, 0.14495033264160156, 0.14530245971679687, 0.14449253845214843, 0.14484480285644533, 0.14417100524902343, 0.14679769897460937, 0.1449088897705078, 0.14499891662597655, 0.14458624267578124, 0.14477853393554688, 0.1449154510498047, 0.1450762176513672, 0.14520109558105468, 0.14614483642578124, 0.14550239562988282, 0.14457843017578126, 0.1447773742675781, 0.14595301818847656, 0.14572303771972656, 0.14517417907714844, 0.14473190307617187, 0.14540463256835937, 0.14440675354003907, 0.14446131896972655, 0.14497605895996094, 0.14420204162597655, 0.14498927307128906, 0.14435420227050783, 0.1453360900878906, 0.14453082275390625, 0.1447412109375, 0.14496357727050782, 0.14471133422851562, 0.14424485778808593, 0.1451929016113281, 0.14595671081542969, 0.14798623657226562, 0.14407501220703126, 0.14456185913085937, 0.1476566467285156, 0.1459246063232422, 0.1447367401123047, 0.1451456298828125, 0.14439219665527345, 
0.14551881408691406, 0.1487439727783203, 0.144500732421875, 0.14398204040527343, 0.1463027801513672, 0.144116455078125, 0.14469325256347657, 0.14431642150878907, 0.14449049377441406, 0.14491148376464844, 0.14614822387695312, 0.14654873657226564, 0.14552239990234375, 0.14500416564941407, 0.14517222595214843, 0.14409561157226564, 0.14414405822753906, 0.14457884216308595, 0.14665171813964845, 0.14388134765625, 0.14475360107421875, 0.14451910400390625, 0.14415805053710937, 0.14380841064453126, 0.14507516479492188, 0.1442275848388672, 0.14455043029785156, 0.1444454345703125, 0.1451663360595703, 0.14688665771484374, 0.14518272399902343, 0.14450186157226563, 0.14472819519042968, 0.14542262268066405, 0.14468301391601562, 0.14478717041015626, 0.14510108947753905, 0.14481330871582032, 0.14438188171386718, 0.14464288330078126, 0.1442037811279297, 0.14500250244140625, 0.1438695983886719, 0.14463734436035156, 0.14418853759765626, 0.14464300537109376, 0.14479859924316407, 0.1462906951904297, 0.14414381408691407, 0.14434976196289062, 0.14417715454101562, 0.14466867065429687, 0.14433187866210936, 0.14469123840332032, 0.1457038116455078, 0.14452940368652345, 0.14399842834472656, 0.14441935729980468, 0.1451490936279297, 0.1455747528076172, 0.14728172302246093, 0.14511737060546875, 0.1441455078125, 0.14817149353027342, 0.14511737060546875, 0.14501805114746094, 0.1441513671875, 0.14494300842285157, 0.14446159362792968, 0.14660844421386718, 0.14442086791992187, 0.14489213562011719, 0.1448192596435547, 0.14540853881835938, 0.14473980712890624, 0.14494595336914062, 0.144555419921875, 0.14445417785644532, 0.14492463684082033, 0.14512130737304688, 0.14455398559570312, 0.1447926025390625, 0.145808349609375, 0.14690077209472657, 0.14469110107421876, 0.1450499267578125, 0.14475251770019532, 0.144603271484375, 0.14399395751953126, 0.14454861450195314, 0.14413226318359376, 0.14588671875, 0.14354893493652343, 0.14518067932128906, 0.14394528198242187, 0.1467879943847656, 0.14604470825195312, 0.1449006652832031, 0.144795654296875, 0.144721923828125, 0.14582957458496093, 0.14448057556152344, 0.14443717956542967, 0.14493907165527345, 0.14581864929199218, 0.14490313720703124, 0.1452355499267578, 0.14459126281738283, 0.145149658203125, 0.14478160095214843, 0.14478668212890625, 0.14463871765136718, 0.14499151611328126, 0.14642623901367188, 0.1461640625, 0.1451764221191406, 0.14538153076171875, 0.14533952331542968, 0.14628131103515624, 0.14580128479003907, 0.1461903076171875, 0.14529763793945313, 0.14625689697265626, 0.1447506561279297, 0.14593458557128905, 0.14531631469726564, 0.1473915557861328, 0.14698591613769532, 0.14573362731933595, 0.14567327880859374, 0.14514381408691407, 0.14926431274414062, 0.14534988403320312, 0.14516812133789062, 0.14590156555175782, 0.14504925537109375, 0.1510424041748047, 0.1449369659423828, 0.14583602905273438, 0.145328125, 0.14568038940429687, 0.14466252136230467, 0.14496127319335939, 0.145295654296875, 0.14531295776367187, 0.14544671630859374, 0.14551933288574218, 0.14571279907226561, 0.14491708374023438, 0.14508175659179687, 0.14487202453613282, 0.1448775634765625, 0.1452236785888672, 0.1451724853515625, 0.14530339050292967, 0.14491651916503906, 0.14567642211914061, 0.14558822631835938, 0.14582611083984376, 0.14570614624023437, 0.14404234313964845, 0.14487193298339843, 0.1450004425048828, 0.14863871765136719, 0.14489651489257813, 0.14531642150878907, 0.14608787536621093, 0.14557183837890625, 0.1448468475341797, 0.14517657470703124, 0.1442274169921875, 0.14497654724121095, 0.14441839599609374, 
0.14489468383789061, 0.1443429718017578, 0.14438604736328126, 0.14449868774414062, 0.1441976318359375, 0.1441212158203125, 0.14413398742675781, 0.14415274047851562, 0.14438838195800782, 0.14535101318359375, 0.144461181640625, 0.14365554809570313, 0.1443687744140625, 0.14437580871582031, 0.14470579528808594, 0.14632614135742186, 0.14564883422851563, 0.1459649658203125, 0.1447307586669922, 0.14478265380859376, 0.14515814208984376, 0.14623023986816405, 0.14713839721679686, 0.14457072448730468, 0.14427238464355469, 0.14938099670410157, 0.1449911651611328, 0.14469859313964845, 0.1445030975341797, 0.1455735321044922, 0.14497398376464843, 0.14727235412597656, 0.14413984680175781, 0.14449708557128907, 0.1440133056640625, 0.14516217041015625, 0.14449261474609376, 0.14610365295410158, 0.14536146545410156, 0.14453273010253906, 0.1453905029296875, 0.14498722839355468, 0.14541705322265625, 0.1456906280517578, 0.1454941101074219, 0.14528297424316405, 0.14565785217285157, 0.1449369659423828, 0.1451663360595703, 0.14569427490234374, 0.1444949493408203, 0.14470346069335938, 0.1445418243408203, 0.14530709838867187, 0.14477571105957032, 0.14508966064453124, 0.1450023651123047, 0.14526278686523436, 0.14730528259277345, 0.14480998229980468, 0.14426921081542968, 0.14776480102539064, 0.14542233276367186, 0.14525027465820312, 0.14429446411132812, 0.14528102111816407, 0.14473954772949219, 0.14525648498535157, 0.14509526062011718, 0.1452178955078125, 0.14440838623046875, 0.14508639526367187, 0.14384559631347657, 0.14420156860351563, 0.14386585998535156, 0.14481138610839844, 0.14526031494140626, 0.14455894470214845, 0.14460646057128906, 0.14467286682128908, 0.14393753051757813, 0.14487210083007812, 0.14709555053710938, 0.14515110778808593, 0.1447225341796875, 0.14495724487304687, 0.14435714721679688, 0.1443007354736328, 0.14629661560058593, 0.14480508422851562, 0.14944467163085937, 0.14501676940917968, 0.14450790405273437, 0.1480396728515625, 0.14502919006347656, 0.14562442016601562, 0.14409715270996093, 0.1460641326904297, 0.14751242065429687, 0.14679122924804688, 0.1447704620361328, 0.1450946502685547, 0.14453826904296874, 0.1455529022216797, 0.14508674621582032, 0.14623699951171876, 0.14496630859375, 0.1456353302001953, 0.14539161682128907, 0.1461759948730469, 0.1446604766845703, 0.14604054260253907, 0.14547048950195313, 0.14696957397460939, 0.145712890625, 0.1490373077392578, 0.14509056091308595, 0.14521958923339845, 0.14592930603027343, 0.14529612731933594, 0.14526666259765625, 0.14490374755859375, 0.14645231628417968, 0.1452485809326172, 0.14492515563964845, 0.147219970703125, 0.1459760284423828, 0.14529104614257812, 0.14549139404296876, 0.14459890747070311, 0.1454405059814453, 0.14495747375488283, 0.14565469360351563, 0.14490829467773436, 0.14653030395507813, 0.14800038146972655, 0.14535533142089843, 0.1486617889404297, 0.14545875549316406, 0.14740713500976563, 0.14537750244140624, 0.14487126159667968, 0.145287353515625, 0.14457228088378907, 0.14488397216796875, 0.14449465942382814, 0.1447403564453125, 0.14483853149414064, 0.14502310180664063, 0.14504045104980468, 0.14495840454101563, 0.1445498809814453, 0.1449858856201172, 0.1441337890625, 0.14481800842285156, 0.1444171905517578, 0.14653388977050782, 0.143955810546875, 0.14809519958496092, 0.14506985473632814, 0.1477147216796875, 0.145389892578125, 0.1452844543457031, 0.14486927795410157, 0.14438450622558593, 0.14513587951660156, 0.1500384979248047, 0.14476419067382812, 0.1448209991455078, 0.14522982788085936, 0.14513766479492188, 0.14518067932128906, 
0.14499020385742187, 0.14507008361816406, 0.14662655639648436, 0.1451346893310547, 0.14422723388671874, 0.1458501739501953, 0.14573382568359375, 0.14566604614257814, 0.14526258850097656, 0.14527693176269532, 0.145431640625, 0.14530636596679689, 0.14536473083496093, 0.14549853515625, 0.14639436340332032, 0.14510566711425782, 0.14468505859375, 0.1464514617919922, 0.14777650451660157, 0.14517266845703125, 0.14542140197753906, 0.14620037841796876, 0.1455522918701172, 0.14445960998535157, 0.14528854370117186, 0.14446879577636718, 0.14589462280273438, 0.14511712646484376, 0.14454371643066405, 0.14502992248535157, 0.14454144287109374, 0.14455215454101564, 0.14457379150390626, 0.14500534057617187, 0.14510908508300782, 0.1441846008300781, 0.14449420166015625, 0.1445440673828125, 0.14507283020019532, 0.14643788146972656, 0.14617155456542968, 0.14623599243164062, 0.14571688842773436, 0.1456827850341797, 0.14594038391113281, 0.14595423889160156, 0.14608866882324217, 0.1464913330078125, 0.14621087646484374, 0.14588470458984376, 0.145885498046875, 0.14755235290527344, 0.14572134399414063, 0.14949789428710938, 0.14663827514648436, 0.14595298767089843, 0.14855123901367187, 0.1466717071533203, 0.14647740173339843, 0.1468091583251953, 0.14578413391113282, 0.14766316223144532, 0.14639059448242187, 0.1457356414794922, 0.1452696075439453, 0.14613618469238282, 0.14574899291992188, 0.1465485382080078, 0.1456041259765625, 0.14660450744628906, 0.14543667602539062, 0.14589552307128906, 0.14562051391601563, 0.1459771270751953, 0.1497007293701172, 0.14555183410644532, 0.14598451232910156, 0.14592512512207031, 0.1458094024658203, 0.14618829345703124, 0.14530252075195313, 0.14543154907226563, 0.145485595703125, 0.14573146057128905, 0.14520664978027345, 0.1457035217285156, 0.14581983947753907, 0.14521484375, 0.15034220886230468, 0.14535466003417968, 0.14528668212890625, 0.14525059509277344, 0.14563792419433594, 0.1459218292236328, 0.14657968139648436, 0.14564877319335937, 0.14613372802734376, 0.14539984130859376, 0.14595269775390626, 0.1453895721435547, 0.14481593322753905, 0.14597158813476563, 0.14618800354003905, 0.14526016235351563, 0.14512783813476562, 0.1450314178466797, 0.14529869079589844, 0.14538595581054686, 0.14591004943847657, 0.14566371154785157, 0.1461721954345703, 0.14484480285644533, 0.14609330749511718, 0.1451763458251953, 0.14611935424804687, 0.14547715759277344, 0.1457610168457031, 0.1453912353515625, 0.14543891906738282, 0.14683538818359376, 0.14569856262207032, 0.14866896057128906, 0.14678012084960937, 0.14914924621582032, 0.14662477111816405, 0.14591226196289062, 0.1461890869140625, 0.14801834106445313]",tokens/s,6.876500439088225,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,3996.745728,2050.883584,0.0,1648.361472,1630.36416,s,1,12.222203125,12.222203125,0.0,12.222203125,12.222203125,12.222203125,12.222203125,[12.222203125],,kWh,0.00014855253883747535,1.6379204986622288e-05,4.710587101799946e-05,0.0002120376148420971,,MB,2429.390848,2172.5184,0.0,1749.024768,1728.590848,s,10,0.7309898834228516,0.07309898834228516,0.0006633127918262002,0.07283580780029297,0.0740219123840332,0.0741707805633545,0.07428987510681152,"[0.07252937316894531, 0.07255404663085938, 0.07301602935791016, 0.07243856048583984, 0.07258956909179687, 0.07398883056640625, 0.07269078063964844, 0.07388220977783203, 0.07431964874267578, 0.0729808349609375]",tokens/s,3502.10045043692,kWh,2.159670295373686e-06,2.3817354866313185e-07,1.0587038698382952e-06,3.4565477138751134e-06,tokens/kWh,74062336.52507578,MB,2438.53312,2214.46144,0.0,1790.967808,1714.824192,s,10,46.34736376953124,4.634736376953126,0.018029384581929295,4.63290673828125,4.650301904296875,4.661827221679688,4.6710474755859375,"[4.6225986328125, 4.63529345703125, 4.64453271484375, 4.624888671875, 4.64477392578125, 4.63052001953125, 4.64774072265625, 4.6733525390625, 4.6054326171875, 4.61823046875]",tokens/s,13.59300613369863,kWh,0.0001369344961717078,1.5104174966421749e-05,4.9401392053760716e-05,0.00020144006319189022,tokens/kWh,312748.11475801957,,s,630,46.34507415008543,0.0735636097620404,0.0008376199272574177,0.07336038208007813,0.0743756935119629,0.07483492813110351,0.0766151721954346,"[0.07308493041992188, 0.07316841888427734, 0.07317692565917969, 0.0730621795654297, 0.07282707214355469, 0.07327728271484375, 0.07309385681152344, 0.072951904296875, 0.07260774230957032, 0.07273369598388672, 0.07287910461425781, 0.0737259521484375, 0.07289241790771485, 0.07340425872802735, 0.07308303833007812, 0.07329177856445312, 0.07346790313720702, 0.0726671371459961, 0.0730439682006836, 0.07302896118164062, 0.07260022735595703, 0.072764892578125, 0.07262876892089844, 0.07273843383789062, 0.07351891326904297, 0.07276191711425781, 0.072689697265625, 0.07300233459472656, 0.07321459197998047, 0.07389183807373047, 0.07380738830566407, 0.07330220794677735, 0.07314575958251954, 0.07310015869140625, 0.0729249267578125, 0.072780029296875, 0.07256092834472656, 0.07419055938720703, 0.07503644561767578, 0.07345174407958985, 0.07412310028076172, 0.0733279037475586, 0.07329062652587891, 0.07406745910644531, 0.0731668472290039, 0.07315023803710938, 0.07327145385742187, 0.07283769226074219, 0.07290879821777344, 0.08307068634033203, 0.07364841461181641, 0.0731495361328125, 0.0735006103515625, 0.07311663818359375, 0.07310537719726562, 0.07360105895996094, 0.07333625793457031, 0.07359923553466798, 
0.07334745788574219, 0.07329593658447266, 0.07327117156982423, 0.07348659515380859, 0.07316687774658204, 0.07328128051757812, 0.07394303894042968, 0.07333273315429688, 0.07371708679199218, 0.07319618988037109, 0.07334297943115234, 0.07403110504150391, 0.07336140441894531, 0.07330515289306641, 0.07366342163085937, 0.0734764175415039, 0.07335084533691406, 0.07309043121337891, 0.07311564636230469, 0.0738004150390625, 0.0744078369140625, 0.07311164855957031, 0.0729742431640625, 0.07305216217041016, 0.07304838562011719, 0.07382598114013672, 0.07305625915527343, 0.07301119995117188, 0.0732873306274414, 0.07355427551269532, 0.07308844757080078, 0.073060546875, 0.0734082260131836, 0.07374095916748047, 0.07301728057861329, 0.07291091156005859, 0.07291494750976563, 0.07310851287841796, 0.07294377899169922, 0.07442515563964844, 0.07361945343017579, 0.07339609527587891, 0.07298880004882813, 0.07296422576904296, 0.07295577239990235, 0.07453199768066407, 0.07397462463378907, 0.0739051513671875, 0.07394972991943359, 0.0739078369140625, 0.07363056182861329, 0.07328880310058594, 0.07342991638183594, 0.07465369415283203, 0.07438358306884765, 0.0738854751586914, 0.0734669418334961, 0.0778741455078125, 0.07352243041992187, 0.07299964904785156, 0.07282688140869141, 0.07370956420898438, 0.07432396697998046, 0.07360307312011719, 0.07291693115234375, 0.07395651245117188, 0.07462310028076172, 0.07381890869140625, 0.07343718719482421, 0.0740679702758789, 0.074176513671875, 0.07365964508056641, 0.076740478515625, 0.07434124755859375, 0.07343647766113282, 0.07340131378173828, 0.07328899383544922, 0.07315500640869141, 0.0734576644897461, 0.07425228881835938, 0.0738887710571289, 0.07381858825683593, 0.0742364501953125, 0.07364982604980469, 0.07341091156005859, 0.07327903747558594, 0.07318790435791016, 0.07378284454345703, 0.07341289520263672, 0.07348226928710938, 0.073276611328125, 0.07324550628662109, 0.0740244140625, 0.0733655014038086, 0.07368144226074219, 0.0740185317993164, 0.07361305236816407, 0.07342678070068359, 0.07376675415039062, 0.07382694244384766, 0.07378761291503906, 0.0731830062866211, 0.07435286712646484, 0.07528431701660156, 0.07480902099609375, 0.07448214721679687, 0.07417241668701172, 0.07368000030517578, 0.07394393920898437, 0.07318077087402344, 0.07307100677490234, 0.07294950103759766, 0.07271759796142578, 0.07401491546630859, 0.07452957153320312, 0.07485612487792968, 0.07309139251708985, 0.0730769271850586, 0.07292704010009765, 0.07318016052246094, 0.07308943939208984, 0.0732903060913086, 0.07365350341796875, 0.07326348876953125, 0.07305792236328125, 0.07554102325439453, 0.07329830169677734, 0.0735713882446289, 0.07369219207763672, 0.07360896301269532, 0.07317302703857421, 0.07310617828369141, 0.07290879821777344, 0.07320518493652343, 0.07324668884277344, 0.07336844635009766, 0.07301430511474609, 0.07306301116943359, 0.07282201385498047, 0.07282367706298828, 0.07282233428955077, 0.07266550445556641, 0.07379289245605469, 0.07303641510009766, 0.07295116424560547, 0.07328364562988281, 0.07373804473876953, 0.07329663848876954, 0.07354374694824219, 0.07468844604492188, 0.07335935974121094, 0.07301734161376953, 0.07296640014648438, 0.07380127716064454, 0.07327353668212891, 0.07285459136962891, 0.07286383819580078, 0.07267798614501954, 0.07278575897216796, 0.0728123550415039, 0.07299091339111329, 0.07354819488525391, 0.07344496154785156, 0.0776087646484375, 0.0740890884399414, 0.07351904296875, 0.07333455657958984, 0.07343743896484375, 0.07330374145507812, 0.07334345245361328, 0.07280569458007813, 
0.07286557006835938, 0.07300729370117187, 0.07347052764892578, 0.07314361572265625, 0.07425504302978515, 0.07336140441894531, 0.07362969970703125, 0.07358252716064453, 0.07609315490722657, 0.07327305603027344, 0.07331494140625, 0.07376895904541016, 0.07311167907714844, 0.0728344955444336, 0.07281472015380859, 0.07335356903076172, 0.07302754974365235, 0.07291935729980469, 0.07336723327636718, 0.0730245132446289, 0.0750540771484375, 0.07454524993896484, 0.07366851043701172, 0.07356175994873047, 0.07339014434814453, 0.07305267333984375, 0.07306012725830079, 0.07306598663330079, 0.07352582550048828, 0.07318431854248048, 0.07370432281494141, 0.07310131072998047, 0.07303167724609375, 0.07335743713378906, 0.07355369567871094, 0.07292486572265625, 0.07299727630615234, 0.07330220794677735, 0.07314617919921874, 0.07283686065673828, 0.07327311706542969, 0.07336908721923828, 0.07297904205322266, 0.07305980682373046, 0.07356073760986329, 0.07305651092529297, 0.07297023773193359, 0.07300710296630859, 0.0736911392211914, 0.07341241455078125, 0.07330601501464844, 0.07364867401123047, 0.07357933044433594, 0.07345667266845703, 0.07348828887939453, 0.07388566589355469, 0.07354780578613282, 0.07346995544433593, 0.07415193939208985, 0.0735005111694336, 0.07363308715820313, 0.0737628173828125, 0.07391439819335938, 0.07403142547607422, 0.0741095962524414, 0.07393049621582032, 0.0737959976196289, 0.07384441375732421, 0.07427072143554687, 0.07383245086669922, 0.0738016357421875, 0.0742646713256836, 0.07385849761962891, 0.07412083435058593, 0.07422457885742187, 0.07418470764160157, 0.0749521255493164, 0.07605225372314453, 0.07456371307373047, 0.0742446060180664, 0.07392387390136719, 0.0739521255493164, 0.07344297790527343, 0.07425692749023438, 0.07592530822753907, 0.07630838775634766, 0.07580441284179687, 0.0752498550415039, 0.07523580932617188, 0.07396355438232421, 0.07350704193115234, 0.0730439682006836, 0.07295292663574218, 0.07284159851074219, 0.07276297760009766, 0.07330912017822265, 0.07285759735107422, 0.07321600341796874, 0.07281664276123047, 0.07332006072998047, 0.07341683197021484, 0.07565721893310547, 0.07362924957275391, 0.07364832305908203, 0.07331235504150391, 0.07341471862792968, 0.07300540924072266, 0.07290879821777344, 0.07352268981933593, 0.07291110229492187, 0.07294377899169922, 0.07305430603027344, 0.07343513488769532, 0.07326464080810546, 0.07312553405761718, 0.07361212921142578, 0.074067138671875, 0.0738384017944336, 0.07344640350341797, 0.07308019256591797, 0.07330265808105468, 0.07294290924072265, 0.07319638061523437, 0.07280377960205078, 0.07299116516113281, 0.0734856948852539, 0.0734901123046875, 0.07329065704345702, 0.07342880249023437, 0.07314656066894532, 0.07315561676025391, 0.07333372497558593, 0.07333641815185547, 0.07387308502197265, 0.0729974365234375, 0.07326326751708985, 0.07293337249755859, 0.07301734161376953, 0.07288390350341797, 0.07341497802734374, 0.07327260589599609, 0.07328947448730469, 0.07557401275634766, 0.0738936996459961, 0.07379363250732422, 0.07471135711669921, 0.0740043487548828, 0.0739902114868164, 0.07333312225341797, 0.07297023773193359, 0.07322819519042968, 0.07339427185058593, 0.07326662445068359, 0.07307730865478515, 0.07872921752929687, 0.07315763092041015, 0.07307917022705078, 0.07367334747314454, 0.07408640289306641, 0.07361507415771484, 0.0731938247680664, 0.07294528198242188, 0.07442982482910156, 0.07330870056152344, 0.0730054702758789, 0.07326873779296875, 0.07304246520996094, 0.07305197143554687, 0.07305615997314453, 0.0729420166015625, 
0.0729250259399414, 0.07294745635986329, 0.07315478515625, 0.07359693145751953, 0.07292716979980468, 0.07288864135742187, 0.07303270721435547, 0.07308982086181641, 0.07374845123291016, 0.07380489349365234, 0.07376169586181641, 0.07413673400878906, 0.07406400299072266, 0.07345785522460938, 0.07348902130126952, 0.07475833892822266, 0.07457974243164063, 0.0743443832397461, 0.07349247741699219, 0.07406179046630859, 0.07355171203613281, 0.07355996704101563, 0.07376310729980469, 0.07389389038085938, 0.074136962890625, 0.07420582580566407, 0.07399833679199219, 0.07378739166259765, 0.07395327758789062, 0.07368653106689453, 0.07458662414550782, 0.0737113265991211, 0.0736464614868164, 0.07374428558349609, 0.07360717010498047, 0.07353068542480469, 0.07369942474365235, 0.07383715057373047, 0.07385001373291016, 0.0738578872680664, 0.07579456329345703, 0.07816998291015625, 0.07369548797607423, 0.07528243255615234, 0.07417180633544922, 0.0743351058959961, 0.07436492919921875, 0.07401398468017578, 0.07403171539306641, 0.07411724853515625, 0.07488451385498048, 0.07528012847900391, 0.07396028900146484, 0.07412895965576172, 0.07417900848388671, 0.07388569641113281, 0.07374028778076172, 0.07427072143554687, 0.07382015991210937, 0.07450367736816406, 0.07466381072998046, 0.07433484649658204, 0.07426662445068359, 0.07422512054443359, 0.074032958984375, 0.07377737426757812, 0.07392092895507812, 0.07368883514404297, 0.07448611450195312, 0.0737303009033203, 0.07372774505615234, 0.07426457977294922, 0.07465350341796875, 0.07394707489013672, 0.07384925079345703, 0.07365567779541016, 0.07387184143066407, 0.07379100799560546, 0.07381635284423828, 0.07427657318115234, 0.07447113800048828, 0.07428316497802734, 0.07389244842529297, 0.07348982238769532, 0.07444950103759766, 0.07489494323730468, 0.07437481689453125, 0.07442924499511719, 0.07442127990722656, 0.07438419342041015, 0.07471932983398437, 0.07418675231933594, 0.07364975738525391, 0.07400595092773438, 0.07513916778564453, 0.07553852844238282, 0.075337890625, 0.07459241485595704, 0.07436540985107422, 0.07473474884033203, 0.07359555053710938, 0.07307667541503907, 0.07329714965820312, 0.07315763092041015, 0.07302963256835937, 0.07387200164794921, 0.0738466567993164, 0.07403142547607422, 0.07340934753417969, 0.07315763092041015, 0.07289059448242187, 0.07299254608154297, 0.07294493103027344, 0.07327760314941406, 0.07281107330322266, 0.07282252502441407, 0.0728663330078125, 0.07519203186035156, 0.07313967895507813, 0.07294032287597656, 0.07280000305175781, 0.07306034851074218, 0.07275068664550781, 0.07270877075195313, 0.07253971099853515, 0.07460598754882812, 0.07314070129394531, 0.0730811538696289, 0.07305622100830078, 0.07271017456054688, 0.07296205139160156, 0.07273680114746094, 0.07261590576171875, 0.0730021743774414, 0.07516630554199219, 0.07301142120361329, 0.07273903656005859, 0.07276470184326173, 0.0727407989501953, 0.07289299011230468, 0.07352118682861328, 0.07321212768554687, 0.07304370880126954, 0.07292108917236328, 0.07309011077880859, 0.07275820922851563, 0.07289791870117188, 0.0729645767211914, 0.0728741455078125, 0.07314022064208985, 0.07271542358398438, 0.07273763275146485, 0.07307577514648438, 0.07287417602539062, 0.07296896362304688, 0.07333020782470703, 0.07353187561035156, 0.07283213043212891, 0.07289036560058594, 0.07296851348876954, 0.07280697631835938, 0.07317094421386719, 0.07293714904785156, 0.07278623962402343, 0.07272447967529297, 0.07316838073730468, 0.07295231628417968, 0.07306649780273437, 0.07356275177001953, 0.0729815673828125, 
0.07290361785888672, 0.07275110626220703, 0.07262342071533204, 0.07693596649169922, 0.07402076721191406, 0.07396332550048829, 0.07321619415283204, 0.07282688140869141, 0.07284288024902344, 0.07328396606445313, 0.07291085052490234, 0.07289788818359375, 0.0733023681640625, 0.07325052642822266, 0.07358326721191406, 0.07467616271972656, 0.07313731384277344, 0.07355068969726562, 0.07392870330810547, 0.07350851440429687, 0.07356617736816407, 0.07323836517333984, 0.0732615966796875, 0.07319369506835938, 0.07297789001464844, 0.07297055816650391, 0.07332864379882813, 0.07320166778564453, 0.07308092498779296, 0.07305820465087891, 0.0729599380493164, 0.07308294677734375, 0.07370934295654297, 0.07300246429443359, 0.07305120086669922, 0.07284909057617188, 0.07334012603759765, 0.07276419067382812, 0.0729886703491211, 0.07317491149902344, 0.07357612609863282, 0.07344608306884766, 0.07311539459228515, 0.07352044677734375, 0.07399520111083985, 0.07350351715087891, 0.07356006622314454, 0.07312483215332032, 0.0728616943359375, 0.07296348571777343, 0.07298675537109375, 0.0728969268798828, 0.072806396484375, 0.07328678131103515, 0.07346431732177734, 0.07327385711669922, 0.07317919921875, 0.07303974151611328, 0.073244384765625, 0.07318761444091797, 0.07355391693115235]",tokens/s,13.59367767887881,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,4847.980544,2562.588672,0.0,2160.06656,2133.479936,s,1,12.5006220703125,12.5006220703125,0.0,12.5006220703125,12.5006220703125,12.5006220703125,12.5006220703125,[12.5006220703125],,kWh,0.00015488639145005436,1.7077662330590946e-05,4.908809482601084e-05,0.00022105214860665615,,MB,4899.971072,2677.932032,0.0,2254.4384,2227.234304,s,10,0.831797737121582,0.0831797737121582,0.0013457676201269018,0.08276248168945313,0.08385467987060546,0.08546570816040039,0.08675453079223633,"[0.08293574523925781, 0.08277091217041016, 0.08306630706787109, 0.08224153900146484, 0.0822701416015625, 0.08349667358398437, 0.082650146484375, 0.08253548431396485, 0.08707673645019531, 0.08275405120849609]",tokens/s,3077.671272416326,kWh,2.3952290261957366e-06,2.6409440592072166e-07,1.0964193652952882e-06,3.7557427974117465e-06,tokens/kWh,68162282.08609526,MB,4904.218624,2721.972224,0.0,2298.478592,2219.490304,s,10,52.3375791015625,5.23375791015625,0.013458814410458266,5.2327905273437505,5.250342578125,5.25665224609375,5.26169998046875,"[5.233552734375, 5.2629619140625, 5.23819482421875, 5.2225361328125, 5.2489404296875, 5.22888720703125, 5.2320283203125, 5.215173828125, 5.21941455078125, 5.23588916015625]",tokens/s,12.037239987303728,kWh,0.0001524742443775505,1.681824501975418e-05,5.5194042333703695e-05,0.0002244865317310084,tokens/kWh,280640.44428059465,,s,630,52.335323524475086,0.08307194210234142,0.0007875933220576843,0.08289139175415039,0.0837305305480957,0.0841595703125,0.08648801834106445,"[0.08361369323730469, 0.08384307098388671, 0.08306687927246094, 0.08361606597900391, 0.08326691436767578, 
0.082808349609375, 0.08470816040039063, 0.08338432312011719, 0.0835006103515625, 0.08313286590576172, 0.08286617279052734, 0.08265840148925781, 0.08309347534179687, 0.08353443145751953, 0.08297289276123047, 0.08403123474121094, 0.08257100677490234, 0.08253298950195312, 0.08268595123291016, 0.08301158142089844, 0.08327782440185547, 0.0836167984008789, 0.0831968994140625, 0.08292867279052735, 0.08335036468505859, 0.08382681274414062, 0.0831488037109375, 0.08331433868408203, 0.08292182159423828, 0.08286978912353515, 0.0827642593383789, 0.08282064056396485, 0.0828359375, 0.0828579864501953, 0.08281088256835938, 0.0826429443359375, 0.08275939178466797, 0.08339894104003906, 0.08314275360107422, 0.0830683822631836, 0.08490227508544922, 0.08345398712158203, 0.08297625732421875, 0.08259584045410157, 0.0826844482421875, 0.08284159851074219, 0.08300710296630859, 0.08269452667236328, 0.08256626892089844, 0.08376438140869141, 0.08278717041015625, 0.08326563262939453, 0.08261507415771484, 0.08223744201660156, 0.08244429016113282, 0.08243746948242188, 0.08252278137207031, 0.08280796813964844, 0.0828486099243164, 0.08267689514160156, 0.08267430114746094, 0.08321250915527344, 0.08285593414306641, 0.08278227233886719, 0.08254704284667969, 0.08248076629638672, 0.0822701416015625, 0.08579724884033203, 0.08759526062011719, 0.08299075317382812, 0.08280089569091797, 0.08400601959228515, 0.08368780517578125, 0.08466886138916016, 0.08333894348144531, 0.0826798095703125, 0.08316259002685547, 0.08758358764648437, 0.08342044830322265, 0.08258745574951172, 0.08347440338134765, 0.0831497573852539, 0.08313855743408204, 0.08321430206298829, 0.08332288360595703, 0.08303443145751953, 0.08324412536621094, 0.08352358245849609, 0.08292607879638672, 0.08354745483398437, 0.08348140716552735, 0.08326143646240235, 0.08334950256347656, 0.08372991943359374, 0.08363878631591797, 0.08332662200927735, 0.08415267181396484, 0.08331855773925781, 0.08323830413818359, 0.08355490875244141, 0.08306451416015626, 0.08294767761230469, 0.0834439697265625, 0.08334162902832032, 0.08379199981689453, 0.08350131225585937, 0.08320515441894531, 0.08342118072509766, 0.08364134216308594, 0.08338751983642578, 0.08346438598632812, 0.08356275177001952, 0.08386396789550782, 0.0843644790649414, 0.08392991638183593, 0.08373603057861329, 0.08337257385253906, 0.08312406158447265, 0.08285609436035156, 0.08344371032714844, 0.08349686431884766, 0.08389020538330078, 0.0828797149658203, 0.0830917739868164, 0.08307103729248047, 0.08479792022705078, 0.08295423889160156, 0.08420934295654296, 0.0833088607788086, 0.08312982177734375, 0.08306294250488282, 0.08331926727294922, 0.08306473541259765, 0.08319181060791016, 0.08306483459472656, 0.08394137573242187, 0.08269004821777344, 0.08237452697753907, 0.0824136962890625, 0.08273101043701171, 0.08265670776367187, 0.08241824340820313, 0.08278569793701172, 0.08416521453857422, 0.08341709136962891, 0.0828743667602539, 0.08265321350097657, 0.08773155212402343, 0.08339068603515624, 0.08279695892333984, 0.08281526184082032, 0.08249110412597656, 0.08265523529052735, 0.08283340454101562, 0.08311302185058594, 0.0836434555053711, 0.08316726684570312, 0.08364630126953125, 0.08239923095703125, 0.08234803009033204, 0.08279840087890625, 0.08280873870849609, 0.08267110443115235, 0.08268252563476562, 0.08251404571533204, 0.08236441802978516, 0.08276992034912109, 0.08317337799072265, 0.08313980865478515, 0.08332061004638672, 0.08260224151611328, 0.0828485107421875, 0.0832850570678711, 0.08292150115966797, 0.08273603057861328, 
0.0825241928100586, 0.08243942260742188, 0.08251222229003906, 0.08266381072998047, 0.08334912109375, 0.0842735366821289, 0.08328598022460937, 0.08275068664550782, 0.08273388671875, 0.08272022247314453, 0.08300956726074218, 0.08590386962890625, 0.08627311706542969, 0.08345283508300781, 0.08243599700927734, 0.08347657775878906, 0.08417279815673828, 0.08344166564941406, 0.08296857452392578, 0.08248934173583984, 0.08535858917236328, 0.08318361663818359, 0.08317747497558593, 0.08289075469970703, 0.08272013092041015, 0.08265382385253907, 0.08252006530761719, 0.08306278228759766, 0.08326121520996094, 0.08339683532714844, 0.08261046600341797, 0.08259903717041016, 0.08253446197509766, 0.08328451538085938, 0.08312403106689453, 0.08253065490722657, 0.08321622467041015, 0.08235826873779296, 0.08233164978027344, 0.08303555297851563, 0.0827616958618164, 0.08321920013427735, 0.08271858978271485, 0.08223270416259766, 0.08211519622802735, 0.08259174346923828, 0.08319817352294921, 0.08257218933105469, 0.08239807891845703, 0.0824791030883789, 0.08219052886962891, 0.08227632141113281, 0.08272061157226562, 0.08275888061523437, 0.08333808135986329, 0.08250969696044921, 0.08238495635986329, 0.08241561889648437, 0.08252825927734375, 0.08294796752929688, 0.08323209381103516, 0.08254953765869141, 0.08242160034179688, 0.08267382049560547, 0.0827959976196289, 0.08270489501953125, 0.08337206268310547, 0.08287641906738281, 0.08244403076171875, 0.08650300598144531, 0.08267756652832031, 0.08378176116943359, 0.08312825775146485, 0.08269417572021484, 0.08233036804199219, 0.08210636901855468, 0.08284502410888672, 0.08299132537841797, 0.08295613098144532, 0.0825223388671875, 0.08252934265136719, 0.08299616241455078, 0.08333229064941407, 0.08310157012939454, 0.08235104370117187, 0.08204492950439453, 0.08241350555419921, 0.08251932525634766, 0.08234806060791015, 0.082636962890625, 0.08271318054199218, 0.08259366607666016, 0.08219251251220704, 0.08220854187011718, 0.08236838531494141, 0.08328841400146485, 0.08287225341796875, 0.08270240020751952, 0.08278015899658203, 0.08333023834228516, 0.08282723236083984, 0.08368418884277344, 0.08328316497802735, 0.0832356185913086, 0.08293170928955078, 0.08285794830322266, 0.08280194854736328, 0.08362060546875, 0.0832216033935547, 0.08303913879394531, 0.08349247741699219, 0.08355059051513672, 0.0829071044921875, 0.08385881805419922, 0.08347484588623047, 0.0835647964477539, 0.08346543884277344, 0.08365033721923829, 0.08316207885742187, 0.08352976226806641, 0.08335993957519532, 0.08339641571044921, 0.08341299438476563, 0.08390243530273438, 0.083187744140625, 0.0834244155883789, 0.08313954925537109, 0.08324288177490234, 0.08362188720703125, 0.08402947235107422, 0.08514665222167969, 0.08894134521484375, 0.08393539428710937, 0.0832020492553711, 0.08324095916748046, 0.08450045013427734, 0.08290512084960938, 0.08273715209960937, 0.08364236450195313, 0.08777728271484375, 0.08236198425292969, 0.08243785858154297, 0.0821233901977539, 0.08231263732910156, 0.08257379150390624, 0.08248953247070312, 0.08230092620849609, 0.08277196502685547, 0.08313037109375, 0.08286412811279296, 0.08631295776367187, 0.08314179229736328, 0.08302400207519531, 0.08277680206298828, 0.08301302337646485, 0.08244898986816407, 0.08410297393798828, 0.08331001281738282, 0.0829032974243164, 0.08267616271972657, 0.08284121704101563, 0.08294230651855469, 0.08243619537353515, 0.08298905944824218, 0.08347183990478516, 0.08309129333496093, 0.08308112335205078, 0.08254291534423829, 0.083083740234375, 0.08336383819580079, 
0.08323638153076172, 0.08251468658447265, 0.08296243286132812, 0.08271603393554687, 0.0824876480102539, 0.08267161560058593, 0.0831710433959961, 0.08288678741455079, 0.0830643539428711, 0.08248297882080079, 0.0827257308959961, 0.08335769653320313, 0.08328498840332031, 0.08258457946777344, 0.08287782287597656, 0.08305113220214844, 0.08292684936523438, 0.085178466796875, 0.08368563079833985, 0.0836728973388672, 0.08355900573730468, 0.0831033935546875, 0.08277974700927734, 0.0827844467163086, 0.08322515106201171, 0.08259693145751953, 0.08267052459716796, 0.08254054260253907, 0.08275660705566407, 0.08229993438720704, 0.08286431884765624, 0.08379984283447266, 0.08320358276367187, 0.08278630065917969, 0.08309683227539062, 0.08324582672119141, 0.0828743667602539, 0.0827507553100586, 0.08252694702148437, 0.0824336929321289, 0.08240160369873047, 0.08251395416259766, 0.08324710083007812, 0.08312185668945313, 0.08258502197265626, 0.08268275451660156, 0.08257536315917968, 0.08350486755371093, 0.08350918579101563, 0.08403702545166016, 0.08263980865478515, 0.0826382064819336, 0.08251455688476562, 0.08248320007324218, 0.0828694076538086, 0.0835810546875, 0.08286486053466798, 0.08342301177978516, 0.0836036834716797, 0.08302365112304687, 0.08349005126953125, 0.08321638488769531, 0.08273814392089844, 0.08273715209960937, 0.08344303894042969, 0.08236470031738281, 0.08246089935302735, 0.08290895843505859, 0.08272319793701172, 0.08266278076171875, 0.08277439880371094, 0.08262477111816406, 0.08314031982421875, 0.08285369873046874, 0.08259391784667969, 0.0823315200805664, 0.08248716735839844, 0.08243389129638672, 0.08594662475585937, 0.08390415954589844, 0.08724355316162109, 0.08277565002441406, 0.08269580841064453, 0.08256502532958984, 0.083808349609375, 0.08277897644042968, 0.08275497436523438, 0.08418099212646485, 0.08469564819335937, 0.08257331085205079, 0.08310723114013673, 0.08278486633300781, 0.0829144287109375, 0.08261106872558593, 0.08260198211669922, 0.0823581771850586, 0.08290847778320312, 0.08249517059326172, 0.08250572967529297, 0.08286412811279296, 0.08257536315917968, 0.08262028503417969, 0.08377561950683594, 0.08344371032714844, 0.08389017486572266, 0.08298086547851563, 0.08344166564941406, 0.08254268646240234, 0.0828497314453125, 0.08312611389160156, 0.08281110382080079, 0.08261440277099609, 0.08300316619873047, 0.08285305786132813, 0.08261622619628907, 0.08282927703857422, 0.08313139343261719, 0.0827391357421875, 0.08304640197753907, 0.08241670227050782, 0.0826744613647461, 0.08230009460449218, 0.08230937957763672, 0.08235491180419922, 0.08210022735595703, 0.08226780700683593, 0.08212009429931641, 0.08255760192871094, 0.08291280364990235, 0.08252256011962891, 0.08275385284423828, 0.08262860870361328, 0.08244751739501953, 0.08254924774169922, 0.08234649658203125, 0.08311100769042969, 0.08477362823486329, 0.08265862274169922, 0.08349488067626953, 0.08308911895751953, 0.08289382171630859, 0.08265628814697265, 0.08254752349853516, 0.08240144348144532, 0.08218624114990235, 0.08283273315429687, 0.08233395385742187, 0.08308777618408203, 0.08255078125, 0.08249549102783203, 0.08307218933105469, 0.08272978973388671, 0.08307414245605468, 0.08314755249023438, 0.0823031005859375, 0.08281394958496094, 0.08223548889160157, 0.082681884765625, 0.08286041259765625, 0.08266960144042969, 0.08276361846923828, 0.08222160339355469, 0.08239718627929687, 0.08218418884277344, 0.08268524932861328, 0.08257606506347656, 0.08338829040527344, 0.08243007659912109, 0.0824463348388672, 0.08226201629638671, 0.08238694763183593, 
0.08261561584472656, 0.08215010833740234, 0.08221858978271485, 0.0828542709350586, 0.08279212951660156, 0.08285337829589844, 0.08323974609375, 0.08269945526123047, 0.08310662078857421, 0.0826060791015625, 0.08241356658935547, 0.08276787567138671, 0.0829044189453125, 0.08289756774902343, 0.08262355041503906, 0.08221177673339844, 0.08244223785400391, 0.08261427307128906, 0.08381238555908203, 0.08340067291259766, 0.0826588134765625, 0.08261888122558594, 0.0821739501953125, 0.08226998138427734, 0.0826431655883789, 0.08334499359130859, 0.08365219116210937, 0.08396854400634765, 0.08594182586669921, 0.082662109375, 0.08311398315429687, 0.08426652526855469, 0.0831943359375, 0.08271600341796875, 0.08245724487304687, 0.08585842895507813, 0.08310963439941406, 0.08276185607910157, 0.08289202880859375, 0.08291590118408203, 0.08238495635986329, 0.0826266860961914, 0.0825159683227539, 0.08326560211181641, 0.08284339141845704, 0.08230883026123047, 0.08237308502197266, 0.08229068756103515, 0.08282726287841796, 0.08243116760253906, 0.08248607635498047, 0.08229478454589843, 0.08249753570556641, 0.08292147064208985, 0.08303158569335937, 0.0827518081665039, 0.08264924621582032, 0.0828436508178711, 0.08270428466796875, 0.08269993591308594, 0.08350553894042968, 0.08284550476074219, 0.08256924438476562, 0.08246006774902344, 0.08260095977783204, 0.08251123046875, 0.08255091094970703, 0.08289488220214844, 0.08305062103271485, 0.08245059204101562, 0.08248320007324218, 0.08236441802978516, 0.0827471694946289, 0.0822801284790039, 0.082727294921875, 0.08236224365234375, 0.08270230102539063, 0.08285830688476563, 0.08270233917236328, 0.08250572967529297, 0.08291327667236328, 0.08248761749267577, 0.08389606475830078, 0.08645132446289062, 0.08381427001953125, 0.08361670684814453, 0.08409907531738281, 0.08391270446777344, 0.08351884460449219, 0.08350498962402343, 0.08383702087402344, 0.08323347473144531, 0.08362598419189453, 0.0833064956665039, 0.08326918029785156, 0.08305401611328125, 0.08308838653564453, 0.08367273712158203, 0.08367862701416015, 0.08298745727539063, 0.08306108856201172, 0.08348185729980469, 0.08319398498535156, 0.08325177764892579, 0.08360777282714844, 0.08361779022216796, 0.08331014251708985, 0.08300694274902344, 0.08303715515136718, 0.08321135711669922, 0.08333404541015625, 0.08330585479736329, 0.08343154907226563, 0.08324352264404297]",tokens/s,12.037758775015018,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,6541.090816,3722.313728,0.0,3319.791616,3239.455232,s,1,17.685328125,17.685328125,0.0,17.685328125,17.685328125,17.685328125,17.685328125,[17.685328125],,kWh,0.0002981588049124942,3.288204378898694e-05,9.940507952399535e-05,0.0004304459282254765,,MB,1956.745216,3986.55488,0.0,3563.061248,3472.737792,s,10,0.7374600601196288,0.07374600601196288,0.0015814739250583445,0.07334425354003907,0.07500089111328125,0.0765439826965332,0.07777845596313476,"[0.07328067016601562, 0.07294729614257812, 0.0746579818725586, 0.0734078369140625, 0.07387996673583984, 0.07230912017822265, 0.07280774688720704, 0.07808707427978516, 0.07258911895751953, 0.07349324798583984]",tokens/s,3471.3744356334682,kWh,2.1838487455843886e-06,2.408379451790418e-07,1.3745678492386982e-06,3.7992545400021286e-06,tokens/kWh,67381639.55707389,MB,1965.625344,3986.55488,0.0,3563.061248,3469.309952,s,10,46.22544384765625,4.622544384765624,0.030358017852691887,4.61222216796875,4.672084375,4.67643671875,4.67991859375,"[4.6807890625, 4.6098408203125, 4.6711171875, 4.6312509765625, 4.60647998046875, 4.58633251953125, 4.633548828125, 4.591455078125, 4.614603515625, 4.60002587890625]",tokens/s,13.628857779630444,kWh,0.00013792634157066695,1.521366588059366e-05,5.7739618330960724e-05,0.00021087962578222131,tokens/kWh,298748.6333319895,,s,630,46.22308232116699,0.0733699719383603,0.0008657507969964541,0.07321878433227538,0.07420052871704103,0.07479541130065918,0.07658003715515138,"[0.07349350738525391, 0.07366950225830078, 0.07413145446777344, 0.073478271484375, 0.07362889862060547, 0.0734293441772461, 0.07374601745605469, 0.07491254425048828, 0.07410838317871093, 0.0741013412475586, 0.0739675521850586, 0.07456774139404297, 0.07382374572753907, 0.07637430572509765, 0.07550947570800781, 0.07745782470703125, 0.07612588500976562, 0.07371603393554688, 0.07512882995605469, 0.07363388824462891, 0.07407615661621093, 0.07357440185546875, 0.07350067138671874, 0.07380732727050782, 0.07441462707519532, 0.07382425689697265, 0.07664409637451172, 0.07407609558105469, 0.0777830047607422, 0.07513721466064453, 0.07409232330322266, 0.07398998260498046, 0.07442076873779296, 0.0761363525390625, 0.07404895782470704, 0.07367334747314454, 0.07410691070556641, 0.07384063720703125, 0.07579853057861329, 0.07477452850341797, 0.07397990417480468, 0.07353958129882812, 0.07384848022460938, 0.0747441635131836, 0.07423571014404297, 0.07351513671875, 0.0733921890258789, 0.07394505310058594, 0.07337577819824219, 0.07336774444580078, 0.07431759643554688, 0.07457794952392578, 0.07423580932617188, 0.07370082855224609, 0.07391641235351562, 0.07335794830322266, 0.07423590087890625, 0.0736371841430664, 0.07448646545410156, 0.07374057769775391, 0.07349014282226562, 0.07411097717285156, 0.07408172607421876, 0.07288774108886718, 0.07349043273925782, 0.07311801910400391, 0.07276953887939454, 0.07298457336425782, 0.07328108978271484, 0.07333952331542969, 0.07280748748779296, 0.0729239044189453, 0.07338787078857421, 0.07358889770507812, 0.07308902740478515, 0.07298457336425782, 0.07308322906494141, 0.07359248352050782, 0.07300710296630859, 0.07310707092285157, 0.07335298919677734, 0.07330364990234375, 0.07301123046875, 0.07364297485351562, 0.07291200256347656, 0.07278192138671875, 0.07243446350097656, 0.07267721557617188, 0.07252320098876953, 0.07268854522705079, 0.07363932800292969, 0.07392681884765626, 0.07315245056152343, 0.07318489837646484, 0.07305721282958984, 0.07348812866210938, 0.07263337707519531, 0.07437548828125, 
0.07331097412109375, 0.07270774078369141, 0.07248722839355469, 0.07313606262207031, 0.07518627166748047, 0.07346959686279297, 0.07296790313720704, 0.07369344329833985, 0.07296028900146484, 0.07278559875488282, 0.0724842529296875, 0.0724625244140625, 0.07239753723144532, 0.07272447967529297, 0.07327948760986328, 0.0729456329345703, 0.07306857299804688, 0.0731933135986328, 0.07315267181396484, 0.0733240966796875, 0.07352365112304687, 0.0736377944946289, 0.07318685150146484, 0.07339884948730468, 0.07334864044189453, 0.07321852874755859, 0.07347360229492188, 0.07382441711425781, 0.07411507415771484, 0.07417871856689454, 0.07445311737060546, 0.07355174255371094, 0.0739082260131836, 0.07421046447753907, 0.07498838043212891, 0.07384579467773437, 0.07375971221923829, 0.07361891174316407, 0.07361795043945313, 0.07347097778320312, 0.07349350738525391, 0.07433001708984376, 0.07409209442138671, 0.0740695343017578, 0.07354790496826172, 0.07386841583251953, 0.07361714935302735, 0.07404978942871093, 0.0736416015625, 0.07732774353027344, 0.07428173065185546, 0.07476838684082031, 0.07402086639404297, 0.07319116973876953, 0.07443084716796874, 0.07378460693359375, 0.0737286376953125, 0.07335065460205079, 0.07342086029052734, 0.0734044189453125, 0.07357215881347656, 0.07352716827392577, 0.07869718170166015, 0.07383606719970703, 0.07409471893310547, 0.07352764892578124, 0.07333683013916016, 0.07348633575439453, 0.07361126708984375, 0.07932006072998046, 0.07466291046142579, 0.07375667572021484, 0.07388588714599609, 0.07483372497558594, 0.07521075439453125, 0.07491609954833985, 0.07374368286132812, 0.07357075500488282, 0.07373619079589844, 0.0747930908203125, 0.07378438568115234, 0.07417734527587891, 0.07467008209228515, 0.07457382202148438, 0.07360495758056641, 0.07355359649658202, 0.07342707061767578, 0.0741085433959961, 0.07362019348144531, 0.0736358413696289, 0.07347187042236328, 0.07369126129150391, 0.07356195068359375, 0.07365708923339843, 0.07340195465087891, 0.07348675537109375, 0.07479730987548829, 0.0742581787109375, 0.07379484558105469, 0.07401136016845702, 0.07362668609619141, 0.07402969360351562, 0.07374470520019531, 0.07364569854736328, 0.07344745635986329, 0.07323878479003906, 0.07332243347167969, 0.07335247802734375, 0.07395011138916016, 0.07442406463623047, 0.07422547149658203, 0.0736014404296875, 0.07334706878662109, 0.07348429107666016, 0.07353548431396484, 0.07358483123779297, 0.07335711669921875, 0.07368902587890624, 0.07311955261230468, 0.07347225952148438, 0.07308697509765626, 0.07264665222167968, 0.07281664276123047, 0.07313104248046876, 0.07352828979492188, 0.07352114868164063, 0.07326934051513671, 0.07394841766357421, 0.07369385528564452, 0.07346182250976563, 0.07618476867675782, 0.0739315185546875, 0.0731250228881836, 0.07303433227539062, 0.07342924499511719, 0.07297564697265625, 0.073552734375, 0.07337152099609374, 0.07388774108886718, 0.07343711853027343, 0.0737109146118164, 0.07330892944335937, 0.07309747314453124, 0.07309436798095703, 0.07267340850830079, 0.07278966522216797, 0.07274540710449219, 0.07271475219726563, 0.07258029174804688, 0.07265753936767579, 0.0727956771850586, 0.07361788940429688, 0.0745134048461914, 0.07381484985351562, 0.07307158660888671, 0.0732938232421875, 0.07325606536865234, 0.07303667449951172, 0.07340758514404297, 0.07346495819091797, 0.07335708618164062, 0.07321600341796874, 0.0732541732788086, 0.07419942474365235, 0.0735665283203125, 0.07388915252685548, 0.07382819366455078, 0.0733130874633789, 0.0729722900390625, 0.07332659149169922, 
0.0728309783935547, 0.07244185638427734, 0.07268704223632813, 0.07322844696044922, 0.0726470718383789, 0.07285555267333985, 0.07283430480957032, 0.07314921569824219, 0.07250713348388672, 0.07272463989257813, 0.07418476867675781, 0.0728084487915039, 0.07297948455810546, 0.07615177917480469, 0.0740495376586914, 0.0730943374633789, 0.07292396545410157, 0.07297216033935547, 0.07404761505126953, 0.07258956909179687, 0.07307564544677735, 0.07292368316650391, 0.07300080108642579, 0.07330348968505859, 0.07302553558349609, 0.07270217895507812, 0.07502313232421876, 0.07269757080078125, 0.07277974700927735, 0.07246060943603516, 0.07220633697509765, 0.07234355163574219, 0.07234566497802734, 0.07422354888916016, 0.07326934051513671, 0.0726932144165039, 0.07300262451171875, 0.07294774627685546, 0.07401913452148437, 0.07307500457763672, 0.07262799835205078, 0.07270432281494141, 0.07231084442138672, 0.07248470306396484, 0.07236764526367187, 0.07223081970214844, 0.07225939178466798, 0.07226348876953125, 0.074342529296875, 0.0721734390258789, 0.07234931182861327, 0.0732757797241211, 0.07303571319580078, 0.07300038146972657, 0.0729012451171875, 0.07256678771972656, 0.07271775817871094, 0.0726267547607422, 0.07263795471191406, 0.07254067230224609, 0.07279001617431641, 0.07319347381591797, 0.07248623657226562, 0.07235855865478516, 0.0729886703491211, 0.07303577423095703, 0.07248281860351563, 0.07268105316162109, 0.07306896209716797, 0.07232102203369141, 0.07237577819824219, 0.07246089935302734, 0.07253517150878906, 0.0726063995361328, 0.0732200927734375, 0.07268978881835937, 0.07229763031005859, 0.07225020599365234, 0.07274086761474609, 0.07271424102783203, 0.07298252868652344, 0.07328153228759765, 0.07275724792480469, 0.07243357086181641, 0.0724524154663086, 0.07223007965087891, 0.07269436645507812, 0.07268470764160156, 0.0729915542602539, 0.07272876739501953, 0.07285763549804687, 0.07279100799560546, 0.07334588623046875, 0.07282399749755859, 0.07300569915771485, 0.0731435546875, 0.07262448120117188, 0.07276399993896485, 0.07242150115966797, 0.07297654724121094, 0.07267504119873047, 0.07264851379394531, 0.07413539123535157, 0.0732040023803711, 0.07307504272460938, 0.07299238586425781, 0.07301538848876953, 0.07271654510498046, 0.07299251556396484, 0.07292534637451172, 0.07295804595947265, 0.0729599380493164, 0.07239398193359375, 0.07263452911376952, 0.0726779556274414, 0.07381529235839844, 0.07335420989990235, 0.07341852569580078, 0.07381724548339844, 0.07333964538574218, 0.07352738952636718, 0.07368089294433594, 0.07311360168457032, 0.07278112030029296, 0.07321903991699219, 0.07309085083007813, 0.0732544937133789, 0.07302588653564453, 0.07407001495361328, 0.07333238220214844, 0.0737938232421875, 0.07900128173828125, 0.0739691162109375, 0.07351593780517578, 0.0736727066040039, 0.07422566223144532, 0.07354895782470704, 0.07365923309326172, 0.07362969970703125, 0.0740126724243164, 0.07329958343505859, 0.07363827514648437, 0.07370252990722656, 0.07273526763916016, 0.07261164855957031, 0.07292726135253906, 0.07259391784667969, 0.07298265838623047, 0.07480716705322266, 0.07608729553222657, 0.07535616302490235, 0.07310336303710938, 0.07283916473388671, 0.0741928939819336, 0.0729623031616211, 0.07288751983642579, 0.07280284881591798, 0.07334889221191407, 0.07263257598876953, 0.07305622100830078, 0.07282688140869141, 0.07543612670898438, 0.07307225799560547, 0.07333494567871093, 0.07287324523925781, 0.07256352233886719, 0.07642320251464843, 0.07334092712402343, 0.07429494476318359, 0.07325321960449219, 
0.07276678466796875, 0.07318937683105468, 0.073859619140625, 0.07327855682373047, 0.0731566390991211, 0.07294361877441406, 0.0732429428100586, 0.07299954986572266, 0.07275507354736328, 0.07277974700927735, 0.0727795181274414, 0.0727918701171875, 0.0728887710571289, 0.0726448974609375, 0.07267504119873047, 0.07324995422363281, 0.07286665344238281, 0.07276544189453125, 0.07291289520263672, 0.07275698852539063, 0.07316505432128906, 0.07327561950683593, 0.07283484649658203, 0.07269529724121093, 0.07262493133544921, 0.07269120025634766, 0.07242569732666015, 0.07301097869873047, 0.07284674835205078, 0.07279052734375, 0.07330668640136719, 0.07239449310302734, 0.07250125122070313, 0.07283478546142579, 0.07316508483886719, 0.07268515014648437, 0.0724005126953125, 0.0729053726196289, 0.07308710479736329, 0.07329353332519531, 0.07274489593505859, 0.0727801284790039, 0.07260934448242187, 0.0726611557006836, 0.0731017608642578, 0.07328342437744141, 0.07333888244628907, 0.07362355041503907, 0.07379929351806641, 0.07341094207763672, 0.07258937835693359, 0.07254962921142578, 0.07255657958984375, 0.07333135986328125, 0.07269785308837891, 0.072840576171875, 0.07286540985107422, 0.07247100830078125, 0.07261833953857422, 0.07251753234863281, 0.07249129486083984, 0.07269785308837891, 0.07281254577636719, 0.07293337249755859, 0.07295094299316406, 0.07250409698486328, 0.07314383697509766, 0.07352182769775391, 0.07292518615722657, 0.07307469177246094, 0.0727061767578125, 0.07247245025634766, 0.07256598663330079, 0.07238326263427734, 0.07298818969726563, 0.0727162857055664, 0.07343132781982421, 0.07284464263916016, 0.07286051177978516, 0.07301734161376953, 0.07317014312744141, 0.07318608093261719, 0.07316630554199219, 0.07335171508789062, 0.07286784362792968, 0.0727040023803711, 0.07297379302978516, 0.07313164520263672, 0.07357737731933593, 0.07392460632324219, 0.07307469177246094, 0.07270195007324219, 0.0728453140258789, 0.07290876770019532, 0.072604736328125, 0.07274185943603516, 0.07265494537353516, 0.07279827117919922, 0.07280009460449219, 0.07264361572265625, 0.07283999633789062, 0.07294124603271485, 0.07340873718261719, 0.07337395477294922, 0.07363174438476562, 0.07354064178466797, 0.07351805114746093, 0.07339788818359375, 0.0732204818725586, 0.07349664306640626, 0.07360626983642578, 0.07359366607666015, 0.07461682891845703, 0.07493222045898437, 0.07619993591308594, 0.07345101165771484, 0.07324928283691406, 0.07263254547119141, 0.0741138916015625, 0.07331116485595703, 0.07402063751220703, 0.07308921813964844, 0.072974365234375, 0.07275529479980469, 0.07296399688720703, 0.07295999908447266, 0.07341056060791015, 0.07597046661376954, 0.07267475128173828, 0.0730440673828125, 0.0728880615234375, 0.07416191864013671, 0.0733120346069336, 0.07329609680175782, 0.07284333038330078, 0.07276976013183593, 0.07396578979492187, 0.07384371185302735, 0.0732108154296875, 0.07309327697753906, 0.0729908447265625, 0.07303497314453125, 0.07332621002197266, 0.07299372863769531, 0.07371561431884766, 0.07325286102294921, 0.07332054138183594, 0.07302098846435547, 0.07274889373779297, 0.07293807983398437, 0.07333599853515625, 0.07299359893798828, 0.07273267364501954, 0.07290573120117187, 0.0724858856201172, 0.07259677124023438, 0.07276358032226563, 0.07308758544921876, 0.0727633285522461, 0.07244390106201172, 0.07268067169189453, 0.07255657958984375, 0.07242617797851562, 0.07263385772705078, 0.07315718078613281, 0.07266460418701172, 0.07257449340820313, 0.07317561340332031, 0.07312627410888672, 0.07337369537353515, 
0.07264460754394532, 0.0725032958984375, 0.0730902099609375, 0.07357936096191406, 0.07405951690673829, 0.07326335906982422, 0.07264380645751953, 0.07282473754882812, 0.07275532531738281, 0.0732679672241211, 0.07343920135498047, 0.07418800354003906, 0.07303199768066407, 0.07264921569824219, 0.0725091552734375, 0.07256707000732422, 0.07255859375, 0.07247257232666016, 0.07316995239257812, 0.07240188598632813, 0.07257516479492188, 0.07336531066894532]",tokens/s,13.629554074794,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,15376.744448,9514.647552,0.0,9112.12544,9086.72256,s,1,33.31555859375,33.31555859375,0.0,33.31555859375,33.31555859375,33.31555859375,33.31555859375,[33.31555859375],,kWh,0.000764862871675003,8.43626858855212e-05,0.0002478715871860049,0.001097097144746529,,MB,4038.197248,9692.905472,0.0,9269.41184,9235.912192,s,10,1.1804213790893554,0.11804213790893554,0.0005922263773105904,0.11788027191162109,0.1188023567199707,0.11896026802062988,0.11908659706115723,"[0.1180063705444336, 0.11868697357177735, 0.11790777587890625, 0.11755561828613281, 0.11911817932128907, 0.11876726531982422, 0.11776287841796874, 0.11785276794433594, 0.11766969299316406, 0.11709385681152344]",tokens/s,2168.7170745542835,kWh,3.462545903627478e-06,3.8185594889759925e-07,2.2911390878115465e-06,6.135540940336623e-06,tokens/kWh,41724112.427804075,MB,4044.259328,9705.488384,0.0,9281.994752,9177.194496,s,10,74.47744921875001,7.447744921875001,0.029943863231243324,7.45096142578125,7.46855654296875,7.48920380859375,7.50572162109375,"[7.4355009765625, 7.46303466796875, 7.4578564453125, 7.4606015625, 7.50985107421875, 7.46396826171875, 7.44406640625, 7.43447314453125, 7.41369970703125, 7.39439697265625]",tokens/s,8.458936317080457,kWh,0.00021609046094845407,2.3835823360342875e-05,9.980985925958984e-05,0.0003397361435683867,tokens/kWh,185438.02651753626,,s,630,74.47504272460935,0.118214353531126,0.001086583657644455,0.11801612854003907,0.11954604797363282,0.12045656776428222,0.121962587890625,"[0.1181063003540039, 0.11838998413085937, 0.11793231964111328, 0.11739740753173829, 0.11715030670166016, 0.11728256225585937, 0.11818624114990234, 0.11747052764892578, 0.11666835021972656, 0.11683097839355469, 0.11757568359375, 0.1210777587890625, 0.11819401550292968, 0.11842736053466797, 0.11772672271728515, 0.11868870544433593, 0.117095458984375, 0.11723260498046875, 0.11792384338378906, 0.11792822265625, 0.11722108459472656, 0.11823241424560547, 0.11786067199707032, 0.11888674926757813, 0.11878377532958985, 0.1177213134765625, 0.11727037048339843, 0.11715750122070312, 0.11772367858886719, 0.11770684814453125, 0.11784796905517578, 0.11763283538818359, 0.11771718597412109, 0.11733760070800782, 0.11801615905761718, 0.11706594848632812, 0.11758783721923828, 0.11813913726806641, 0.11842108917236328, 0.11816182708740235, 0.11804694366455078, 0.11771062469482421, 0.11799539184570312, 
0.11771699523925781, 0.11790962982177734, 0.11790541076660156, 0.11805286407470703, 0.11843164825439453, 0.11766588592529297, 0.11796873474121093, 0.11750582122802734, 0.11838838195800781, 0.11923324584960937, 0.1177702407836914, 0.12057295989990234, 0.11872927856445313, 0.11819664001464844, 0.11923776245117187, 0.11830073547363282, 0.11772415924072266, 0.11751507568359375, 0.12134486389160157, 0.1175709457397461, 0.11807933044433594, 0.11864742279052734, 0.12017664337158203, 0.11831501007080078, 0.11817574310302735, 0.11828633880615234, 0.11847987365722656, 0.11836723327636718, 0.11906253051757812, 0.11836211395263672, 0.11922431945800781, 0.11923388671875, 0.12146959686279298, 0.11859967803955078, 0.1187430419921875, 0.11901734161376953, 0.11854656219482422, 0.11898611450195312, 0.11851136016845704, 0.11910233306884765, 0.11916287994384765, 0.11873075103759766, 0.11818096160888672, 0.11920066833496094, 0.11975475311279297, 0.11898675537109375, 0.11813683319091797, 0.11885088348388671, 0.11755939483642579, 0.11854064178466797, 0.11758195495605468, 0.11941305541992188, 0.11838854217529297, 0.11857305908203125, 0.11806221008300781, 0.11801689910888671, 0.11751420593261719, 0.11800569915771485, 0.11698595428466797, 0.11723168182373046, 0.11727638244628906, 0.11799539184570312, 0.12330429077148437, 0.11764054107666015, 0.11768899536132812, 0.11736473846435547, 0.11726847839355468, 0.11691110229492188, 0.11688240051269531, 0.11706985473632812, 0.11684812927246094, 0.12155670166015625, 0.11773007965087891, 0.11756543731689453, 0.12131942749023437, 0.11727667236328125, 0.11700633239746094, 0.1173807373046875, 0.11737257385253906, 0.11731017303466797, 0.1220505599975586, 0.1179152603149414, 0.11777267456054688, 0.11743846130371094, 0.11800096130371093, 0.11774179077148438, 0.11802419281005859, 0.11774188995361329, 0.1186244125366211, 0.11840681457519531, 0.11804093170166016, 0.11799346923828125, 0.11896217346191407, 0.11899430084228516, 0.11857968139648438, 0.11804278564453125, 0.11789212799072266, 0.11841769409179688, 0.11815801239013672, 0.11800166320800781, 0.12089737701416016, 0.1183561248779297, 0.11815670776367188, 0.11798092651367187, 0.11811312103271485, 0.1185498275756836, 0.11885225677490234, 0.11794825744628906, 0.11771302032470703, 0.1177291488647461, 0.11726825714111327, 0.11744630432128907, 0.11716275024414062, 0.11706294250488282, 0.11878880310058594, 0.11833097839355469, 0.12270790100097656, 0.11742806243896485, 0.11917993927001953, 0.11848659515380859, 0.1178485107421875, 0.11727641296386719, 0.11758617401123046, 0.11797913360595703, 0.12036300659179687, 0.11872051239013671, 0.11818803405761719, 0.11773747253417968, 0.11873894500732422, 0.11741184234619141, 0.11817574310302735, 0.11791721343994141, 0.11837078094482421, 0.11813887786865235, 0.11833097839355469, 0.1202548828125, 0.11903078460693359, 0.11930716705322265, 0.11894985961914062, 0.1195050277709961, 0.11867340850830078, 0.11883920288085938, 0.11870793914794922, 0.11903014373779297, 0.11765555572509766, 0.11767135620117188, 0.11817842864990234, 0.11789516448974609, 0.11802806091308594, 0.11777206420898438, 0.11741423797607421, 0.11732553863525391, 0.11726681518554688, 0.118212158203125, 0.11786819458007812, 0.11795299530029296, 0.11745059204101563, 0.11841993713378907, 0.11765081787109374, 0.11721177673339844, 0.11758377838134766, 0.11778057861328126, 0.11799887847900391, 0.11818819427490235, 0.11771552276611329, 0.11961344146728516, 0.11877785491943359, 0.11792578887939453, 0.11837375640869141, 0.11829936218261719, 
0.11750790405273437, 0.1176329574584961, 0.11827839660644532, 0.11756944274902344, 0.11833964538574218, 0.11962089538574219, 0.117698974609375, 0.11938441467285156, 0.11938979339599609, 0.1177146224975586, 0.11781785583496093, 0.1178636474609375, 0.1179310073852539, 0.1191363525390625, 0.12078889465332031, 0.11820195007324219, 0.11785462188720704, 0.11737190246582031, 0.11715071868896484, 0.11769801330566407, 0.11713571166992187, 0.11794656372070313, 0.1183662109375, 0.12218911743164063, 0.1184181137084961, 0.11897628784179687, 0.11828451538085938, 0.11884041595458984, 0.11876783752441407, 0.12068438720703124, 0.12155375671386719, 0.11953561401367188, 0.11922022247314454, 0.11871231842041016, 0.11943116760253907, 0.11925299072265624, 0.11882291412353516, 0.11905964660644532, 0.1192845458984375, 0.1197925796508789, 0.11978342437744141, 0.11956243133544922, 0.1186827163696289, 0.11918819427490235, 0.11863568115234376, 0.11843606567382813, 0.11856893157958984, 0.12189762878417969, 0.11948441314697265, 0.11887615966796874, 0.11868978881835937, 0.11858710479736329, 0.11964854431152344, 0.11948381042480469, 0.12038819122314454, 0.12054937744140624, 0.12051251220703126, 0.1200223388671875, 0.11954422760009766, 0.11915907287597656, 0.11970121765136718, 0.11863849639892578, 0.11860211181640624, 0.11879993438720703, 0.11871891021728516, 0.11819213104248047, 0.11823712158203124, 0.12031187438964844, 0.1183061752319336, 0.11848665618896484, 0.11863529968261718, 0.11909552001953125, 0.1185885467529297, 0.11893030548095704, 0.11921408081054688, 0.11910272216796874, 0.1195261459350586, 0.1197973403930664, 0.11977155303955078, 0.1197649917602539, 0.12198912048339844, 0.12152783966064454, 0.11965283203125, 0.11962163543701172, 0.11855052947998047, 0.11853004455566406, 0.11829452514648438, 0.11795661163330078, 0.11919564819335937, 0.11783776092529297, 0.1203380126953125, 0.11892336273193359, 0.11806963348388672, 0.11807849884033203, 0.11785724639892578, 0.11790054321289062, 0.11851427459716797, 0.12122537231445313, 0.1189755859375, 0.11900406646728516, 0.11871753692626953, 0.11891395568847657, 0.11976051330566406, 0.11856825256347656, 0.11870207977294922, 0.11884726715087891, 0.11992991638183594, 0.11990102386474609, 0.11812850952148438, 0.11828771209716797, 0.11805315399169922, 0.11834419250488282, 0.1206982421875, 0.1186794891357422, 0.11839968109130859, 0.11823433685302734, 0.11853699493408203, 0.11854233551025391, 0.11815116882324218, 0.12000460815429688, 0.11936540985107422, 0.11892758178710937, 0.11869961547851562, 0.11809795379638671, 0.11744678497314454, 0.11750406646728516, 0.11806297302246094, 0.11754528045654297, 0.11773133087158204, 0.11750508880615235, 0.1207100830078125, 0.11931407928466797, 0.11931468963623047, 0.11922815704345703, 0.118153564453125, 0.11818582153320313, 0.11872444915771484, 0.11776764678955078, 0.11812131500244141, 0.11843167877197265, 0.12011955261230468, 0.11817759704589843, 0.11845017242431641, 0.11777433776855468, 0.11728281402587891, 0.11804876708984376, 0.11757977294921874, 0.11823104095458985, 0.11803648376464844, 0.11762483215332031, 0.1177426528930664, 0.11866928100585937, 0.12056060791015626, 0.11898255920410156, 0.11747113800048828, 0.11737417602539063, 0.11789615631103516, 0.1181777572631836, 0.11824924468994141, 0.11835359954833985, 0.11823967742919922, 0.11810944366455078, 0.118401123046875, 0.11783452606201172, 0.11774102020263671, 0.11810816192626954, 0.11758182525634765, 0.11819779205322266, 0.11816802978515625, 0.11829248046875, 0.11829430389404297, 
0.11932080078125, 0.12014112091064454, 0.11914720153808593, 0.11835206604003906, 0.11852706909179687, 0.11814985656738282, 0.11757692718505859, 0.1172303695678711, 0.12262220764160156, 0.11873049926757813, 0.11825766754150391, 0.11818803405761719, 0.11783577728271484, 0.1181286392211914, 0.11837395477294922, 0.11774553680419922, 0.11776057434082031, 0.11752857971191406, 0.11852185821533204, 0.11887174224853515, 0.1179361572265625, 0.11785858917236328, 0.11835113525390625, 0.11759894561767578, 0.11919974517822265, 0.11799529266357422, 0.11861628723144531, 0.11844608306884766, 0.11801538848876954, 0.11778467559814453, 0.1180263671875, 0.11755494689941406, 0.11785794830322266, 0.1173862075805664, 0.11754291534423827, 0.11860761260986329, 0.11793023681640626, 0.11712921905517579, 0.11799244689941406, 0.11768115234375, 0.11816960144042969, 0.11768627166748047, 0.11841535949707031, 0.1198116455078125, 0.11864096069335937, 0.1180079345703125, 0.11740774536132813, 0.11749721527099609, 0.1174677734375, 0.11794598388671874, 0.11823961639404297, 0.11755027008056641, 0.11808185577392578, 0.11760201263427734, 0.11730204772949218, 0.11746304321289062, 0.11738111877441407, 0.11793817901611328, 0.11746288299560546, 0.11757584381103516, 0.11771289825439453, 0.1173381118774414, 0.11709849548339844, 0.11751974487304688, 0.11740428924560548, 0.118998046875, 0.11777059173583984, 0.11733670043945313, 0.11780863952636719, 0.11801609802246094, 0.11760066986083985, 0.11739750671386719, 0.1174835205078125, 0.11799756622314453, 0.11814067077636718, 0.11771942138671875, 0.11736259460449219, 0.11766553497314453, 0.11712876892089844, 0.11737673950195313, 0.11763398742675782, 0.11715583801269532, 0.1178766098022461, 0.11789324951171876, 0.1191990737915039, 0.1213419189453125, 0.11923731231689454, 0.1184582748413086, 0.11801334381103516, 0.11757638549804687, 0.11755859375, 0.1179993896484375, 0.12102543640136719, 0.11841069030761718, 0.11789936065673828, 0.11738944244384765, 0.117399169921875, 0.11818284606933593, 0.11739631652832032, 0.11747833251953126, 0.11758386993408203, 0.11718278503417968, 0.11787026977539063, 0.11919692993164062, 0.11708828735351562, 0.11778326416015625, 0.12206515502929688, 0.11790898895263671, 0.11902912139892578, 0.11889548492431641, 0.11901741027832032, 0.11833708953857422, 0.11713097381591797, 0.11739142608642578, 0.11670396423339843, 0.11678323364257813, 0.11788227081298829, 0.1169883804321289, 0.12074703979492188, 0.11767459106445312, 0.11710470581054687, 0.11721244812011719, 0.11649295806884766, 0.11638579559326172, 0.11638729858398437, 0.11987974548339844, 0.11825730895996094, 0.11928876495361328, 0.11773951721191406, 0.11727648162841797, 0.11728470611572266, 0.11688694763183594, 0.11745990753173828, 0.11724752044677735, 0.1167642593383789, 0.11785919952392578, 0.11706192016601563, 0.11716169738769532, 0.11730534362792969, 0.11728876495361328, 0.11639030456542969, 0.1173911361694336, 0.11722547149658204, 0.11766556549072266, 0.11834703826904297, 0.11742713928222656, 0.11760025787353516, 0.11758386993408203, 0.12121907043457031, 0.11855232238769531, 0.11815328216552734, 0.11762911987304688, 0.11829357147216797, 0.11783641815185547, 0.11742649841308593, 0.11765350341796875, 0.11733197021484375, 0.11759820556640625, 0.11752448272705078, 0.11737702178955078, 0.11736678314208984, 0.11803443145751953, 0.11747532653808594, 0.1177149429321289, 0.1175920639038086, 0.11743852996826172, 0.1198647689819336, 0.11950745391845703, 0.11852700805664063, 0.12134681701660156, 0.11791792297363281, 
0.1172459487915039, 0.11771289825439453, 0.11694284820556641, 0.11687017822265625, 0.11624297332763672, 0.1205284194946289, 0.11708460998535156, 0.11780172729492187, 0.11665789031982422, 0.1163666229248047, 0.11650265502929688, 0.11614883422851563, 0.11657421112060547, 0.11747052764892578, 0.11718521881103515, 0.11772902679443359, 0.1169903335571289, 0.11716934204101563, 0.11625337219238281, 0.11637564849853516, 0.116529052734375, 0.12080070495605469, 0.11844051361083985, 0.11814889526367188, 0.11814729309082031, 0.11825357055664062, 0.11782553863525391, 0.11769475555419921, 0.11747913360595703, 0.1172531509399414, 0.11690668487548828, 0.11760463714599609, 0.11866521453857422, 0.11828451538085938, 0.11879331207275391, 0.11670950317382812, 0.11648825836181641, 0.1166402587890625, 0.11984893035888672, 0.11726646423339844, 0.11847177886962891, 0.11724892425537109, 0.1171230697631836, 0.11675405120849609, 0.11686140441894531, 0.11676662445068359, 0.11660902404785156, 0.11699404907226563, 0.11745484924316406, 0.11809613037109375, 0.11728163146972656, 0.11729603576660157, 0.11679475402832032, 0.11635161590576172, 0.1163325424194336, 0.11638988494873047, 0.11665782165527344, 0.11974691009521485, 0.1171836166381836, 0.11784591674804687, 0.11763228607177735, 0.11689500427246094, 0.11680194854736328, 0.11641808319091797, 0.11681839752197265, 0.11742822265625, 0.11792793273925781, 0.11715583801269532, 0.11802419281005859, 0.11701219177246094, 0.1168400650024414, 0.1169082260131836, 0.11673442840576172, 0.11654662322998047, 0.11723782348632812]",tokens/s,8.459209648654879,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 100204 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,13838.798848,7509.77024,0.0,7107.248128,7106.945536,s,1,32.6133359375,32.6133359375,0.0,32.6133359375,32.6133359375,32.6133359375,32.6133359375,[32.6133359375],,kWh,0.0007276733125083182,8.026045410247235e-05,0.00023517491036199878,0.0010431086769727893,,MB,1303.724032,7786.594304,0.0,7363.100672,7335.695872,s,10,1.1502250900268556,0.11502250900268556,0.000801368798413419,0.11492712020874024,0.115746883392334,0.1163005771636963,0.11674353218078613,"[0.1149552001953125, 0.11556224060058594, 0.1168542709350586, 0.11400077056884765, 0.11468192291259766, 0.11502419281005859, 0.11489904022216797, 0.11464627075195312, 0.11397734069824218, 0.11562384033203126]",tokens/s,2225.65132876751,kWh,3.370642769827295e-06,3.7171705845756007e-07,2.2406020479081113e-06,5.982961876192966e-06,tokens/kWh,42788171.69446783,MB,1327.161344,7786.594304,0.0,7363.100672,7289.561088,s,10,72.42316357421875,7.242316357421875,0.012998121750292666,7.239060058593751,7.260198828125,7.263545703125,7.266223203125,"[7.266892578125, 7.2355869140625, 7.2266337890625, 7.259455078125, 7.23674853515625, 7.22830224609375, 7.23006884765625, 7.24137158203125, 7.24508154296875, 7.2530224609375]",tokens/s,8.698874350529861,kWh,0.00021163847418100197,2.33447003966836e-05,9.317411317269191e-05,0.0003281572877503775,tokens/kWh,191981.10891239083,,s,630,72.4203433456421,0.11495292594546365,0.0009449285035989159,0.11469867324829101,0.11597210540771484,0.11675631523132324,0.11836126304626465,"[0.11782553863525391, 0.11512627410888672, 0.11507647705078125, 0.11500755310058594, 0.11545433807373047, 0.11517724609375, 0.11544412994384766, 0.11563619232177734, 0.11510991668701172, 
0.11617616271972656, 0.11504029083251953, 0.11527442932128906, 0.11553587341308594, 0.11556617736816406, 0.11529241943359375, 0.11529814147949219, 0.11518495941162109, 0.11558710479736328, 0.11583564758300781, 0.1156180191040039, 0.11655075073242188, 0.11510598754882813, 0.11542339324951172, 0.11478630065917969, 0.11443666839599609, 0.11469414520263672, 0.11440057373046875, 0.11536659240722656, 0.11427830505371094, 0.11512226867675782, 0.11485794830322266, 0.11479225921630859, 0.11471222686767578, 0.1171782684326172, 0.11548451232910156, 0.11577219390869141, 0.11519593811035156, 0.11518486022949219, 0.11521910095214843, 0.11538995361328125, 0.11670572662353515, 0.11688365173339844, 0.11548569488525391, 0.11510272216796875, 0.11655782318115235, 0.11476140594482422, 0.11456748962402344, 0.11524095916748046, 0.1183985595703125, 0.11441398620605468, 0.11466957092285156, 0.11425791931152343, 0.11466719818115234, 0.11504467010498047, 0.11468179321289063, 0.11444025421142578, 0.1149368667602539, 0.11445346832275391, 0.11464841461181641, 0.11597216033935547, 0.11589027404785156, 0.11575552368164063, 0.11484063720703125, 0.11591065979003906, 0.1151651840209961, 0.11572812652587891, 0.11550131225585937, 0.11471871948242188, 0.11478793334960938, 0.11555267333984375, 0.11465727996826172, 0.11469414520263672, 0.11448729705810547, 0.11419417572021484, 0.11415577697753906, 0.11431449890136719, 0.11425868988037109, 0.11433984375, 0.1158221435546875, 0.1144181137084961, 0.1145401611328125, 0.11432588958740235, 0.11801958465576172, 0.11467369842529297, 0.1142952651977539, 0.11426815795898437, 0.11703910064697266, 0.11522396850585938, 0.11440959930419922, 0.11424610900878907, 0.11518771362304688, 0.1141944351196289, 0.11445043182373046, 0.11440486145019531, 0.11445708465576172, 0.11467571258544922, 0.11470848083496094, 0.11458866882324219, 0.11409705352783203, 0.11393833923339844, 0.1140799331665039, 0.11406950378417968, 0.11802937316894531, 0.1145739517211914, 0.11582495880126953, 0.11664787292480469, 0.11464889526367188, 0.11435552215576172, 0.11428141021728516, 0.11456428527832031, 0.11441439819335937, 0.11453836822509765, 0.11473932647705078, 0.11486208343505859, 0.11493580627441406, 0.11429033660888673, 0.11462041473388672, 0.11414720153808594, 0.11436489868164063, 0.11628953552246094, 0.11466928100585938, 0.11489923095703125, 0.11465670776367187, 0.11497116851806641, 0.11479615783691406, 0.11456310272216796, 0.11600934600830078, 0.11491942596435546, 0.1144606704711914, 0.11499314880371093, 0.11586150360107422, 0.11510374450683594, 0.11567922973632813, 0.11639807891845703, 0.11483302307128906, 0.11547071838378906, 0.11427766418457032, 0.1144327392578125, 0.11501773071289062, 0.11435782623291016, 0.11478399658203126, 0.11419513702392578, 0.1146030731201172, 0.11430726623535156, 0.11477833557128907, 0.11431314849853516, 0.1147906265258789, 0.11484140777587891, 0.11430899047851563, 0.11500409698486327, 0.1142824935913086, 0.11532288360595704, 0.11427145385742188, 0.11447171020507813, 0.11430825805664062, 0.11468476867675781, 0.11466316986083984, 0.11423677062988281, 0.11487705230712891, 0.11417424011230469, 0.11423849487304688, 0.1140459213256836, 0.11420819091796874, 0.11486061096191406, 0.11489250946044922, 0.11416809844970703, 0.11428044891357422, 0.11469107055664063, 0.11748611450195312, 0.11419081878662109, 0.11402374267578125, 0.1141562271118164, 0.11604563140869141, 0.11612384033203126, 0.11449753570556641, 0.11376844787597656, 0.11398659515380859, 0.11421590423583984, 0.11443405151367188, 
0.11456259155273438, 0.11387455749511718, 0.11489161682128907, 0.11467107391357421, 0.11471517181396484, 0.11424972534179688, 0.11412889862060546, 0.11435606384277344, 0.11527798461914063, 0.11427782440185547, 0.11469283294677735, 0.11454668426513671, 0.11671961975097657, 0.11460918426513672, 0.11441046142578125, 0.11525325012207031, 0.11688960266113281, 0.11518566131591797, 0.11467359924316406, 0.11560966491699219, 0.11481497955322266, 0.11526499176025391, 0.11422937774658203, 0.1148235855102539, 0.11431935882568359, 0.11652505493164063, 0.11455043029785156, 0.11455522918701172, 0.1150277099609375, 0.11512038421630859, 0.1149234848022461, 0.11474031829833985, 0.11584198760986328, 0.11826995086669922, 0.11590451049804687, 0.11597209930419922, 0.11531263732910156, 0.11517747497558593, 0.11461763000488281, 0.11550985717773438, 0.11611968231201172, 0.11470438385009765, 0.11436390686035157, 0.11434422302246093, 0.1142356185913086, 0.11587379455566406, 0.11658854675292969, 0.11486617279052734, 0.11416575622558593, 0.1141370849609375, 0.1145528335571289, 0.114091552734375, 0.11589446258544922, 0.12202217864990235, 0.11538841247558594, 0.11678633880615234, 0.11451853179931641, 0.11417225646972656, 0.11443814086914063, 0.11487232208251953, 0.11412480163574219, 0.11436646270751953, 0.11663359832763671, 0.11504434967041016, 0.1148231658935547, 0.11466560363769532, 0.11364482879638672, 0.11422882843017577, 0.11442854309082032, 0.11678742218017578, 0.11565379333496094, 0.11595468902587891, 0.11460403442382812, 0.11526300811767579, 0.11655872344970702, 0.115414306640625, 0.11516313934326172, 0.11499798583984375, 0.1153961944580078, 0.1178095703125, 0.11562393951416015, 0.11485311889648438, 0.11503897857666015, 0.11490303802490234, 0.11552358245849609, 0.11525433349609375, 0.11524998474121094, 0.11466121673583984, 0.11455465698242187, 0.11463113403320313, 0.11480038452148437, 0.11396233367919922, 0.11469510650634765, 0.1140469741821289, 0.11419852447509765, 0.11409817504882812, 0.11444400024414063, 0.11667485046386719, 0.11713536071777343, 0.11419232177734374, 0.11395487976074219, 0.11390509033203125, 0.11444895935058594, 0.11427839660644531, 0.11430707550048828, 0.11432083129882813, 0.11409465789794922, 0.11393603515625, 0.11419068908691406, 0.11414118194580078, 0.1141797103881836, 0.11599295806884766, 0.11463475036621094, 0.11427964782714843, 0.11470518493652344, 0.11510079956054688, 0.114512451171875, 0.11406163024902344, 0.1155788803100586, 0.1157342071533203, 0.11485420989990235, 0.1155051498413086, 0.11580397033691406, 0.11447929382324219, 0.11460713958740235, 0.11543785858154297, 0.11883590698242187, 0.11408921813964844, 0.11432963562011719, 0.11387977600097657, 0.11415129852294922, 0.11441779327392577, 0.11421603393554687, 0.11428323364257813, 0.1141883544921875, 0.1139078369140625, 0.1150406723022461, 0.11439727783203125, 0.11578777313232422, 0.11533030700683594, 0.11423411560058594, 0.1150967025756836, 0.11415641784667968, 0.11430707550048828, 0.11393949127197266, 0.11414627075195312, 0.11401830291748047, 0.11476153564453125, 0.11440876770019531, 0.11420761871337891, 0.11416738891601562, 0.11445085144042969, 0.1141391372680664, 0.11473225402832031, 0.11441846466064454, 0.11424358367919922, 0.11513855743408204, 0.11451392364501953, 0.114544189453125, 0.11485228729248047, 0.11580416107177735, 0.11466342163085938, 0.1145159683227539, 0.11447090911865235, 0.11462655639648438, 0.1180032958984375, 0.11456674957275391, 0.11442816162109375, 0.11504287719726562, 0.115019775390625, 0.11446662139892579, 
0.114093505859375, 0.11443206024169922, 0.11516588592529296, 0.11425507354736328, 0.11431190490722656, 0.11547039794921875, 0.1154672622680664, 0.1143223648071289, 0.11419574737548828, 0.1140269775390625, 0.11783942413330079, 0.11491983795166015, 0.11470223999023438, 0.11439094543457032, 0.11436649322509766, 0.1141396484375, 0.11471437072753907, 0.1150835189819336, 0.11405481719970703, 0.11427225494384766, 0.11564272308349609, 0.11457433319091796, 0.11479679870605469, 0.11557145690917969, 0.11450099182128906, 0.1142872314453125, 0.11589427185058594, 0.11387660980224609, 0.11520435333251954, 0.11474598693847657, 0.11423538970947265, 0.1148141098022461, 0.11451849365234375, 0.11694844818115234, 0.11529923248291016, 0.11464089965820312, 0.1149945297241211, 0.11453663635253906, 0.11422550201416015, 0.11429901123046875, 0.11743202972412109, 0.11422134399414062, 0.11521971130371093, 0.11514137268066406, 0.11507273864746094, 0.11577577972412109, 0.11426934051513672, 0.11441648101806641, 0.1141944351196289, 0.11429878234863282, 0.11377180480957032, 0.11467388916015625, 0.11442851257324219, 0.11443814086914063, 0.11451923370361328, 0.11446697235107423, 0.11417420959472656, 0.11455023956298828, 0.1141278076171875, 0.11392160034179688, 0.11407199859619141, 0.11392575836181641, 0.11408953857421875, 0.11462044525146485, 0.11502665710449218, 0.11600041961669921, 0.11493622589111328, 0.1146810531616211, 0.11445302581787109, 0.1145031967163086, 0.11445731353759765, 0.1149560317993164, 0.11435238647460938, 0.11447705841064452, 0.11400300598144532, 0.11636994934082032, 0.11425628662109374, 0.11445404815673828, 0.11390147399902344, 0.11451795196533203, 0.11718310546875, 0.11725004577636719, 0.11441356658935548, 0.11493785858154297, 0.11509490966796875, 0.11428108978271484, 0.11421491241455078, 0.11387904357910156, 0.1157949447631836, 0.11447321319580078, 0.11462470245361328, 0.11465682983398437, 0.11555474853515625, 0.11455693054199219, 0.11445187377929687, 0.11440953826904297, 0.11440383911132812, 0.11481501007080078, 0.11531257629394531, 0.11475564575195313, 0.11445043182373046, 0.11458255767822266, 0.11466441345214844, 0.11519795227050782, 0.11465711975097656, 0.1151014404296875, 0.11536835479736328, 0.11451721954345703, 0.11681996917724609, 0.1168858871459961, 0.11548508453369141, 0.11503590393066407, 0.11623961639404297, 0.11539968109130859, 0.11494166564941406, 0.11517161560058593, 0.11487232208251953, 0.11486780548095703, 0.11469046020507813, 0.1179156494140625, 0.11516099548339843, 0.1143358383178711, 0.11489606475830078, 0.11432428741455078, 0.11445184326171876, 0.11506047821044922, 0.11626547241210937, 0.11449747467041016, 0.11528364562988282, 0.11425814056396484, 0.11458409881591797, 0.1144849624633789, 0.11488694763183593, 0.11452210998535156, 0.11470233917236328, 0.11408383941650391, 0.11440467071533203, 0.11457606506347656, 0.11431935882568359, 0.12073983764648437, 0.1149296646118164, 0.11421839904785157, 0.11473980712890625, 0.11386665344238281, 0.11450323486328125, 0.11418268585205078, 0.11476278686523438, 0.11435517120361328, 0.11438044738769532, 0.11489488220214844, 0.11405696105957032, 0.1140249252319336, 0.11432150268554687, 0.11422713470458984, 0.11517913818359375, 0.11462076568603516, 0.11501570892333984, 0.1148579864501953, 0.11509264373779297, 0.11447996520996094, 0.11668064117431641, 0.11540895843505859, 0.11530572509765626, 0.11574249267578125, 0.11492246246337891, 0.11481603240966796, 0.11995645141601563, 0.11502550506591797, 0.11569942474365234, 0.11470713806152344, 
0.11542921447753907, 0.11548687744140625, 0.11574457550048828, 0.11526573181152344, 0.11530604553222656, 0.11506118774414062, 0.11486540985107421, 0.11518768310546874, 0.11472156524658203, 0.11457536315917968, 0.1142449951171875, 0.11458972930908203, 0.11422576141357423, 0.11450287628173828, 0.11448400115966798, 0.11439923095703125, 0.11381942749023438, 0.11437897491455078, 0.11497267150878906, 0.11485334777832032, 0.11463279724121093, 0.11616255950927734, 0.1151385269165039, 0.11491900634765626, 0.11538521575927735, 0.11416166687011718, 0.11472589111328126, 0.11476274871826173, 0.11509097290039062, 0.11483798217773437, 0.11469004821777344, 0.11428368377685547, 0.1140928955078125, 0.1137129898071289, 0.11417526245117188, 0.11628838348388672, 0.11468537902832031, 0.11490156555175782, 0.11513241577148438, 0.1148251495361328, 0.11440544128417969, 0.11397529602050781, 0.11653020477294922, 0.11471731567382812, 0.11507673645019531, 0.1151495361328125, 0.11472077178955079, 0.1162608642578125, 0.11455078125, 0.1142824935913086, 0.11409724426269531, 0.11394137573242187, 0.11507408142089844, 0.11416835021972656, 0.1162756805419922, 0.11478943634033204, 0.11467842864990234, 0.11424797058105468, 0.11406499481201172, 0.11453632354736328, 0.11453810882568359, 0.11495721435546875, 0.11454195404052735, 0.11458169555664062, 0.1151185302734375, 0.11521568298339843, 0.11480290985107422, 0.11523519897460938, 0.11478582763671875, 0.11484201812744141, 0.11528406524658204, 0.11534713745117188, 0.11537856292724609, 0.1153617935180664, 0.11496038055419922, 0.11476969909667968, 0.11938633728027344, 0.11732787322998046, 0.11638784027099609, 0.11474432373046875, 0.11430751800537109, 0.1145466537475586, 0.1178978271484375, 0.11486003112792968, 0.11467721557617187, 0.11433628845214844, 0.11439308929443359, 0.11497779083251954, 0.11461119842529296, 0.11492556762695312, 0.11622796630859375, 0.11755328369140625, 0.11492086029052734, 0.11445465850830078, 0.11429277038574219, 0.11485228729248047, 0.11463065338134766, 0.11428044891357422, 0.1193861083984375, 0.11515289306640625, 0.11535155487060547, 0.1143579864501953, 0.11437423706054688, 0.11517391967773437, 0.11460829162597656, 0.11490652465820313, 0.11450835418701172, 0.11471056365966797, 0.11566595458984374, 0.11527417755126954]",tokens/s,8.69921310636689,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with 
non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,15365.840896,7937.589248,0.0,7535.067136,7509.663744,s,1,32.492291015625,32.492291015625,0.0,32.492291015625,32.492291015625,32.492291015625,32.492291015625,[32.492291015625],,kWh,0.0007414884357707706,8.178437142373444e-05,0.0002410540817320006,0.0010643268889265057,,MB,1872.572416,8115.847168,0.0,7692.353536,7658.720256,s,10,1.1994598388671875,0.11994598388671876,0.0021319867069776537,0.11916702270507812,0.12140693664550781,0.12369116592407227,0.12551854934692383,"[0.11857132720947265, 0.1187913589477539, 0.11834950256347657, 0.11903202819824218, 0.12089933013916015, 0.11887923431396484, 0.1198770523071289, 0.12597539520263673, 0.1197825927734375, 0.11930201721191407]",tokens/s,2134.294052244179,kWh,3.5114037333326246e-06,3.8724906032643417e-07,2.3246314893333544e-06,6.223284282992413e-06,tokens/kWh,41135835.73542049,MB,1876.598784,8126.332928,0.0,7702.839296,7600.00256,s,10,75.2368876953125,7.523688769531249,0.03248689609096921,7.521123779296875,7.565182861328125,7.573779663085937,7.580657104492188,"[7.4889169921875, 7.4873671875, 7.49193310546875, 7.52077001953125, 7.49258740234375, 7.53427685546875, 7.58237646484375, 7.55390966796875, 7.5214775390625, 7.5632724609375]",tokens/s,8.373552113842305,kWh,0.0002198233881975024,2.4247576311799783e-05,9.812105997826827e-05,0.0003421920244875704,tokens/kWh,184107.1547308619,,s,630,75.23466179656988,0.11942009808979337,0.0011867596983067261,0.11912895965576173,0.1208210205078125,0.12173429946899414,0.12384647972106935,"[0.11905228424072266, 0.11848226928710938, 0.11915094757080077, 0.11803679656982421, 0.1182738265991211, 0.11849513244628906, 0.11830918121337891, 0.1191178207397461, 0.11858908843994141, 0.11859184265136719, 0.11888435363769531, 0.11884544372558593, 0.11887615966796874, 0.11910348510742187, 0.11878604888916015, 0.11826748657226563, 0.11829289245605469, 0.11839282989501954, 0.11830681610107421, 0.11879833221435547, 0.11819622039794922, 0.11891097259521484, 0.11837849426269531, 0.11894169616699218, 0.11834121704101562, 0.11834111785888672, 0.11857142639160156, 0.11868172454833985, 0.12233503723144531, 0.11947065734863281, 0.11982848358154297, 0.11940557098388672, 0.11883990478515626, 0.11848281860351563, 0.11868172454833985, 0.11875513458251953, 0.11905903625488282, 0.1193878402709961, 0.11958509063720703, 0.11845222473144532, 0.11933245086669922, 0.11854275512695313, 0.11850752258300781, 0.11885772705078125, 0.12178841400146484, 0.11852371215820312, 0.1187608642578125, 0.11893949127197266, 0.11882537841796875, 0.1185931167602539, 0.12001376342773437, 0.11856281280517578, 0.11920365142822266, 0.11909529876708984, 0.11831929779052734, 0.1185291519165039, 
0.11850045013427735, 0.11832707214355469, 0.11862214660644531, 0.11881212615966796, 0.1185224609375, 0.11901542663574219, 0.11921817779541016, 0.11842582702636718, 0.11854006195068359, 0.11840739440917969, 0.11879548645019532, 0.1182229461669922, 0.11874953460693359, 0.11927350616455078, 0.12151225280761718, 0.12058419036865234, 0.11872665405273437, 0.12209878540039062, 0.11890959930419921, 0.118325439453125, 0.11980143737792968, 0.12064998626708984, 0.11940991973876953, 0.11996256256103516, 0.11907075500488282, 0.11871437072753906, 0.118724609375, 0.11830681610107421, 0.1181014404296875, 0.11831868743896484, 0.1192232666015625, 0.11846246337890624, 0.11884748840332031, 0.11811436462402344, 0.11813676452636719, 0.11794393920898437, 0.11850697326660156, 0.11822582244873046, 0.11892530822753906, 0.11895913696289062, 0.11918556976318359, 0.11854112243652344, 0.11819622039794922, 0.11801216125488281, 0.11834697723388672, 0.1178587188720703, 0.11815744018554687, 0.11884291076660156, 0.11829090881347656, 0.11836211395263672, 0.11784806060791016, 0.11833558654785156, 0.11819612884521484, 0.1186937255859375, 0.11811650848388672, 0.12089769744873047, 0.11978720092773437, 0.11917533111572266, 0.11873222351074218, 0.11898659515380859, 0.11865971374511719, 0.11871202850341797, 0.11833586883544922, 0.11858470153808594, 0.11932121276855469, 0.11899609375, 0.11872361755371094, 0.11919139099121094, 0.11863587188720703, 0.11846460723876953, 0.11835958099365235, 0.11851824188232422, 0.118644287109375, 0.11923705291748046, 0.11869526672363281, 0.11877648162841797, 0.11839282989501954, 0.11831231689453126, 0.11831346893310547, 0.11819635009765625, 0.11858124542236329, 0.11865907287597656, 0.11844608306884766, 0.11851705932617188, 0.11823379516601562, 0.11875234985351563, 0.11829750061035156, 0.11872652435302734, 0.11881417846679687, 0.1187170867919922, 0.11831692504882813, 0.1182639389038086, 0.1180979232788086, 0.11877171325683594, 0.11910553741455078, 0.118556640625, 0.11810739135742188, 0.12325936126708985, 0.12088761901855469, 0.11937510681152344, 0.12469276428222656, 0.11874345397949218, 0.11862185668945313, 0.11974492645263672, 0.12001689910888672, 0.11933497619628906, 0.11858704376220704, 0.11885801696777344, 0.11838486480712891, 0.11846249389648437, 0.1183536605834961, 0.11808972930908203, 0.12074524688720703, 0.11820310211181641, 0.11858236694335937, 0.11859855651855469, 0.11878195190429687, 0.11876966094970703, 0.11811840057373046, 0.11822694396972656, 0.11814262390136719, 0.11849967956542969, 0.11944755554199218, 0.11937382507324219, 0.1194203872680664, 0.11891970825195312, 0.11905843353271485, 0.11905039978027344, 0.11866893005371094, 0.11868592071533203, 0.11871846771240234, 0.11918953704833984, 0.11870409393310546, 0.11942098999023437, 0.11816802978515625, 0.11867375946044922, 0.11833916473388671, 0.12141391754150391, 0.12018291473388672, 0.12098150634765625, 0.11954994964599609, 0.11975270080566407, 0.11919155120849609, 0.11903794860839843, 0.1185396499633789, 0.11857968139648438, 0.11888246154785156, 0.12009677124023438, 0.11922447967529297, 0.11968495941162109, 0.11866726684570313, 0.11807491302490235, 0.11803424072265625, 0.11813340759277344, 0.11830226898193359, 0.11857145690917968, 0.11861811065673829, 0.11813273620605469, 0.11886815643310547, 0.11857901000976563, 0.11845600128173828, 0.11848531341552734, 0.11874034881591797, 0.11845491027832031, 0.11925017547607422, 0.11833625793457031, 0.1191014404296875, 0.11863629150390625, 0.11849343872070313, 0.11946530914306641, 0.11821062469482421, 
0.1182193603515625, 0.11946803283691407, 0.11955814361572266, 0.12084982299804688, 0.12002569580078125, 0.11964211273193359, 0.11927263641357422, 0.11934912109375, 0.1190163803100586, 0.1223342056274414, 0.12102681732177735, 0.11936150360107421, 0.12314498901367188, 0.11993084716796874, 0.11915814208984375, 0.12045174407958985, 0.12130281829833985, 0.11946620941162109, 0.12011014556884765, 0.11994822692871093, 0.11993907165527344, 0.1199466552734375, 0.11984483337402344, 0.12000041961669922, 0.1198394546508789, 0.1225749740600586, 0.1209200668334961, 0.11929705810546876, 0.11897917175292969, 0.118736572265625, 0.11869868469238282, 0.11828428649902344, 0.11890262603759766, 0.11956352233886719, 0.1194239044189453, 0.11896012878417969, 0.11861135864257813, 0.11831970977783203, 0.1186030044555664, 0.1188503646850586, 0.11815110778808594, 0.1184563217163086, 0.11884969329833985, 0.1187326431274414, 0.11867545318603516, 0.1186693115234375, 0.11838825225830078, 0.11860630035400391, 0.11822489929199219, 0.1205650863647461, 0.11957520294189453, 0.12071116638183593, 0.11876761627197266, 0.11850137329101562, 0.11867545318603516, 0.11880857849121093, 0.1187468490600586, 0.11829804992675781, 0.11868656158447266, 0.11857872009277344, 0.11906678771972656, 0.11949702453613281, 0.11850752258300781, 0.11809174346923829, 0.11867314910888672, 0.12241241455078125, 0.1183138885498047, 0.11862358093261718, 0.11839254760742188, 0.11837337493896484, 0.11886310577392578, 0.11820921325683593, 0.11903590393066406, 0.11839282989501954, 0.1182371826171875, 0.11860768127441407, 0.11911705780029297, 0.11869075012207031, 0.11891693115234375, 0.11894802856445312, 0.11834758758544922, 0.11848108673095703, 0.11933238220214844, 0.11840150451660156, 0.11879119873046876, 0.11877884674072266, 0.11959705352783204, 0.11826380920410157, 0.11823814392089843, 0.11808124542236328, 0.11860403442382812, 0.12094640350341797, 0.12107574462890625, 0.12009881591796875, 0.1184911346435547, 0.12148271942138672, 0.11834217834472656, 0.11852313232421875, 0.121005859375, 0.11942393493652344, 0.119838623046875, 0.11990847778320313, 0.11920979309082032, 0.11893369293212891, 0.11837145233154298, 0.11819459533691407, 0.11841379547119141, 0.1189375991821289, 0.1182863006591797, 0.11911376190185546, 0.11924384307861328, 0.11850176239013673, 0.11795906829833984, 0.11806735992431641, 0.11792793273925781, 0.11886806488037109, 0.1191566390991211, 0.11875119781494141, 0.11883724975585938, 0.11838671875, 0.11807743835449219, 0.11855462646484374, 0.11945935821533203, 0.1180533447265625, 0.11888870239257812, 0.11975859069824218, 0.12107161712646484, 0.11985919952392578, 0.1197998046875, 0.1197192611694336, 0.11945846557617187, 0.11905996704101562, 0.12313836669921875, 0.12500396728515625, 0.12086637115478516, 0.1206685791015625, 0.12001487731933594, 0.11963321685791016, 0.11995126342773438, 0.11941741180419922, 0.11912809753417969, 0.12441824340820312, 0.11990630340576172, 0.11974451446533203, 0.11970764923095703, 0.11944550323486328, 0.11961138916015625, 0.11961138916015625, 0.11916902160644531, 0.11923251342773437, 0.12439545440673828, 0.11963043212890626, 0.11970774078369141, 0.11922431945800781, 0.11927961730957032, 0.11925619506835937, 0.11906752014160156, 0.12320304107666015, 0.12037174224853515, 0.1201844482421875, 0.12015245056152343, 0.12002508544921875, 0.11990790557861328, 0.12052524566650391, 0.11960934448242187, 0.11946578979492188, 0.12394681549072266, 0.12003974151611328, 0.12010655975341797, 0.1198527069091797, 0.11967779541015625, 
0.11915264129638672, 0.11963597106933593, 0.12294348907470704, 0.12144844818115234, 0.12072086334228516, 0.1208196792602539, 0.12299935913085938, 0.12021952056884766, 0.12141580963134765, 0.125159423828125, 0.11943116760253907, 0.11905741119384766, 0.11908198547363281, 0.11982876586914062, 0.11912982177734376, 0.11918649291992188, 0.11861702728271484, 0.12285747528076171, 0.11858739471435546, 0.11968659210205078, 0.1203773422241211, 0.12066265869140624, 0.12062918090820313, 0.12011519622802734, 0.1221734390258789, 0.12059648132324219, 0.12002054595947266, 0.12032454681396484, 0.11996774291992188, 0.121169921875, 0.1208975372314453, 0.11991244506835938, 0.1201743392944336, 0.11961369323730468, 0.11954176330566406, 0.11953561401367188, 0.11943116760253907, 0.1200777587890625, 0.12077510070800782, 0.12041136169433594, 0.12011817932128906, 0.12070035552978516, 0.12166815948486329, 0.12055538940429687, 0.1198062744140625, 0.12140748596191406, 0.12019087982177734, 0.12002489471435547, 0.12053507232666015, 0.11996934509277343, 0.11935609436035156, 0.11968102264404297, 0.119912353515625, 0.11997548675537109, 0.12025526428222656, 0.11944115447998047, 0.11934105682373047, 0.11810816192626954, 0.11864064025878907, 0.11827609252929687, 0.1185337905883789, 0.11865135955810546, 0.11869580841064453, 0.119312255859375, 0.12024588775634766, 0.11993075561523438, 0.11960582733154297, 0.11922547149658203, 0.11916793823242187, 0.11979571533203125, 0.11967206573486328, 0.12014620971679688, 0.11950947570800781, 0.11996774291992188, 0.11907689666748048, 0.11910902404785156, 0.11858386993408203, 0.11853823852539062, 0.11856441497802735, 0.11849171447753906, 0.1188201904296875, 0.11968911743164062, 0.11951372528076172, 0.11930802917480468, 0.11979126739501954, 0.12091593933105468, 0.12123379516601562, 0.11958089447021485, 0.1213194580078125, 0.12462028503417968, 0.12061542510986328, 0.12299468994140625, 0.11972608184814452, 0.1207910385131836, 0.11939225769042969, 0.1200373764038086, 0.11962982177734376, 0.12012960052490235, 0.1204254379272461, 0.11956697845458984, 0.11960150146484375, 0.11980595397949219, 0.1206702117919922, 0.12266806030273437, 0.12209661102294922, 0.12043836975097656, 0.11935212707519531, 0.11886857604980469, 0.11932502746582031, 0.11873484802246094, 0.11898611450195312, 0.11852864074707031, 0.11907875061035156, 0.11933916473388671, 0.12014387512207031, 0.11918048095703125, 0.11912274932861328, 0.11892304229736328, 0.11918153381347656, 0.11927552032470704, 0.11928985595703125, 0.11924479675292969, 0.11914227294921875, 0.11905760192871094, 0.12149043273925782, 0.1191951675415039, 0.11972239685058594, 0.12035174560546875, 0.11995442962646484, 0.11992441558837891, 0.11926121520996094, 0.11975094604492187, 0.11953545379638672, 0.11909740447998046, 0.11889215850830079, 0.118949951171875, 0.11887452697753906, 0.11958271789550781, 0.11957612609863282, 0.11920223999023437, 0.12201369476318359, 0.11972825622558594, 0.11898287963867188, 0.12002217864990235, 0.11864899444580078, 0.11851219177246093, 0.11956201934814453, 0.11931190490722657, 0.11948438262939454, 0.11946854400634765, 0.1191014404296875, 0.11932876586914062, 0.11921408081054688, 0.11894989013671875, 0.11976703643798828, 0.11959910583496093, 0.1192175064086914, 0.12010086059570313, 0.11881334686279296, 0.11874918365478515, 0.11856281280517578, 0.11878809356689453, 0.11911167907714844, 0.11908902740478515, 0.11861619567871094, 0.11863603210449218, 0.11920755004882813, 0.12183180999755859, 0.12069734191894531, 0.11895603179931641, 
0.11858732604980468, 0.12243897247314453, 0.11883187103271485, 0.123600830078125, 0.11970361328125, 0.11970531463623046, 0.11938166046142579, 0.11923865509033203, 0.11907046508789063, 0.1194730224609375, 0.11920384216308594, 0.11855667114257812, 0.11913760375976562, 0.11905709075927734, 0.11953766632080078, 0.11894703674316406, 0.11916297912597656, 0.1189525146484375, 0.11960530853271484, 0.11890412902832032, 0.11876019287109375, 0.12040601348876953, 0.1192591323852539, 0.11898387145996094, 0.11895686340332032, 0.119012451171875, 0.11986831665039062, 0.11883519744873047, 0.11856486511230468, 0.11895996856689453, 0.11914656066894531, 0.1191343002319336, 0.118961181640625, 0.11968943786621093, 0.11980467224121094, 0.12000230407714843, 0.11939222717285156, 0.12318316650390625, 0.12081378936767578, 0.12001894378662109, 0.11999231719970703, 0.11987149047851563, 0.12021331024169922, 0.12069292449951172, 0.12073983764648437, 0.12013286590576172, 0.1212116470336914, 0.12357762908935546, 0.12027362823486328, 0.12076850891113282, 0.12083309173583984, 0.12093507385253906, 0.12023017883300781, 0.1206230697631836, 0.12044076538085938, 0.12308828735351562, 0.120607421875, 0.12138204956054688, 0.12357923126220703, 0.12157132720947265, 0.12094620513916016, 0.11954838562011719]",tokens/s,8.373799854427252,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,3927.699456,2152.660992,0.0,1757.413376,1736.37632,s,1,12.52262109375,12.52262109375,0.0,12.52262109375,12.52262109375,12.52262109375,12.52262109375,[12.52262109375],,kWh,0.0001584674610750047,1.7472890894140697e-05,5.979782561599256e-05,0.00023573817758513796,,MB,3934.502912,2387.542016,0.0,1971.32288,1913.084928,s,10,0.56003564453125,0.056003564453125,0.0004012378070317411,0.05581399917602539,0.056581279373168945,0.05664607982635498,0.05669792018890381,"[0.05617155075073242, 0.056360126495361325, 0.056566879272460936, 0.05582175827026367, 0.05580624008178711, 0.055629119873046876, 0.056710880279541014, 0.05570294570922851, 0.0554600944519043, 0.055806049346923826]",tokens/s,4571.13761418297,kWh,1.6468480188436859e-06,1.8161163862555528e-07,9.848853072584362e-07,2.8133449647276774e-06,tokens/kWh,90994884.45590602,MB,3934.502912,2408.513536,0.0,1992.2944,1972.635136,s,10,34.87702587890625,3.487702587890625,0.01510740592291888,3.4910791015625,3.501748315429688,3.504462048339844,3.506633034667969,"[3.48496044921875, 3.4877802734375, 3.501145263671875, 3.4943779296875, 3.494394775390625, 3.500339111328125, 3.50717578125, 3.4849052734375, 3.457155029296875, 
3.4647919921875]",tokens/s,18.063466827342815,kWh,0.00010163501932365669,1.1210439492826176e-05,4.5264707247940516e-05,0.00015811016606442335,tokens/kWh,398456.35210028244,,s,630,34.872874938964884,0.05535376974438864,0.0011945569063210235,0.055267553329467776,0.0559307762145996,0.05636314430236816,0.05848259189605713,"[0.05495974349975586, 0.0547782096862793, 0.05524457550048828, 0.05441926574707031, 0.05457145690917969, 0.05482086563110351, 0.055213535308837894, 0.05532499313354492, 0.05646115112304687, 0.05487971115112305, 0.05439369583129883, 0.054841407775878905, 0.05492326354980469, 0.0549826545715332, 0.05455462265014648, 0.05454972839355469, 0.055261985778808594, 0.05527142333984375, 0.05487807846069336, 0.055411968231201175, 0.05461017608642578, 0.054653568267822264, 0.0550010871887207, 0.0551475830078125, 0.05549075317382812, 0.05562239837646484, 0.056797119140625, 0.05628934478759766, 0.05552249526977539, 0.055554878234863284, 0.05535884857177734, 0.05584511947631836, 0.055505279541015626, 0.055384063720703126, 0.055556095123291016, 0.055431167602539064, 0.05568697738647461, 0.05549075317382812, 0.058859519958496094, 0.05565849685668945, 0.05506867218017578, 0.05473484802246094, 0.05516419219970703, 0.05539503860473633, 0.05525299072265625, 0.05499903869628906, 0.055019519805908204, 0.05504150390625, 0.05520233535766601, 0.055029022216796876, 0.054583999633789064, 0.05498601531982422, 0.055363838195800784, 0.055495166778564455, 0.05567692947387695, 0.05540224075317383, 0.055363838195800784, 0.055224128723144535, 0.055430465698242185, 0.055374561309814455, 0.055698944091796876, 0.05607609558105469, 0.0557371826171875, 0.05461702346801758, 0.05509107208251953, 0.05524460983276367, 0.056344383239746096, 0.05517567825317383, 0.05541427230834961, 0.05535750579833985, 0.055402942657470707, 0.05548569488525391, 0.055098110198974606, 0.05493721771240234, 0.05527795028686523, 0.05535334396362305, 0.05551839828491211, 0.05564396667480469, 0.055659423828125, 0.055483905792236325, 0.05555260848999023, 0.05546393585205078, 0.05565987014770508, 0.05554857635498047, 0.05599027252197265, 0.05558236694335938, 0.05569366455078125, 0.05566195297241211, 0.055591552734375, 0.055228416442871096, 0.05474508666992187, 0.054860801696777345, 0.05476249694824219, 0.05459715270996094, 0.05464223861694336, 0.055257823944091795, 0.05518355178833008, 0.055416831970214846, 0.055409919738769534, 0.05487241744995117, 0.05710480117797852, 0.05513356781005859, 0.055427711486816404, 0.0551383056640625, 0.05495779037475586, 0.05529219055175781, 0.05529302215576172, 0.055223201751708986, 0.054949119567871095, 0.05465520095825195, 0.054507648468017575, 0.054679969787597656, 0.05494784164428711, 0.055057502746582034, 0.05526595306396485, 0.05526348876953125, 0.05541455841064453, 0.05531465530395508, 0.05517926406860352, 0.055022624969482424, 0.055667678833007814, 0.05535238265991211, 0.05541939163208008, 0.055798206329345706, 0.055739585876464846, 0.05841388702392578, 0.05521897506713867, 0.055379169464111325, 0.05547110366821289, 0.0553779182434082, 0.05552243041992187, 0.05528982543945313, 0.055534496307373046, 0.05544879913330078, 0.05570230484008789, 0.055711742401123046, 0.0555228157043457, 0.05585356903076172, 0.055330814361572264, 0.054976097106933595, 0.05583456039428711, 0.05560924911499023, 0.05566316986083984, 0.05550694274902344, 0.05603942489624023, 0.05585100936889648, 0.05560319900512695, 0.055096351623535156, 0.055389152526855466, 0.05517107009887695, 0.055325855255126954, 0.05803641510009765, 
0.05583251190185547, 0.05566332626342774, 0.05544550323486328, 0.055884990692138675, 0.05578742218017578, 0.05611942291259766, 0.05523273468017578, 0.05534163284301758, 0.05539833450317383, 0.055181377410888674, 0.05510518264770508, 0.055013729095458985, 0.05508505630493164, 0.05494521713256836, 0.05496275329589844, 0.05522166442871094, 0.05555817413330078, 0.05536588668823242, 0.05542272186279297, 0.05523440170288086, 0.05591910552978516, 0.05552150344848633, 0.055547904968261716, 0.05567692947387695, 0.055586814880371094, 0.055521278381347655, 0.05643468856811523, 0.05568716812133789, 0.05573427200317383, 0.05588172912597656, 0.055715198516845706, 0.05563369750976563, 0.05543612670898437, 0.055553470611572266, 0.05651718521118164, 0.05563596725463867, 0.05547372817993164, 0.05481881713867188, 0.0561868782043457, 0.05528371047973633, 0.05565235137939453, 0.055119873046875, 0.055373504638671876, 0.055212352752685545, 0.0550830078125, 0.055119873046875, 0.054880256652832034, 0.054951934814453124, 0.05517663955688477, 0.055401023864746095, 0.05561753463745117, 0.05594249725341797, 0.05749542236328125, 0.05513059234619141, 0.055185726165771484, 0.05510105514526367, 0.05481689453125, 0.0551283187866211, 0.05502975845336914, 0.05518096160888672, 0.05522467041015625, 0.055142398834228515, 0.05591625595092774, 0.0568056640625, 0.05633769607543945, 0.05592947387695312, 0.05502767944335937, 0.055225631713867185, 0.055653217315673825, 0.05556208038330078, 0.05587507247924805, 0.05536630249023437, 0.056341793060302736, 0.05546985626220703, 0.055821121215820314, 0.055553375244140626, 0.055925342559814455, 0.05555116653442383, 0.05542195129394531, 0.0556317138671875, 0.05543132781982422, 0.055449310302734374, 0.05540687942504883, 0.055387264251708986, 0.05502041625976563, 0.05518131256103516, 0.055777278900146485, 0.055531520843505856, 0.05569740676879883, 0.05519919967651367, 0.05523411178588867, 0.05509423828125, 0.05513216018676758, 0.055027713775634764, 0.055216289520263674, 0.05531631851196289, 0.055991744995117186, 0.055639808654785156, 0.05539718246459961, 0.05524873733520508, 0.054787872314453125, 0.05483280181884766, 0.055070465087890624, 0.05544428634643555, 0.05523251342773437, 0.05514400100708008, 0.05514080047607422, 0.0551464958190918, 0.05500668716430664, 0.055560737609863284, 0.054873600006103515, 0.05559552001953125, 0.05563596725463867, 0.055382015228271485, 0.05575657653808594, 0.055245025634765625, 0.05510371017456055, 0.05520281600952148, 0.05559580612182617, 0.05575475311279297, 0.0555601921081543, 0.05584896087646484, 0.05556752014160156, 0.07943241882324219, 0.05702822494506836, 0.057579360961914065, 0.05531862258911133, 0.05523846435546875, 0.05511004638671875, 0.0550497932434082, 0.05541321563720703, 0.05541510391235351, 0.055104671478271486, 0.0547250862121582, 0.05430460739135742, 0.05440524673461914, 0.05434147262573242, 0.05471289443969726, 0.054994144439697266, 0.05463017654418945, 0.05475635147094727, 0.05459574508666992, 0.05476335906982422, 0.05485977554321289, 0.05463449478149414, 0.055069950103759764, 0.0542042236328125, 0.054104415893554685, 0.05462691116333008, 0.05484134292602539, 0.05503110504150391, 0.05499078369140625, 0.05453865432739258, 0.05453859329223633, 0.05470003128051758, 0.05446656036376953, 0.05464678573608398, 0.055021568298339846, 0.05482416152954102, 0.05486262512207031, 0.054747135162353515, 0.054771713256835934, 0.055087104797363284, 0.05446041488647461, 0.05444540786743164, 0.054728832244873044, 0.05514704132080078, 0.055144447326660156, 
0.0553221435546875, 0.05501795196533203, 0.05519721603393555, 0.055030174255371093, 0.05514236831665039, 0.05522441482543945, 0.055431072235107424, 0.055363006591796875, 0.05508966445922851, 0.05765078353881836, 0.05530886459350586, 0.055330814361572264, 0.0554161262512207, 0.054575809478759764, 0.05429590225219726, 0.05454425430297852, 0.05484124755859375, 0.05491785430908203, 0.05470633697509766, 0.05450543975830078, 0.05531036758422852, 0.05531145477294922, 0.0554598388671875, 0.055269695281982424, 0.05574102401733398, 0.05561315155029297, 0.055647872924804685, 0.05575132751464844, 0.05564131164550781, 0.0559598388671875, 0.0567116813659668, 0.05550080108642578, 0.05501542282104492, 0.055277568817138675, 0.05563734436035156, 0.05574825668334961, 0.05588684844970703, 0.05583871841430664, 0.05605580902099609, 0.05548441696166992, 0.05559500885009765, 0.055483776092529295, 0.055666656494140626, 0.05710649490356445, 0.05573897552490235, 0.05580944061279297, 0.05587337493896485, 0.05629209518432617, 0.05692777633666992, 0.05623446273803711, 0.05624335861206055, 0.05602799987792969, 0.05620940780639649, 0.056164352416992185, 0.05620441436767578, 0.05606079864501953, 0.05631740951538086, 0.056220191955566406, 0.05578956985473633, 0.05590323257446289, 0.05608755111694336, 0.05851065444946289, 0.05608927917480469, 0.055553375244140626, 0.05555267333984375, 0.056174591064453126, 0.05539836883544922, 0.055296031951904294, 0.055547904968261716, 0.055201793670654295, 0.058861568450927736, 0.055967742919921876, 0.05743206405639648, 0.05688095855712891, 0.05559724807739258, 0.05532035064697265, 0.0555706558227539, 0.055777278900146485, 0.05561040115356446, 0.0558612174987793, 0.05586806488037109, 0.05587779235839844, 0.05603247833251953, 0.05646828842163086, 0.055117984771728516, 0.05540454483032226, 0.05513353729248047, 0.055253662109375, 0.05609267044067383, 0.055117729187011716, 0.055076030731201174, 0.054868896484375, 0.05483497619628906, 0.05512582397460938, 0.05528745651245117, 0.05573503875732422, 0.055312225341796875, 0.05564432144165039, 0.05547417449951172, 0.05556224060058594, 0.05552537536621094, 0.05583052825927735, 0.055373825073242185, 0.05543231964111328, 0.05535014343261719, 0.05603737640380859, 0.05566668701171875, 0.05546915054321289, 0.05551094436645508, 0.0553087043762207, 0.05491772842407226, 0.05502361679077149, 0.055358657836914064, 0.05544636917114258, 0.055244766235351565, 0.0553164176940918, 0.055236671447753904, 0.05534089660644531, 0.055167137145996095, 0.05571379089355469, 0.0551868782043457, 0.054556671142578124, 0.055185409545898435, 0.05538313674926758, 0.055780033111572265, 0.0552468147277832, 0.055109630584716796, 0.05496582412719726, 0.0549997444152832, 0.05470150375366211, 0.05499347305297852, 0.05513216018676758, 0.055371681213378904, 0.05525104141235351, 0.05753238296508789, 0.05539023971557617, 0.055564289093017576, 0.05565983963012695, 0.05526988983154297, 0.055177406311035154, 0.05549260711669922, 0.05530588912963867, 0.05685843276977539, 0.055376415252685544, 0.055621536254882815, 0.0554230728149414, 0.05637849426269531, 0.05586419296264648, 0.05583379364013672, 0.055464767456054685, 0.055357440948486325, 0.055373825073242185, 0.05545574569702148, 0.05558639907836914, 0.056240127563476565, 0.05568710327148438, 0.05549916839599609, 0.05514246368408203, 0.055164127349853515, 0.05542172622680664, 0.055877632141113284, 0.0551479377746582, 0.05512252807617188, 0.05491616058349609, 0.05485049438476562, 0.05465599822998047, 0.055223297119140625, 
0.055244800567626956, 0.055160831451416016, 0.05532876968383789, 0.055155841827392575, 0.05496307373046875, 0.055357662200927735, 0.055269153594970706, 0.05497766494750977, 0.05506342315673828, 0.05557452774047852, 0.05512713623046875, 0.054924190521240236, 0.05466908645629883, 0.05444220733642578, 0.054228897094726565, 0.055005279541015625, 0.05454172897338867, 0.05475942230224609, 0.0550563850402832, 0.05489459228515625, 0.060663806915283204, 0.05457299041748047, 0.055706878662109376, 0.055202369689941404, 0.0552204818725586, 0.05496435165405274, 0.05499062347412109, 0.05484348678588867, 0.05493132781982422, 0.05485753631591797, 0.0548969612121582, 0.054714366912841796, 0.05490687942504883, 0.054812671661376954, 0.055019390106201174, 0.05501468658447266, 0.05485836791992187, 0.054831329345703124, 0.05449932861328125, 0.05455462265014648, 0.05898761749267578, 0.055155616760253906, 0.05406252670288086, 0.05426134490966797, 0.05495702362060547, 0.05461606216430664, 0.05472819137573242, 0.05419059371948242, 0.05455651092529297, 0.05363260650634766, 0.05367776107788086, 0.05351718521118164, 0.0541102066040039, 0.0543559684753418, 0.056597503662109375, 0.0544502067565918, 0.05388592147827148, 0.053808799743652345, 0.05442595291137695, 0.054988414764404296, 0.054703807830810545, 0.054428352355957034, 0.054497055053710934, 0.054492576599121094, 0.05474150466918945, 0.05527769470214844, 0.05415097427368164, 0.0538361587524414, 0.05381100845336914, 0.05395065689086914, 0.053766143798828124, 0.05442150497436524, 0.055064064025878906, 0.056469150543212894, 0.05530268859863281, 0.05556159973144531, 0.05484025573730469, 0.05531033706665039, 0.05555814361572266, 0.05480448150634765, 0.054801761627197264, 0.055401153564453125, 0.055026817321777347, 0.057189247131347654, 0.058791934967041014, 0.054986751556396485, 0.05482700729370117, 0.0542105598449707, 0.05465686416625976, 0.0550032958984375, 0.0549310417175293, 0.054583072662353516, 0.054331329345703124, 0.054330047607421876, 0.0551383056640625, 0.055126014709472655, 0.05446041488647461, 0.054357120513916016, 0.05461695861816406, 0.05556633758544922, 0.057145343780517575, 0.05455820846557617, 0.05460128021240234, 0.05426681518554687, 0.05403968048095703, 0.05421334457397461, 0.05433686447143555, 0.05504083251953125, 0.05485340881347656, 0.054758689880371095, 0.054418495178222656, 0.05426982498168945, 0.054836929321289064, 0.05534892654418945, 0.05556185531616211, 0.05483827209472656, 0.05466316986083984, 0.055314430236816405, 0.05501337432861328, 0.05482406234741211, 0.05446131134033203, 0.054849536895751956, 0.054930721282958984, 0.05514313507080078, 0.054929279327392576, 0.05552755355834961, 0.05523020935058594, 0.054937854766845706, 0.055539710998535156, 0.05500928115844726, 0.05514662551879883, 0.05558259201049805, 0.05570880126953125, 0.055439838409423826, 0.05530838394165039, 0.055269695281982424, 0.05502975845336914, 0.054832767486572266, 0.05458367919921875, 0.054451358795166015, 0.054303585052490236, 0.054682785034179685, 0.05440777587890625]",tokens/s,18.065616933007036,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,3952.275456,2152.660992,0.0,1757.413376,1736.37632,s,1,12.4734677734375,12.4734677734375,0.0,12.4734677734375,12.4734677734375,12.4734677734375,12.4734677734375,[12.4734677734375],,kWh,0.00015622058873749058,1.722508014731104e-05,5.7967268595998656e-05,0.00023141293748080028,,MB,3963.71968,2387.542016,0.0,1971.32288,1913.084928,s,10,0.5615455055236817,0.056154550552368176,0.00035814502564154307,0.05609428787231445,0.05638534507751465,0.056742896842956546,0.05702893825531006,"[0.05630588912963867, 0.05590630340576172, 0.05578108978271484, 0.05614937591552734, 0.05582883071899414, 0.05597932815551758, 0.05603919982910156, 0.05710044860839844, 0.05618767929077149, 0.05626736068725586]",tokens/s,4558.846923033629,kWh,1.655127770386044e-06,1.8252889147709846e-07,9.916957086101636e-07,2.829352370473306e-06,tokens/kWh,90480069.81087874,MB,3967.901696,2408.513536,0.0,1992.2944,1972.635136,s,10,35.18063403320313,3.518063403320313,0.027803330613491103,3.5024200439453126,3.5556041748046874,3.558329553222656,3.560509855957031,"[3.502048828125, 3.4907255859375, 3.502791259765625, 3.5011953125, 3.495353759765625, 3.4844482421875, 3.561054931640625, 3.542568359375, 3.54544921875, 3.55499853515625]",tokens/s,17.907579476976235,kWh,0.00010228365814502975,1.1281983364420254e-05,4.5486508140589655e-05,0.00015905214965003961,tokens/kWh,396096.50129607227,,s,630,35.17682631301875,0.05583623224288699,0.0008420710525841882,0.05569855880737305,0.0566146297454834,0.056956986618041994,0.05953084297180176,"[0.055654399871826174, 0.05565011215209961, 0.05557267379760742, 0.056282398223876956, 0.055591392517089847, 0.055548160552978516, 0.055798912048339845, 0.05553241729736328, 0.05513216018676758, 0.05503171157836914, 0.05506057739257812, 0.055207454681396484, 0.05526166534423828, 0.05516287994384766, 0.054975841522216795, 0.05464115142822266, 0.054693599700927735, 0.054419776916503904, 0.05431046295166016, 0.054561088562011716, 0.055048126220703125, 0.055236927032470705, 0.05522393417358398, 0.055328353881835934, 0.05484576034545898, 0.05524435043334961, 0.05848566436767578, 0.055215679168701175, 0.05506067276000977, 0.05489075088500977, 0.0554284782409668, 0.05545843124389648, 0.057030654907226565, 0.055228416442871096, 0.055363582611083983, 0.05525094223022461, 0.05545779037475586, 0.05539945602416992, 0.05553571319580078, 0.05573900985717774, 0.055650367736816406, 0.05585308837890625, 0.05621161651611328, 0.05565030288696289, 0.05560092926025391, 0.05609699249267578, 0.05633433532714844, 0.05592473602294922, 0.05589334487915039, 0.058401344299316406, 0.05714332962036133, 0.05591865539550781, 0.056172542572021485, 0.05565999984741211, 0.05510915374755859, 0.05526416015625, 0.05515683364868164, 0.05579980850219726, 0.05551923370361328, 0.05542707061767578, 0.05578137588500977, 0.05706047821044922, 0.05552012634277344, 0.0550299186706543, 0.055273246765136716, 0.055562976837158204, 0.05536972808837891, 0.05515468978881836, 0.05492531204223633, 0.05502361679077149, 0.055011329650878904, 0.05526505661010742, 0.05464495849609375, 0.054771713256835934, 0.05499699020385742, 0.055193599700927735, 0.0554598388671875, 0.054935550689697264, 0.05450137710571289, 0.05462812805175781, 0.05498886489868164, 0.05524051284790039, 0.054796638488769533, 0.055949310302734374, 0.05554972839355469, 
0.055446849822998044, 0.055591678619384764, 0.05555372619628906, 0.055556575775146486, 0.05566463851928711, 0.05578137588500977, 0.056010753631591796, 0.055844863891601565, 0.05633612823486328, 0.05584716796875, 0.05570502471923828, 0.055648574829101564, 0.05572224044799805, 0.05550284957885742, 0.05582169723510742, 0.05562227249145508, 0.0558941764831543, 0.05570339202880859, 0.055443073272705076, 0.05531795120239258, 0.05522528076171875, 0.0551649284362793, 0.05580595016479492, 0.055294975280761716, 0.05569766235351563, 0.05559577560424805, 0.055537086486816406, 0.05562835311889648, 0.0554247055053711, 0.055146816253662106, 0.055112926483154294, 0.05497296142578125, 0.05524300765991211, 0.05510508728027344, 0.05525942230224609, 0.05536374282836914, 0.05568102264404297, 0.05581414413452149, 0.056649280548095704, 0.05538860702514648, 0.05503776168823242, 0.05483520126342773, 0.05523606491088867, 0.05542147064208985, 0.0556151351928711, 0.0554516487121582, 0.05524310302734375, 0.055175167083740234, 0.05527523040771484, 0.05538435363769531, 0.055283489227294924, 0.055586719512939455, 0.05589228820800781, 0.055992321014404295, 0.05584281539916992, 0.055728126525878906, 0.055756160736083984, 0.055960193634033206, 0.05571993637084961, 0.05558272171020508, 0.05624550247192383, 0.055749374389648436, 0.05978726577758789, 0.05578956985473633, 0.055723262786865235, 0.05573471832275391, 0.05554003143310547, 0.05570060729980469, 0.0555098876953125, 0.05566873550415039, 0.055670783996582034, 0.05583977508544922, 0.05530108642578125, 0.054919166564941405, 0.05566828918457031, 0.055363296508789066, 0.05521062469482422, 0.05530019378662109, 0.055488510131835936, 0.05562515258789062, 0.05562015914916992, 0.05565004730224609, 0.055562496185302734, 0.05591244888305664, 0.055282974243164064, 0.055050975799560545, 0.05513216018676758, 0.055347198486328124, 0.05578540802001953, 0.055511104583740235, 0.055488510131835936, 0.05522227096557617, 0.055259136199951174, 0.05548646545410156, 0.05533695983886719, 0.055537662506103515, 0.05535302352905273, 0.05565267181396484, 0.05560688018798828, 0.05537833786010742, 0.055433216094970705, 0.05559091186523438, 0.05550425720214844, 0.05582912063598633, 0.05564575958251953, 0.055817150115966795, 0.056124576568603514, 0.0557371826171875, 0.0558359375, 0.05565513610839844, 0.055482368469238284, 0.055671966552734375, 0.05554671859741211, 0.05561139297485351, 0.05575215911865234, 0.05559555053710938, 0.055209983825683595, 0.0546058235168457, 0.05660988616943359, 0.05564697647094727, 0.05546368026733398, 0.05511788940429688, 0.05544585418701172, 0.05551923370361328, 0.05672256088256836, 0.054823230743408204, 0.05454086303710937, 0.05419993591308594, 0.0546429443359375, 0.05451993560791016, 0.05444607925415039, 0.05466719818115234, 0.05517059326171875, 0.055547870635986325, 0.055382591247558594, 0.05514854431152344, 0.05500457763671875, 0.054633056640625, 0.05506256103515625, 0.05514031982421875, 0.055021568298339846, 0.05802540969848633, 0.05592121505737305, 0.05555814361572266, 0.055218177795410155, 0.0551580810546875, 0.055351486206054686, 0.05535353469848633, 0.055269695281982424, 0.055605247497558595, 0.055672191619873045, 0.0558045425415039, 0.05566668701171875, 0.06031961441040039, 0.05594486236572266, 0.05576051330566406, 0.05626732635498047, 0.05572774505615234, 0.05567724609375, 0.05598553466796875, 0.05605830383300781, 0.05585974502563477, 0.055713024139404294, 0.055865280151367186, 0.055966209411621094, 0.05528998565673828, 0.05505862426757813, 0.05507727813720703, 
0.05559305572509766, 0.05525955200195312, 0.05501948928833008, 0.05494172668457031, 0.0547696647644043, 0.05500310516357422, 0.054986785888671875, 0.05541388702392578, 0.05550374221801758, 0.05572198486328125, 0.055549663543701173, 0.05556028747558594, 0.05523247909545898, 0.0553856315612793, 0.054890815734863284, 0.0549728012084961, 0.055131935119628904, 0.05537964630126953, 0.05544588851928711, 0.05516099166870117, 0.05495993423461914, 0.05547417449951172, 0.05453023910522461, 0.05490435028076172, 0.055171390533447266, 0.055459999084472654, 0.05560115051269531, 0.05585903930664062, 0.05580160140991211, 0.055765407562255856, 0.05579743957519531, 0.05611142349243164, 0.055810047149658204, 0.056003841400146484, 0.05598857498168945, 0.05564985656738281, 0.055919456481933597, 0.05584076690673828, 0.055760894775390625, 0.055461952209472656, 0.0555478401184082, 0.05596281433105469, 0.05565932846069336, 0.05580287933349609, 0.05636198425292969, 0.056233985900878906, 0.0567086067199707, 0.056784767150878906, 0.05561315155029297, 0.05510211181640625, 0.05498700714111328, 0.05487411117553711, 0.055252735137939456, 0.0553515510559082, 0.05582438278198242, 0.05519974517822265, 0.05521775817871094, 0.055230880737304686, 0.05535251235961914, 0.05514652633666992, 0.05527222442626953, 0.055564289093017576, 0.05542092895507812, 0.055395809173583985, 0.05495616149902344, 0.05483152008056641, 0.05517926406860352, 0.05493100738525391, 0.05462879943847656, 0.05525094223022461, 0.055389633178710936, 0.05568272018432617, 0.05544847869873047, 0.055152641296386716, 0.05524684906005859, 0.055218177795410155, 0.055582687377929686, 0.05565193557739258, 0.055699905395507815, 0.055889598846435545, 0.05598988723754883, 0.05561619186401367, 0.05553526306152344, 0.0552916145324707, 0.055597217559814456, 0.05550307083129883, 0.05573247909545898, 0.055721023559570315, 0.05551712036132812, 0.055806976318359375, 0.05541875076293945, 0.055390335083007815, 0.054848670959472656, 0.054664031982421875, 0.055538753509521484, 0.05523756790161133, 0.05525299072265625, 0.05538816070556641, 0.055654399871826174, 0.05549260711669922, 0.0556193618774414, 0.055666656494140626, 0.05556572723388672, 0.0548934097290039, 0.055041118621826174, 0.05534735870361328, 0.05539507293701172, 0.055578624725341794, 0.054839073181152345, 0.05462579345703125, 0.054526142120361325, 0.05466502380371094, 0.05469257736206055, 0.05530214309692383, 0.054601470947265626, 0.05506073760986328, 0.055368961334228514, 0.05543756866455078, 0.055233024597167966, 0.05520793533325195, 0.05541222381591797, 0.05502518463134766, 0.05524563217163086, 0.05539372634887695, 0.05554585647583008, 0.055519775390625, 0.05577916717529297, 0.055676959991455076, 0.06326457595825195, 0.05648384094238281, 0.05652099227905273, 0.05657193756103516, 0.05656371307373047, 0.058060798645019535, 0.056311969757080076, 0.05638742446899414, 0.05612086486816406, 0.056740318298339844, 0.05644083023071289, 0.05571152114868164, 0.05599580764770508, 0.056046302795410154, 0.056465503692626956, 0.055678497314453124, 0.055935455322265626, 0.05565577697753906, 0.05504886245727539, 0.05526323318481445, 0.055070720672607425, 0.05557020950317383, 0.05952534484863281, 0.05653504180908203, 0.05569331359863281, 0.05575600051879883, 0.05598230361938476, 0.05613216018676758, 0.05588351821899414, 0.05572224044799805, 0.0554700813293457, 0.0585992317199707, 0.056664257049560546, 0.05787231826782226, 0.05953308868408203, 0.056237857818603514, 0.05581177520751953, 0.056441471099853514, 0.05595686340332031, 
0.05608736038208008, 0.05710438537597656, 0.056025089263916014, 0.05652070236206055, 0.05622988891601562, 0.05689263916015625, 0.05649692916870117, 0.05664972686767578, 0.05688729476928711, 0.05652889633178711, 0.0568072624206543, 0.05639913558959961, 0.05657632064819336, 0.056231582641601566, 0.05645609664916992, 0.056594432830810545, 0.05646281433105469, 0.056944385528564456, 0.05633718490600586, 0.05719039916992188, 0.05654463958740234, 0.05532057571411133, 0.05605558395385742, 0.05568739318847656, 0.05574348831176758, 0.055989246368408206, 0.05622579193115235, 0.0562606086730957, 0.05608963012695312, 0.05582697677612305, 0.055504638671875, 0.05551718521118164, 0.05585171127319336, 0.05588787078857422, 0.05592652893066406, 0.05617279815673828, 0.056196895599365235, 0.05654553604125977, 0.056624191284179684, 0.05623849487304688, 0.05662908935546875, 0.056088993072509766, 0.056393184661865235, 0.056129344940185545, 0.05646640014648437, 0.056281089782714844, 0.05633782577514648, 0.05597859191894531, 0.0563056640625, 0.056266624450683596, 0.05584703826904297, 0.05563347244262695, 0.05576544189453125, 0.056293216705322266, 0.056536479949951174, 0.0601075210571289, 0.05669862365722656, 0.05687526321411133, 0.05653839874267578, 0.056113887786865234, 0.056455169677734375, 0.0563240966796875, 0.05663129425048828, 0.056607967376708986, 0.056405982971191405, 0.05627686309814453, 0.05638371276855469, 0.05618345642089844, 0.056102977752685544, 0.056000511169433595, 0.0560120964050293, 0.056260639190673825, 0.05622371292114258, 0.05626950454711914, 0.05634857559204102, 0.05636713409423828, 0.05598419189453125, 0.055670783996582034, 0.05597798538208008, 0.056186206817626955, 0.05643945693969726, 0.05610291290283203, 0.05611276626586914, 0.05596601486206055, 0.05539916610717773, 0.05550694274902344, 0.055531169891357424, 0.05584835052490234, 0.05597894287109375, 0.05617657470703125, 0.05574662399291992, 0.0558837776184082, 0.05601052856445313, 0.05577084732055664, 0.055610942840576175, 0.05561644744873047, 0.05590835189819336, 0.056313377380371094, 0.055944961547851564, 0.0563350715637207, 0.05594521713256836, 0.056635391235351565, 0.05622556686401367, 0.05633865737915039, 0.05610905456542969, 0.056231937408447265, 0.05628860855102539, 0.05713782501220703, 0.056281089782714844, 0.05615820693969727, 0.0567275505065918, 0.05616025543212891, 0.05632819366455078, 0.05635887908935547, 0.05663052749633789, 0.05598432159423828, 0.05550723266601563, 0.055624000549316405, 0.05577920150756836, 0.055779457092285156, 0.05546803283691406, 0.055905536651611326, 0.0553724479675293, 0.05547222518920898, 0.05698559951782227, 0.056403297424316406, 0.056021663665771486, 0.060393470764160156, 0.05673984146118164, 0.056174591064453126, 0.0559554557800293, 0.058210304260253906, 0.055976097106933596, 0.05624111938476563, 0.05648239898681641, 0.05628137588500977, 0.05581414413452149, 0.06095177459716797, 0.055773983001708986, 0.05541414260864258, 0.05569395065307617, 0.05661286544799805, 0.05783875274658203, 0.05603359985351562, 0.056207199096679684, 0.05665206527709961, 0.05618115234375, 0.055341056823730465, 0.05628492736816406, 0.056915969848632814, 0.05708211135864258, 0.05704207992553711, 0.0567152328491211, 0.05661356735229492, 0.05643487930297852, 0.05676163101196289, 0.05722390365600586, 0.057220958709716795, 0.058370208740234374, 0.05700751876831055, 0.05674454498291016, 0.05688115310668945, 0.057032703399658206, 0.056049663543701174, 0.05589126586914062, 0.05621830368041992, 0.05565030288696289, 0.05596160125732422, 
0.05610886383056641, 0.05871756744384766, 0.05638156890869141, 0.05605446243286133, 0.05569945526123047, 0.05610662460327148, 0.05554201507568359, 0.05578329467773437, 0.05622761535644531, 0.05625904083251953, 0.05641625595092774, 0.056229183197021484, 0.0566995849609375, 0.05643468856811523, 0.05585504150390625, 0.05567855834960937, 0.056036865234375, 0.05683820724487305, 0.05672643280029297, 0.05629884719848633, 0.056368927001953124, 0.05569331359863281, 0.05580019378662109, 0.055844417572021486, 0.056323009490966795, 0.056370975494384766, 0.05627312088012695, 0.056013824462890625, 0.05616089630126953, 0.0569672966003418, 0.05652096176147461, 0.05667136001586914, 0.05639388656616211, 0.05651529693603516, 0.056346401214599606, 0.05636732864379883, 0.05663948822021484, 0.056378654479980465, 0.05622246551513672, 0.056051681518554684, 0.05654723358154297, 0.05649593734741211]",tokens/s,17.909517885268677,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,23791.407104,13034.78272,0.0,12639.535104,12621.66016,s,1,49.60141796875,49.60141796875,0.0,49.60141796875,49.60141796875,49.60141796875,49.60141796875,[49.60141796875],,kWh,0.00122082873514582,0.00013465903216312272,0.0004628125924720089,0.0018183003597809517,,MB,1213.681664,13802.340352,0.0,13386.121216,13251.628032,s,10,1.4718750915527345,0.14718750915527345,0.0017248891119877854,0.14775521850585938,0.14820942535400392,0.14853420181274413,0.14879402297973632,"[0.1423621063232422, 0.14804925537109376, 0.14636166381835938, 0.14885897827148437, 0.14783363342285155, 0.1471332550048828, 0.1481372528076172, 
0.1476768035888672, 0.14751116943359374, 0.14795097351074218]",tokens/s,1739.2780234492336,kWh,4.296785209620409e-06,4.738581889929287e-07,2.8663544172645927e-06,7.63699781587793e-06,tokens/kWh,33521025.692551002,MB,1231.618048,13909.295104,0.0,13493.075968,13391.3472,s,10,53.44225341796874,5.344225341796876,0.012637173707958678,5.3420732421874995,5.360134765625,5.3652412109375,5.3693263671875,"[5.37034765625, 5.3293544921875, 5.3309423828125, 5.34403759765625, 5.35387060546875, 5.341775390625, 5.359, 5.33952490234375, 5.331029296875, 5.34237109375]",tokens/s,11.788425070193178,kWh,0.00015585074465787838,1.7190843563150976e-05,0.00010353645292713635,0.0002765780411481657,tokens/kWh,227783.81009015194,,s,630,53.4385687026978,0.08482312492491706,0.00091845975627109,0.08465423965454101,0.08543396759033203,0.08608785858154297,0.08827593055725098,"[0.08452223968505859, 0.08503782653808593, 0.08520909118652344, 0.08513945770263671, 0.08509849548339844, 0.08469078063964844, 0.08559017944335938, 0.08568831634521484, 0.08500819396972656, 0.08503929901123047, 0.08496867370605468, 0.08500508880615235, 0.08615465545654297, 0.08596351623535156, 0.08606499481201171, 0.09082470703125, 0.08538054656982422, 0.08464031982421875, 0.08538515472412109, 0.08495311737060547, 0.08470710754394531, 0.08473961639404297, 0.09007587432861328, 0.08823808288574218, 0.08498486328125, 0.08502582550048828, 0.08446355438232422, 0.08479948425292969, 0.08433869171142579, 0.08479888153076172, 0.0847479019165039, 0.08486319732666016, 0.08534083557128906, 0.08483439636230469, 0.08468838500976562, 0.08498982238769531, 0.08499878692626953, 0.08555110168457031, 0.08462950134277344, 0.0873941421508789, 0.08462911987304687, 0.08471142578125, 0.08455017852783203, 0.08523776245117187, 0.08702361297607422, 0.08509645080566407, 0.08510214233398437, 0.08505177307128907, 0.08490220642089844, 0.0850408935546875, 0.08473177337646484, 0.08486502075195312, 0.08484467315673828, 0.0852146224975586, 0.08509276580810547, 0.08478707122802734, 0.08404000091552734, 0.08415580749511718, 0.08454192352294922, 0.08443631744384765, 0.08483100891113281, 0.08455372619628906, 0.08404502105712891, 0.08523951721191406, 0.0846830062866211, 0.08436531066894531, 0.08455712127685547, 0.08417555236816407, 0.0845823974609375, 0.08456185913085937, 0.08452851104736328, 0.08408953857421875, 0.08438579559326172, 0.08431977844238281, 0.08453533172607422, 0.08447840118408204, 0.08457843017578125, 0.0848067855834961, 0.08489446258544922, 0.08469200134277344, 0.08469811248779296, 0.08438985443115235, 0.08463311767578124, 0.08435955047607421, 0.08463593292236328, 0.08436531066894531, 0.08427299499511719, 0.08411033630371094, 0.0847733154296875, 0.08409891510009766, 0.0842425308227539, 0.08402595520019532, 0.0842977294921875, 0.08471379089355469, 0.08501324462890625, 0.08460991668701172, 0.08422201538085937, 0.08453228759765626, 0.0841246109008789, 0.0845164794921875, 0.08438317108154297, 0.08399750518798828, 0.08441664123535156, 0.0841226577758789, 0.08471151733398438, 0.08460108947753907, 0.08533238220214844, 0.08427040100097656, 0.08457718658447265, 0.08473804473876953, 0.08482611083984375, 0.0847667236328125, 0.08466841888427734, 0.08451423645019532, 0.08492835235595703, 0.08474211120605468, 0.0849354248046875, 0.08458755493164062, 0.08576703643798828, 0.08446985626220703, 0.08534786987304688, 0.08466831970214844, 0.08568665313720703, 0.08482425689697265, 0.08457830047607422, 0.08443814086914063, 0.08515408325195313, 0.08458601379394531, 0.08482614135742188, 
0.08432848358154296, 0.08623046112060546, 0.08456192016601563, 0.0840159683227539, 0.08456819152832032, 0.08442249298095703, 0.08395791625976562, 0.08545075225830077, 0.0861299819946289, 0.08612934112548828, 0.08453347015380859, 0.08536383819580078, 0.0847833251953125, 0.0845767059326172, 0.08493875122070313, 0.08452710723876954, 0.0851596450805664, 0.08442908477783204, 0.08453286743164062, 0.08478777313232422, 0.08425583648681641, 0.08460931396484375, 0.0848237762451172, 0.08531161499023437, 0.08411196899414063, 0.08551628875732421, 0.08373875427246094, 0.08409279632568359, 0.0841195526123047, 0.08463507080078125, 0.0842696304321289, 0.08404972839355469, 0.08388630676269532, 0.08464176177978516, 0.08382601928710938, 0.08466860961914062, 0.08435350036621093, 0.08402124786376954, 0.0841891860961914, 0.08431206512451171, 0.0847930908203125, 0.0848285140991211, 0.08419522857666016, 0.08408380889892578, 0.08442562866210937, 0.08490306854248048, 0.08459964752197266, 0.0849336929321289, 0.08447071838378906, 0.0852782745361328, 0.0864886703491211, 0.08453379058837891, 0.08429190063476563, 0.08499814605712891, 0.08359935760498047, 0.0849612808227539, 0.08353177642822265, 0.08396208190917968, 0.08428316497802735, 0.08400077056884765, 0.08558796691894531, 0.0855444793701172, 0.08430230712890625, 0.0850987548828125, 0.08496125030517578, 0.08587964630126953, 0.08421695709228516, 0.08454678344726563, 0.08427782440185547, 0.08470889282226562, 0.08428928375244141, 0.08470809936523438, 0.08445951843261719, 0.0850389404296875, 0.08471279907226563, 0.08534819030761719, 0.08485545349121094, 0.08551618957519531, 0.08516022491455078, 0.08493468475341796, 0.08513340759277344, 0.0853237762451172, 0.08477442932128906, 0.08462957000732421, 0.08430156707763672, 0.08484918212890626, 0.08472998046875, 0.08456569671630859, 0.08434262084960938, 0.08417123413085938, 0.08432588958740235, 0.0843658218383789, 0.08520694732666016, 0.08525628662109375, 0.08454099273681641, 0.08482246398925782, 0.08539552307128906, 0.08422755432128906, 0.084368896484375, 0.08445362854003906, 0.0843575668334961, 0.08403926086425781, 0.08431072235107422, 0.0845660171508789, 0.08452690887451172, 0.085057373046875, 0.08448397064208985, 0.08607587432861329, 0.08559724426269531, 0.0850145263671875, 0.0847955551147461, 0.08478390502929688, 0.08511727905273438, 0.08478275299072266, 0.08486911773681641, 0.08483334350585937, 0.08453772735595703, 0.08634563446044922, 0.08436908721923828, 0.08415740966796875, 0.08565663909912109, 0.08501350402832031, 0.08446355438232422, 0.08960614776611328, 0.08500208282470703, 0.08436137390136719, 0.08413565063476562, 0.0845865249633789, 0.08411100769042969, 0.0837760009765625, 0.08476067352294922, 0.08415846252441406, 0.08432157135009766, 0.084736572265625, 0.08436319732666016, 0.08444882965087891, 0.08541251373291016, 0.08478336334228516, 0.08489266967773437, 0.08500444793701172, 0.09206159973144531, 0.08471382141113282, 0.08499440002441407, 0.08481177520751954, 0.08466022491455077, 0.0845794906616211, 0.0846704330444336, 0.08476557159423828, 0.08478105926513672, 0.08449209594726563, 0.0840352325439453, 0.08474832153320312, 0.0844672622680664, 0.08420035552978515, 0.08473193359375, 0.08498995208740234, 0.08435664367675781, 0.08415280151367187, 0.08440585327148438, 0.08438758087158203, 0.08448265838623047, 0.08434073638916016, 0.08392915344238282, 0.0848603515625, 0.08727110290527344, 0.08570716857910156, 0.08525462341308594, 0.08489545440673828, 0.08431632232666016, 0.0846173095703125, 0.08462499237060547, 
0.08461151885986327, 0.08528419494628907, 0.08530601501464843, 0.08593817901611328, 0.08543231964111328, 0.08508790588378906, 0.0855470733642578, 0.08523168182373046, 0.08544879913330078, 0.08547074890136719, 0.0851236801147461, 0.08512054443359375, 0.08496537780761719, 0.08468732452392579, 0.08449228668212891, 0.08429065704345703, 0.08470345306396485, 0.08431043243408202, 0.08457762908935547, 0.08423865509033203, 0.0843679962158203, 0.0843468780517578, 0.08476262664794922, 0.08436729431152344, 0.08451705932617187, 0.08439180755615235, 0.08472492980957032, 0.08441734313964844, 0.08471756744384766, 0.08462540435791016, 0.08465382385253906, 0.0848611831665039, 0.08439997100830078, 0.08456617736816406, 0.08517427062988281, 0.084748291015625, 0.08503091430664063, 0.084947998046875, 0.08512786865234374, 0.0846454086303711, 0.08486573028564454, 0.08514508819580079, 0.08465465545654297, 0.08454045104980469, 0.08417903900146484, 0.08415116882324218, 0.08434483337402343, 0.08432425689697266, 0.08442684936523437, 0.08514559936523437, 0.08495426940917969, 0.0854835205078125, 0.08441942596435546, 0.08438294219970703, 0.08421660614013672, 0.0845638427734375, 0.08443507385253907, 0.0852459487915039, 0.08421552276611328, 0.08457654571533203, 0.08692092895507812, 0.08501277160644531, 0.08465574645996093, 0.0844307861328125, 0.08467443084716797, 0.08537554931640624, 0.08760684967041016, 0.0850948486328125, 0.08616960144042969, 0.08490140533447266, 0.08496790313720703, 0.08470848083496094, 0.08459046173095704, 0.08516022491455078, 0.085036865234375, 0.0852673568725586, 0.08450048065185548, 0.08454057312011719, 0.0844516830444336, 0.08513724517822266, 0.08445536041259766, 0.08498812866210938, 0.08450457763671874, 0.08462335968017579, 0.08682291412353516, 0.08411036682128906, 0.08430486297607422, 0.08473526763916016, 0.08452783966064453, 0.08416255950927734, 0.08458383941650391, 0.08456204986572266, 0.08466009521484374, 0.08472637176513671, 0.08430592346191407, 0.08522496032714844, 0.0848465576171875, 0.08513938903808593, 0.08497315216064454, 0.08535958099365235, 0.08509375762939453, 0.08505001831054687, 0.08486243438720703, 0.08521923065185547, 0.08518022155761719, 0.08507475280761718, 0.08475033569335938, 0.08737910461425781, 0.08528304290771484, 0.08454412841796875, 0.08489369964599609, 0.08528076934814453, 0.08436479949951171, 0.0840791015625, 0.08409513854980469, 0.08431104278564452, 0.0844250259399414, 0.08453107452392578, 0.08431068420410157, 0.08488662719726563, 0.08443993377685546, 0.08471965026855469, 0.08469821166992188, 0.08480655670166015, 0.08564530944824218, 0.08490393829345703, 0.08448716735839844, 0.08445849609375, 0.08468889617919922, 0.08467046356201172, 0.08464956665039063, 0.08451522827148437, 0.08785919952392578, 0.09575730895996094, 0.08576236724853516, 0.08500492858886718, 0.08487942504882813, 0.0846929931640625, 0.08548556518554687, 0.08474755096435548, 0.08492227172851563, 0.08444345855712891, 0.0844411849975586, 0.08381478118896485, 0.08436685180664062, 0.08399504089355468, 0.08408438110351563, 0.08421625518798828, 0.0837872314453125, 0.08375894165039062, 0.08468275451660157, 0.0842574691772461, 0.08371814727783203, 0.08382038116455078, 0.08391081237792969, 0.08598643493652344, 0.0845771484375, 0.08440422058105469, 0.08389968109130859, 0.08453517150878906, 0.08425888061523437, 0.08451971435546875, 0.08470124816894531, 0.08433657836914063, 0.08466432189941406, 0.0862003173828125, 0.08478672027587891, 0.08445308685302734, 0.08480844879150391, 0.0845159683227539, 0.08567804718017578, 
0.08452950286865234, 0.08483427429199218, 0.08473836517333984, 0.0853752670288086, 0.0862208023071289, 0.0841338882446289, 0.08438301086425781, 0.08409126281738281, 0.08410899353027344, 0.0846322250366211, 0.08435692596435547, 0.08517606353759766, 0.08556793975830078, 0.08453529357910156, 0.08495414733886719, 0.08472297668457031, 0.08438751983642578, 0.08433602905273438, 0.08476703643798827, 0.08453548431396485, 0.08457430267333985, 0.0847132797241211, 0.0875331802368164, 0.08487967681884766, 0.08482147216796875, 0.08497183990478516, 0.08501091003417968, 0.08546006774902344, 0.08496425628662109, 0.08786739349365234, 0.08609766387939453, 0.08510079956054688, 0.08511666870117188, 0.08513772583007813, 0.08453123474121094, 0.08424652862548829, 0.08432844543457031, 0.08433641815185547, 0.08393341064453125, 0.08407997131347657, 0.0851851806640625, 0.08426815795898437, 0.08445996856689453, 0.08453778839111328, 0.08435842895507813, 0.0842732162475586, 0.08393590545654298, 0.08402665710449218, 0.08449440002441407, 0.0843229751586914, 0.08525583648681641, 0.08744175720214843, 0.08511484527587891, 0.08447138977050782, 0.08458489227294921, 0.08469692993164063, 0.08432793426513673, 0.0844183349609375, 0.08443379211425782, 0.0842239990234375, 0.08507990264892579, 0.08483206176757813, 0.08468720245361328, 0.08452217864990234, 0.08538809967041015, 0.08479948425292969, 0.08507917022705078, 0.08466310119628906, 0.08700137329101562, 0.08420534515380859, 0.08411917114257812, 0.08420185852050781, 0.0845082550048828, 0.08486470031738282, 0.08424521636962891, 0.0842260513305664, 0.08479129791259765, 0.08438505554199219, 0.08432508850097656, 0.08423423767089844, 0.08434893035888671, 0.08440217590332032, 0.0841739501953125, 0.08410546875, 0.08451033782958985, 0.08460784149169921, 0.08428150177001953, 0.08465139007568359, 0.08558451080322266, 0.08434585571289062, 0.08458137512207031, 0.08513906860351562, 0.08440214538574219, 0.0845848617553711, 0.08480697631835937, 0.08451142120361328, 0.08524816131591798, 0.08657020568847656, 0.085751708984375, 0.08510553741455078, 0.08479350280761719, 0.08519209289550782, 0.0848687973022461, 0.08408332824707031, 0.08376537322998047, 0.08448000335693359, 0.0842608642578125, 0.08430182647705078, 0.08447590637207031, 0.08415245056152344, 0.0847806396484375, 0.08450895690917969, 0.08459398651123047, 0.08467935943603516, 0.084264892578125, 0.0845578842163086, 0.08441551971435547, 0.08414415740966796, 0.08913180541992187, 0.08433023834228516, 0.08436946868896485, 0.08439334106445312, 0.08516675567626954, 0.08471177673339844, 0.08474604797363282, 0.0849977569580078, 0.08829138946533203, 0.0850436782836914, 0.08476876831054687, 0.08482546997070313, 0.08498035430908203, 0.08464588928222656, 0.0847831039428711, 0.08500969696044922, 0.08506851196289063, 0.08449801635742188, 0.08463401794433593, 0.08394342041015625, 0.08387510681152344, 0.08371887969970702, 0.08456566619873047, 0.08458480072021485, 0.08447334289550781, 0.08426127624511719, 0.08717033386230469, 0.08496771240234376, 0.08502742767333984, 0.08464118194580078, 0.08469923400878906, 0.08427155303955078, 0.08434806060791016, 0.08403030395507813, 0.08417894744873047, 0.08439762878417968, 0.084224609375, 0.08436457824707032, 0.08451894378662109, 0.08506342315673829, 0.0852242202758789]",tokens/s,11.789237909888024,, 
8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = 
old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.13 GiB is free. Process 64735 has 13.61 GiB memory in use. Of the allocated memory 13.36 GiB is allocated by PyTorch, and 136.95 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,MB,13953.24928,7826.440192,0.0,7423.91808,7411.122688,s,1,31.94983203125,31.94983203125,0.0,31.94983203125,31.94983203125,31.94983203125,31.94983203125,[31.94983203125],,kWh,0.0007114789207167102,7.847436152411221e-05,0.0002388310243980063,0.0010287843066388287,,MB,1316.6592,8279.425024,0.0,7855.931392,7802.533376,s,10,0.9396794586181642,0.09396794586181641,0.00017294943889326983,0.09390403366088867,0.09417669677734375,0.09422764587402344,0.09426840515136718,"[0.09375631713867187, 0.09375164794921875, 0.0940621109008789, 0.09387548828125, 0.09384038543701172, 0.0939109115600586, 0.09389715576171875, 0.09414147186279297, 0.09416537475585937, 0.09427859497070312]",tokens/s,2724.3332569646477,kWh,2.808306084174843e-06,3.0970767160705603e-07,1.8724961561153762e-06,4.9905099118972755e-06,tokens/kWh,51297363.29942981,MB,1360.244736,8279.425024,0.0,7855.931392,7739.936768,s,10,48.30721679687501,4.8307216796875,0.06294948348256883,4.810244628906251,4.91259892578125,4.94343447265625,4.96810291015625,"[4.76452099609375, 4.81194775390625, 4.7942509765625, 4.80854150390625, 4.75876513671875, 4.79973828125, 4.90574658203125, 4.83236962890625, 4.97427001953125, 4.85706591796875]",tokens/s,13.041529646575599,kWh,0.00013954796829374026,1.53925467126019e-05,7.377436991688548e-05,0.00022871488492322766,tokens/kWh,275452.1203162929,,s,630,48.30443121337891,0.07667370033869668,0.0014629467739648407,0.07611137390136719,0.07898550567626954,0.07956011695861816,0.08060690048217774,"[0.07607500457763672, 0.07569564819335937, 0.07496546936035156, 0.07499775695800781, 0.07515135955810547, 0.07491558074951171, 0.07504886627197266, 0.07534422302246094, 0.07530290985107421, 0.07605846405029297, 0.07582889556884766, 0.07625369262695313, 0.0757162857055664, 0.07558995056152344, 0.07540102386474609, 0.07936019134521484, 0.07543961334228516, 0.07576831817626953, 0.07614669036865235, 0.07519436645507813, 0.07496672058105469, 0.07561862182617188, 0.07524761962890625, 0.0759705581665039, 0.07791206359863281, 0.07533596801757812, 0.0754869155883789, 0.075349853515625, 0.07515360260009765, 0.07527935791015625, 0.07518540954589843, 0.07516451263427734, 0.07489628601074219, 0.07487884521484375, 0.07512265777587891, 0.07511055755615234, 0.07559603118896484, 0.075525634765625, 0.07501644897460938, 0.0749627227783203, 0.07523897552490234, 0.07520118713378907, 0.07491580963134765, 0.07566297912597657, 0.07513884735107422, 0.0752864990234375, 
0.07581110382080078, 0.07556121826171874, 0.07624713897705078, 0.07519849395751953, 0.07608934020996094, 0.07577718353271484, 0.07617212677001953, 0.07615487670898438, 0.0758287353515625, 0.07578675079345704, 0.07599008178710938, 0.0766760025024414, 0.07577808380126953, 0.07602995300292968, 0.07546880340576172, 0.0762449951171875, 0.07595008087158203, 0.077572509765625, 0.075810302734375, 0.08309439849853516, 0.07833177947998046, 0.07832160186767578, 0.07601165008544922, 0.07584512329101563, 0.07590764617919922, 0.07706114959716796, 0.07579449462890625, 0.07592185974121093, 0.07585225677490234, 0.07602957153320312, 0.07643606567382813, 0.07612518310546874, 0.07599161529541015, 0.07584381103515625, 0.07602585601806641, 0.0757078094482422, 0.07608547210693359, 0.07590745544433594, 0.07995597076416015, 0.07690592193603515, 0.07611654663085937, 0.07585997009277344, 0.07601129913330078, 0.07825174713134765, 0.07634150695800782, 0.07609977722167968, 0.07598700714111328, 0.07587225341796874, 0.07554252624511719, 0.07574086761474609, 0.07578224182128906, 0.07592982482910156, 0.07558553314208985, 0.07606886291503906, 0.075757568359375, 0.07570207977294922, 0.07698818969726562, 0.07619420623779297, 0.07614463806152344, 0.0771436767578125, 0.07580095672607422, 0.07625113677978515, 0.07576067352294921, 0.07578460693359375, 0.07557331085205078, 0.07584819030761719, 0.07612124633789062, 0.0789471664428711, 0.0757960968017578, 0.07633958435058594, 0.07597657775878906, 0.07610380554199218, 0.07587830352783204, 0.0760792007446289, 0.07589644622802734, 0.07598118591308593, 0.07559983825683594, 0.07593987274169922, 0.07604966735839844, 0.07627625274658204, 0.0764722900390625, 0.07581820678710938, 0.07639260864257813, 0.0763655014038086, 0.07606719970703125, 0.07629881286621094, 0.07634534454345702, 0.0762982406616211, 0.07558294677734376, 0.07551017761230469, 0.07543401336669922, 0.07553852844238282, 0.07547007751464843, 0.07666115570068359, 0.07581526184082031, 0.07580876922607421, 0.0760400619506836, 0.07655020904541016, 0.07591292572021484, 0.07642556762695313, 0.07615692901611328, 0.0763719711303711, 0.07594502258300781, 0.07607568359375, 0.07647261047363281, 0.07599513244628907, 0.075720703125, 0.07661158752441406, 0.07679366302490234, 0.07590930938720702, 0.07651737976074219, 0.07603517150878907, 0.07583222198486328, 0.07562566375732421, 0.07576191711425781, 0.07564141082763672, 0.07613587188720704, 0.07617699432373047, 0.0760874252319336, 0.07593244934082032, 0.07616722869873047, 0.07634226989746094, 0.07576882934570313, 0.07585791778564453, 0.07593984222412109, 0.07561007690429687, 0.07726604461669923, 0.07570476531982422, 0.07597001647949218, 0.07601766204833985, 0.07672914886474609, 0.07639263916015625, 0.07637964630126953, 0.076063232421875, 0.07607705688476563, 0.07617945861816407, 0.07590207672119141, 0.0763298568725586, 0.07646988677978515, 0.07587577819824219, 0.07610054779052734, 0.07611494445800782, 0.07611084747314453, 0.07649737548828126, 0.07639036560058594, 0.07574940490722656, 0.07631462097167968, 0.07607295989990234, 0.07781369781494141, 0.07907894134521484, 0.07741811370849609, 0.07576873779296875, 0.07556095886230468, 0.07587020874023437, 0.07739801788330078, 0.07601718139648438, 0.07576796722412109, 0.07564508819580078, 0.07756816101074218, 0.07595571136474609, 0.07832157135009765, 0.076623779296875, 0.07627426910400391, 0.07639049530029297, 0.07621756744384765, 0.07594294738769532, 0.07593138885498046, 0.07868825531005859, 0.07606905364990234, 0.07619769287109375, 
0.07590707397460937, 0.07680592346191406, 0.07793856048583984, 0.07654182434082031, 0.07640930938720703, 0.07623423767089844, 0.07608329772949218, 0.07592797088623048, 0.07546390533447266, 0.07616796875, 0.07541350555419922, 0.07581005096435547, 0.07620489501953125, 0.07551171112060547, 0.07661363220214844, 0.07647026824951172, 0.0753623046875, 0.07571660614013671, 0.07558553314208985, 0.07588803100585938, 0.07583190155029297, 0.07601055908203125, 0.07626847839355469, 0.07608319854736328, 0.075978271484375, 0.07625775909423828, 0.07947058868408204, 0.07567769622802735, 0.0765665283203125, 0.07540531158447265, 0.07672959899902344, 0.07572351837158203, 0.07561215972900391, 0.07530419158935547, 0.07586227416992188, 0.07588886260986329, 0.0762060775756836, 0.07551590728759766, 0.07596851348876953, 0.07641875457763672, 0.0760396499633789, 0.07552819061279296, 0.07671590423583985, 0.07590191650390625, 0.07551385498046875, 0.07570614624023438, 0.07539529418945312, 0.07534591674804687, 0.07547618865966797, 0.0753978271484375, 0.07584162902832031, 0.07522509002685547, 0.07545219421386719, 0.075716796875, 0.07529475402832031, 0.07578828430175781, 0.07546636962890625, 0.07583987426757813, 0.07606832122802734, 0.07648889923095703, 0.07636819458007812, 0.0757125473022461, 0.07595212554931641, 0.07595574188232422, 0.07583586883544922, 0.07563263702392578, 0.07570764923095703, 0.07524765014648438, 0.07526787567138672, 0.07554463958740235, 0.07513340759277344, 0.07530332946777343, 0.07518822479248047, 0.0750706558227539, 0.0753139877319336, 0.07515071868896485, 0.07523795318603516, 0.07513606262207032, 0.0753039321899414, 0.07529472351074219, 0.07535135650634765, 0.07510086059570313, 0.07544576263427734, 0.07516825866699219, 0.07514701080322266, 0.07526834869384766, 0.07522022247314453, 0.07513279724121094, 0.07523007965087891, 0.07511446380615235, 0.07530908966064453, 0.07499161529541015, 0.07564466857910156, 0.0754751968383789, 0.07512451171875, 0.07622643280029297, 0.07542205047607421, 0.07518617248535156, 0.07527410888671875, 0.07636192321777344, 0.07615283203125, 0.07603593444824219, 0.07590879821777344, 0.07571913909912109, 0.07543193817138671, 0.07593164825439454, 0.07562649536132812, 0.07789100646972656, 0.07688201904296875, 0.07667555236816406, 0.07580870056152343, 0.075976318359375, 0.07552799987792969, 0.07637404632568359, 0.07572322845458984, 0.07570649719238282, 0.07612355041503906, 0.075761474609375, 0.07608604431152344, 0.07587401580810547, 0.07609942626953126, 0.07614224243164062, 0.07559417724609375, 0.07562889862060547, 0.07564288330078126, 0.07537798309326171, 0.07864800262451171, 0.07576780700683594, 0.07588658905029297, 0.07586370849609375, 0.07599139404296874, 0.07695065307617187, 0.07640953826904297, 0.07617964935302735, 0.07626131439208984, 0.07622866821289062, 0.07631667327880859, 0.07633094024658203, 0.07602387237548829, 0.07613986968994141, 0.0758885726928711, 0.0763845443725586, 0.07619219207763672, 0.07593369293212891, 0.07626898956298828, 0.07596908569335938, 0.07576537322998046, 0.07660173034667969, 0.07641907501220703, 0.07610128021240234, 0.0759115219116211, 0.07551385498046875, 0.07553817749023438, 0.07546905517578124, 0.07572480010986328, 0.07575142669677734, 0.08336924743652344, 0.07570915222167969, 0.07591935729980469, 0.07593574523925781, 0.07613382720947266, 0.0758953628540039, 0.07783417510986328, 0.07695311737060546, 0.07651785278320312, 0.07644351959228515, 0.07701107025146485, 0.07664604949951172, 0.07641728210449218, 0.07637001800537109, 0.07619728088378906, 
0.07626188659667969, 0.07679158020019532, 0.0763354263305664, 0.07601312255859374, 0.07609798431396485, 0.0760074234008789, 0.07593164825439454, 0.0757022705078125, 0.07593084716796875, 0.07582720184326172, 0.07631951904296876, 0.07695565032958984, 0.07632262420654297, 0.07669709014892578, 0.07625548553466797, 0.07632940673828124, 0.07668701171875, 0.07712802886962891, 0.08061673736572265, 0.07801519775390625, 0.07893196868896485, 0.07872921752929687, 0.07862271881103515, 0.07846623992919922, 0.07809270477294922, 0.07871324920654296, 0.07898521423339844, 0.07931494140625, 0.07899929809570312, 0.07960784149169922, 0.07901321411132813, 0.0786825942993164, 0.07973289489746094, 0.07969519805908203, 0.08072694396972656, 0.07956479644775391, 0.07936921691894532, 0.07926681518554687, 0.07855923461914062, 0.07799791717529297, 0.07846659088134765, 0.07851888275146485, 0.0802734375, 0.07940300750732422, 0.07881318664550781, 0.07864230346679688, 0.07900045013427734, 0.07827974700927734, 0.07830828857421875, 0.07930630493164062, 0.07858134460449219, 0.07812710571289062, 0.07771017456054688, 0.07833372497558594, 0.07932463836669922, 0.07959331512451172, 0.07791715240478515, 0.07806124877929688, 0.07855545806884766, 0.0791756820678711, 0.07881318664550781, 0.07900160217285156, 0.07712153625488281, 0.07918109130859376, 0.0783674545288086, 0.07991283416748046, 0.07598445129394531, 0.07684143829345703, 0.07886835479736329, 0.07741862487792969, 0.07579657745361328, 0.07561830139160156, 0.07574323272705077, 0.07578419494628906, 0.07580435180664062, 0.07534214019775391, 0.07557939147949219, 0.07573299407958985, 0.0752451171875, 0.0753504638671875, 0.07558515167236328, 0.07620838165283203, 0.0778460464477539, 0.07608172607421874, 0.07611190032958984, 0.07537254333496093, 0.0755384292602539, 0.076548095703125, 0.0758292465209961, 0.07544972991943359, 0.0756578598022461, 0.0753623046875, 0.07525965118408204, 0.07547097778320312, 0.07541340637207031, 0.07554271697998047, 0.0757965087890625, 0.07587020874023437, 0.07601766204833985, 0.0764067840576172, 0.07674470520019532, 0.07613030242919921, 0.076548095703125, 0.0776800308227539, 0.0761882553100586, 0.0764659194946289, 0.07610377502441407, 0.07591529846191407, 0.07594802856445312, 0.07614035034179688, 0.07636614227294922, 0.07634902191162109, 0.07953858947753906, 0.0769201889038086, 0.07687641906738281, 0.07788543701171875, 0.07677337646484375, 0.07895442962646484, 0.07887872314453125, 0.07792639923095702, 0.07849574279785156, 0.07815577697753906, 0.07892991638183594, 0.0795791015625, 0.07985343933105468, 0.08058281707763672, 0.07910399627685546, 0.07905206298828125, 0.07965360260009766, 0.07909942626953125, 0.07872764587402344, 0.07910185241699219, 0.07967958068847657, 0.0791900177001953, 0.07892562866210938, 0.07787741088867188, 0.0785304946899414, 0.07829513549804687, 0.07909571075439453, 0.07860137939453125, 0.07814240264892579, 0.07955439758300781, 0.0784156494140625, 0.0781929931640625, 0.07882530975341796, 0.07802194976806641, 0.07881203460693359, 0.07914495849609375, 0.07934976196289062, 0.07855923461914062, 0.07930786895751953, 0.07973490905761718, 0.07785142517089844, 0.07847679901123047, 0.07735552215576172, 0.07810860443115235, 0.07840326690673828, 0.07995635223388672, 0.07858771514892578, 0.07851014709472656, 0.07953939056396485, 0.07967839813232422, 0.07948902130126953, 0.08034441375732422, 0.07960643005371094, 0.07952540588378906, 0.08076541137695313, 0.08036061096191406, 0.08126140594482421, 0.07969996643066406, 0.07895203399658203, 
0.07890166473388673, 0.07956646728515625, 0.07932505798339844, 0.0780927963256836, 0.08044134521484375, 0.07775411224365235, 0.07710540771484375, 0.07682457733154296, 0.07714959716796875, 0.08023776245117188, 0.07978272247314454, 0.07799398040771484, 0.07802671813964844, 0.07791824340820312, 0.0780800018310547, 0.07784038543701172, 0.07783628845214843, 0.07751033782958984, 0.08031980895996094, 0.07813632202148438, 0.07863705444335937, 0.07621017456054688, 0.07702735900878906, 0.07598486328125, 0.07698841857910156, 0.0771296615600586, 0.07814559936523438, 0.07709033966064453, 0.0771732177734375, 0.07642928314208984, 0.07612560272216796, 0.07680419158935547, 0.0768927001953125, 0.07635763549804687, 0.07628492736816406, 0.07672115325927735, 0.08716201782226562, 0.07898812866210937, 0.07749836730957031, 0.07643651580810547, 0.07699958038330078, 0.07631827545166016, 0.07888658905029297, 0.07635231781005859, 0.0760995864868164, 0.07677747344970703, 0.07598019409179688, 0.07603011322021484, 0.07663046264648438, 0.07587385559082031, 0.07641305541992187, 0.07660371398925782, 0.07675462341308593, 0.0757410888671875, 0.07667139434814453, 0.07664617919921875, 0.07608470153808594, 0.07654271697998047, 0.0761364517211914, 0.07551590728759766, 0.07566336059570312, 0.07655423736572266, 0.07569990539550782, 0.07572102355957032, 0.07565280151367187, 0.07609580993652344, 0.07551763153076171, 0.07808236694335938, 0.07781289672851563, 0.07562531280517579, 0.07579443359375, 0.07574732971191406]",tokens/s,13.042281715668118,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,3203.125248,1350.434816,0.0,947.912704,879.697408,s,1,10.6864189453125,10.6864189453125,0.0,10.6864189453125,10.6864189453125,10.6864189453125,10.6864189453125,[10.6864189453125],,kWh,9.595316327918226e-05,1.0576686068796063e-05,3.0444191022022116e-05,0.00013697404037000044,,MB,3253.305344,1516.109824,0.0,1092.616192,1013.619712,s,10,0.7730811157226564,0.07730811157226564,0.0009299783582437096,0.07702891540527344,0.07791660232543945,0.07885359535217284,0.07960318977355957,"[0.07663017272949219, 0.07684194946289062, 0.07680137634277344, 0.07703600311279298, 0.07732262420654297, 0.07770838165283203, 0.07702182769775391, 0.07767088317871093, 0.07979058837890625, 0.07625730895996094]",tokens/s,3311.424827143757,kWh,2.2795989352860128e-06,2.513998901536754e-07,1.0028805766094072e-06,3.5338794020490955e-06,tokens/kWh,72441634.49707994,MB,3253.305344,1662.910464,0.0,1239.416832,1014.605312,s,10,47.7961435546875,4.77961435546875,0.014727232558166863,4.77805322265625,4.802131884765625,4.805575219726562,4.808329887695312,"[4.75874609375, 4.780072265625, 4.7816396484375, 4.7760341796875, 4.80136669921875, 4.77441015625, 4.7819189453125, 4.8090185546875, 4.76596484375, 
4.76697216796875]",tokens/s,13.18097974325408,kWh,0.00014061680267887265,1.5510411306670315e-05,4.828982552418865e-05,0.00020441703950973165,tokens/kWh,308193.48597894533,,s,630,47.79013351440428,0.07585735478476874,0.000866323976655964,0.07562068939208985,0.07681070251464844,0.0774073486328125,0.07947453720092773,"[0.07515545654296875, 0.07549887847900391, 0.07570928192138672, 0.07520438385009766, 0.07522509002685547, 0.0754375991821289, 0.0749224624633789, 0.0754268798828125, 0.0755923843383789, 0.0751045150756836, 0.07583948516845704, 0.0754544677734375, 0.07560601806640625, 0.07648274993896484, 0.07603385925292969, 0.07549903869628906, 0.07523990631103515, 0.07488716888427735, 0.07488671875, 0.0754653091430664, 0.07594992065429687, 0.07543603515625, 0.07549337768554687, 0.07536415863037109, 0.07535430145263672, 0.07493590545654297, 0.07512310028076172, 0.07593494415283203, 0.07539766693115234, 0.0750182113647461, 0.07488902282714843, 0.0751231689453125, 0.07523868560791015, 0.07491862487792969, 0.07475814056396485, 0.07556841278076172, 0.07737369537353515, 0.07608367919921875, 0.07726473236083985, 0.07581724548339844, 0.07594960021972656, 0.07670841979980468, 0.07586998748779297, 0.07533366394042969, 0.07507555389404297, 0.07530252838134766, 0.07515487670898438, 0.07521932983398437, 0.07516815948486329, 0.0785841293334961, 0.07601277160644532, 0.07511923217773438, 0.07483187103271484, 0.07517798614501953, 0.07513401794433594, 0.07537350463867187, 0.0756566390991211, 0.07516738891601563, 0.0755447006225586, 0.07529961395263672, 0.07534732818603515, 0.07524620819091797, 0.0760791015625, 0.07495692443847657, 0.07487693023681641, 0.08015462493896484, 0.07583126068115234, 0.07578534698486328, 0.07514409637451172, 0.07513292694091797, 0.07534591674804687, 0.07559104156494141, 0.07519481658935546, 0.07541932678222656, 0.07537039947509766, 0.07638444519042968, 0.07609590148925781, 0.07624703979492188, 0.07635517120361328, 0.07612083435058593, 0.07590614318847656, 0.07537865447998048, 0.07523798370361329, 0.07515340423583984, 0.0754974365234375, 0.07545356750488282, 0.07542281341552734, 0.07564883422851562, 0.07558262634277343, 0.07566809844970702, 0.07561647796630859, 0.07566950225830078, 0.07560979461669921, 0.0755960922241211, 0.07513906860351563, 0.07511971282958985, 0.07505516815185546, 0.07517810821533204, 0.0750906524658203, 0.07512889862060547, 0.07517382049560548, 0.07500316619873047, 0.0753590087890625, 0.07546694183349609, 0.07508761596679688, 0.07868211364746094, 0.07646924591064454, 0.07647261047363281, 0.07568470764160157, 0.07661325073242188, 0.07736287689208984, 0.07588307189941407, 0.07559986877441406, 0.07563673400878906, 0.07544150543212891, 0.07617193603515625, 0.07836637115478516, 0.07614323425292968, 0.07592931365966797, 0.07865760040283203, 0.07648172760009765, 0.07584025573730468, 0.07623808288574219, 0.07577267456054687, 0.075757568359375, 0.07607315063476562, 0.07574073791503906, 0.07524384307861329, 0.07572268676757812, 0.07546240234375, 0.07503276824951172, 0.07579046630859375, 0.07729561614990234, 0.07504249572753906, 0.07539532470703125, 0.07624915313720704, 0.07498957061767578, 0.07489119720458984, 0.07546886444091797, 0.07590316772460938, 0.07538470458984375, 0.07590409851074219, 0.07642403411865234, 0.07668736267089844, 0.07606681823730468, 0.07651513671875, 0.07576799774169922, 0.07567155456542969, 0.07557484436035156, 0.07543196868896485, 0.07552979278564453, 0.07541635131835937, 0.07617120361328125, 0.07613862609863281, 0.07588044738769531, 
0.0756490249633789, 0.07599513244628907, 0.07608934020996094, 0.07613030242919921, 0.07552227020263672, 0.07597853088378906, 0.07661567687988281, 0.07615673828125, 0.07547513580322265, 0.07512908935546875, 0.07532927703857421, 0.07649485015869141, 0.07662127685546875, 0.07823567962646484, 0.07617791748046875, 0.07618508911132812, 0.0789754867553711, 0.07643750762939452, 0.0759214096069336, 0.07540646362304687, 0.07552480316162109, 0.07546387481689452, 0.07514653015136719, 0.07521222686767579, 0.0756902084350586, 0.07890742492675781, 0.07538262176513671, 0.07521708679199218, 0.07568793487548828, 0.07612793731689453, 0.07561567687988281, 0.07512531280517579, 0.07503699493408203, 0.07565106964111327, 0.07491350555419922, 0.0752470703125, 0.075032958984375, 0.07511260986328125, 0.07500406646728516, 0.07490518188476562, 0.07509884643554687, 0.07593910217285156, 0.07521174621582032, 0.0750216293334961, 0.07512127685546875, 0.07497481536865234, 0.0750322265625, 0.07520518493652344, 0.07632281494140625, 0.07619379425048828, 0.07584767913818359, 0.07559782409667969, 0.07544172668457032, 0.07517174530029297, 0.075330078125, 0.0760606689453125, 0.07544012451171875, 0.07592720031738281, 0.07510368347167969, 0.07562969970703125, 0.07542470550537109, 0.07583625793457031, 0.0753031997680664, 0.07510704040527344, 0.07558860778808593, 0.07604656219482422, 0.07544400024414062, 0.07516118621826172, 0.07599696350097657, 0.07596406555175782, 0.07543901062011718, 0.07564096069335938, 0.07538236999511719, 0.07569852447509766, 0.07593164825439454, 0.0760626220703125, 0.07559990692138673, 0.07547910308837891, 0.07517705535888672, 0.07573596954345703, 0.07815167999267578, 0.07845887756347657, 0.07759065246582031, 0.07766000366210937, 0.07693119812011719, 0.07689206695556641, 0.07606626892089843, 0.07576246643066406, 0.07545958709716796, 0.07623347473144532, 0.0766197738647461, 0.0771973114013672, 0.07655833435058594, 0.07568508911132812, 0.07663638305664063, 0.07608579254150391, 0.07560323333740235, 0.07498957061767578, 0.07531110382080078, 0.07519641876220703, 0.0754977264404297, 0.07526374053955077, 0.07520441436767578, 0.07511027526855468, 0.07634156799316406, 0.07964466857910156, 0.07560192108154297, 0.07565721893310547, 0.07549504089355469, 0.07534982299804688, 0.0758583984375, 0.07585596466064454, 0.07620403289794922, 0.07949107360839844, 0.0768571548461914, 0.07574240112304688, 0.07592652893066407, 0.07681024169921875, 0.07637811279296874, 0.07626454162597657, 0.07630726623535156, 0.07928969573974609, 0.07581327819824218, 0.07615523529052734, 0.07533977508544921, 0.0754544677734375, 0.07570646667480468, 0.07555702209472656, 0.0761976318359375, 0.07560147094726563, 0.07528044891357422, 0.07634111785888673, 0.07558809661865235, 0.07573452758789062, 0.07580012512207031, 0.07691171264648437, 0.07685104370117188, 0.07653708648681641, 0.07563314819335938, 0.0756591033935547, 0.0761860122680664, 0.07690592193603515, 0.07636345672607422, 0.07943405151367187, 0.07649359893798828, 0.07672576141357422, 0.07658515167236328, 0.07710115051269531, 0.07576306915283203, 0.07581350708007813, 0.07551129913330078, 0.07549183654785156, 0.07591935729980469, 0.07563017272949218, 0.0761184310913086, 0.0799805450439453, 0.07575961303710937, 0.07536640167236328, 0.0757392349243164, 0.07605443572998047, 0.07562086486816406, 0.07630207824707032, 0.07641932678222656, 0.07553228759765625, 0.07517183685302735, 0.07548515319824219, 0.07533980560302735, 0.07537052917480469, 0.07494242858886718, 0.07511427307128907, 0.07574345397949218, 
0.07529881286621094, 0.07510749053955078, 0.0758993911743164, 0.07579843139648437, 0.07683526611328124, 0.07667097473144531, 0.08137113952636718, 0.07589711761474609, 0.07525142669677734, 0.0753602523803711, 0.07528361511230469, 0.07520137786865234, 0.07538278198242188, 0.07553228759765625, 0.0753807373046875, 0.07551999664306641, 0.07526000213623046, 0.0761363525390625, 0.07572659301757813, 0.0753338851928711, 0.07602095794677734, 0.07543807983398437, 0.07545935821533203, 0.07544940948486328, 0.07515360260009765, 0.07517056274414062, 0.07531110382080078, 0.07601897430419922, 0.07600518035888672, 0.07557968139648437, 0.0755298843383789, 0.07528870391845703, 0.07615779113769532, 0.07526934051513672, 0.07500998687744141, 0.075170654296875, 0.07492998504638672, 0.07502483367919922, 0.07802371215820313, 0.07618211364746094, 0.07612019348144532, 0.07561174774169922, 0.07608982086181641, 0.07673017883300781, 0.07645196533203125, 0.07575312042236328, 0.075802978515625, 0.07554659271240234, 0.07523334503173829, 0.07721289825439454, 0.07628233337402343, 0.0755216293334961, 0.07615283203125, 0.07525990295410157, 0.07534544372558594, 0.07528822326660156, 0.07555769348144531, 0.07550678253173829, 0.07608003234863281, 0.07539868927001953, 0.0752171859741211, 0.07508956909179687, 0.07480147552490235, 0.0752580795288086, 0.07576780700683594, 0.07682252502441406, 0.07602790069580079, 0.07575977325439454, 0.07596543884277343, 0.07722480010986328, 0.07527584075927735, 0.07578166198730468, 0.0759869155883789, 0.07537350463867187, 0.07544012451171875, 0.0756836166381836, 0.07668348693847657, 0.0758187484741211, 0.07556531524658203, 0.07558739471435547, 0.07558367919921875, 0.0756019515991211, 0.07543772888183593, 0.07536982727050781, 0.07512937927246094, 0.07631712341308594, 0.07556690979003906, 0.07553993225097656, 0.07530774688720702, 0.07525785827636719, 0.07589641571044922, 0.07539958190917968, 0.07584358215332031, 0.07536166381835938, 0.07550016021728516, 0.07551129913330078, 0.07524403381347657, 0.07507305908203125, 0.075299072265625, 0.07666499328613281, 0.07695571136474609, 0.07651936340332031, 0.0790733413696289, 0.07594598388671875, 0.0768749771118164, 0.07668406677246094, 0.07624114990234375, 0.07719487762451172, 0.07621437072753906, 0.07548844909667969, 0.07536940765380859, 0.07612611389160157, 0.07618073272705078, 0.07545523071289062, 0.08013970947265625, 0.07548178863525391, 0.07621222686767579, 0.07675039672851562, 0.07647232055664062, 0.07707030487060547, 0.07760124969482422, 0.07672563171386719, 0.07705868530273438, 0.07711888122558594, 0.07743488311767578, 0.07674518585205078, 0.07770275115966797, 0.07828125, 0.0761256332397461, 0.07652214050292969, 0.07659305572509766, 0.07631235504150391, 0.07642892456054687, 0.0765544662475586, 0.07681484985351562, 0.0769056625366211, 0.07642144012451171, 0.07754380798339844, 0.07617330932617188, 0.07632691192626953, 0.07698665618896484, 0.07611363220214844, 0.07627728271484376, 0.076500732421875, 0.07635446166992188, 0.07623974609375, 0.07697446441650391, 0.07609769439697266, 0.0758460464477539, 0.07626892852783203, 0.07713597106933594, 0.07760265350341797, 0.07680182647705078, 0.07621024322509766, 0.07552700805664063, 0.07535139465332032, 0.07498137664794922, 0.07590294647216797, 0.07557190704345704, 0.07600297546386718, 0.07560800170898438, 0.07575804901123047, 0.07564691162109374, 0.07549132537841798, 0.07615673828125, 0.07588883209228516, 0.07543807983398437, 0.07538451385498048, 0.07820745849609376, 0.07569741058349609, 0.07528713226318359, 
0.07524147033691406, 0.0770109405517578, 0.07661746978759766, 0.07546701049804687, 0.0752529296875, 0.07525801849365234, 0.07494723510742188, 0.07610726165771485, 0.07535308837890625, 0.07562156677246094, 0.07582137298583984, 0.07522755432128907, 0.0752159652709961, 0.07560899353027344, 0.07578828430175781, 0.07501398468017578, 0.07474748992919922, 0.07497923278808594, 0.07525433349609376, 0.07539926147460937, 0.07555276489257813, 0.07576371002197266, 0.07571660614013671, 0.07605657958984376, 0.07592700958251954, 0.07531779479980469, 0.07522300720214843, 0.07604557037353515, 0.07532621002197265, 0.0749742431640625, 0.07504589080810548, 0.07519026947021484, 0.07507305908203125, 0.07505353546142578, 0.07627490997314453, 0.07612854766845703, 0.07550806427001953, 0.07557545471191406, 0.07608262634277344, 0.07537500762939453, 0.07562051391601562, 0.07570636749267579, 0.075069091796875, 0.07507520294189453, 0.07717279815673828, 0.07576748657226562, 0.07574626922607422, 0.07535372924804687, 0.07560761260986328, 0.07540809631347656, 0.07576175689697266, 0.07601503753662109, 0.07514966583251953, 0.07537651062011719, 0.07574524688720703, 0.07524329376220704, 0.07486729431152343, 0.07544831848144531, 0.07614460754394531, 0.07645340728759766, 0.0797841567993164, 0.07608092498779297, 0.076185791015625, 0.07595040130615234, 0.07525955200195313, 0.0757454071044922, 0.07518844604492188, 0.07612416076660156, 0.07555001831054688, 0.07547564697265625, 0.07464028930664063, 0.07505516815185546, 0.07781378936767579, 0.07510652923583984, 0.07535782623291015, 0.07528431701660156, 0.07513238525390625, 0.07491667175292968, 0.07491993713378907, 0.07524540710449219, 0.07568399810791016, 0.07507520294189453, 0.07528873443603516, 0.07525167846679688, 0.07534822082519531, 0.0752168960571289, 0.07524489593505859, 0.074816162109375, 0.07542339324951172, 0.07544866943359375, 0.0751103973388672, 0.07482163238525391, 0.07523123168945313, 0.0764927978515625, 0.07563257598876953, 0.07523856353759766, 0.07515622711181641, 0.07501634979248047, 0.07540268707275391, 0.07512339019775391, 0.07505292510986328, 0.07564492797851563, 0.07686476898193359, 0.07596630096435547, 0.07576054382324218, 0.07602790069580079, 0.07589641571044922, 0.07586243438720704, 0.0757084197998047, 0.07593778991699218, 0.07555276489257813, 0.07524470520019531, 0.0751830062866211, 0.0752393569946289, 0.07542976379394531, 0.07590080261230468, 0.07665280151367188, 0.07656265258789062, 0.07663820648193359, 0.0775363540649414, 0.07643775939941407, 0.0756740493774414, 0.07585791778564453, 0.07520051574707032, 0.07522614288330078, 0.07524406433105468, 0.07540361785888672, 0.07773603057861328, 0.0763119659423828, 0.07641353607177734, 0.07540940856933594, 0.07702220916748047, 0.07623302459716796]",tokens/s,13.18263737032903,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,26318.917632,13989.96992,0.0,13587.447808,13583.186432,s,1,53.55781640625,53.55781640625,0.0,53.55781640625,53.55781640625,53.55781640625,53.55781640625,[53.55781640625],,kWh,0.0013516544748583177,0.0001490905013596786,0.0004399167408219995,0.0019406617170399957,,MB,1305.4976,14382.137344,0.0,13958.643712,13923.876352,s,10,1.7928385467529295,0.17928385467529298,0.0002154899050922263,0.17931748962402344,0.17947501831054688,0.1795636505126953,0.17963455627441405,"[0.1792658233642578, 0.17913427734375, 0.1791064910888672, 0.17882333374023437, 0.17939907836914062, 0.1792445831298828, 0.17936915588378907, 0.17938819885253907, 0.17965228271484374, 
0.179455322265625]",tokens/s,1427.9032568975617,kWh,5.290528177604869e-06,5.834547435531527e-07,3.513961144500178e-06,9.3879440656582e-06,tokens/kWh,27269016.326638237,MB,1328.934912,14384.234496,0.0,13960.740864,13856.6528,s,10,88.1823955078125,8.81823955078125,0.02283885319505512,8.8186904296875,8.85125009765625,8.854098193359375,8.856376669921875,"[8.8506171875, 8.793611328125, 8.793921875, 8.7909521484375, 8.818333984375, 8.8359228515625, 8.7988037109375, 8.8242392578125, 8.819046875, 8.8569462890625]",tokens/s,7.144283123315529,kWh,0.0002600861288690591,2.868880108202818e-05,0.00013306753700949932,0.0004218424669605866,tokens/kWh,149344.85011412136,,s,630,88.18003881835936,0.13996831558469744,0.0012879793699532485,0.13974645996093749,0.14153371276855467,0.14221739196777342,0.14392456146240235,"[0.14127513122558594, 0.13967056274414064, 0.13997360229492187, 0.14067097473144533, 0.14057061767578125, 0.14096998596191407, 0.1401382751464844, 0.14124053955078125, 0.14005453491210937, 0.13992550659179687, 0.14106629943847657, 0.14006781005859376, 0.143361083984375, 0.14040873718261718, 0.14046995544433594, 0.13957356262207032, 0.13915267944335938, 0.14016342163085938, 0.1399478759765625, 0.13938531494140624, 0.1402464599609375, 0.1406553955078125, 0.14155964660644532, 0.1405849609375, 0.14022271728515626, 0.13952732849121094, 0.13892054748535157, 0.138756103515625, 0.13869056701660157, 0.13947494506835936, 0.1409815673828125, 0.14164799499511718, 0.14088847351074218, 0.14115213012695313, 0.14108291625976563, 0.1403750762939453, 0.14043437194824218, 0.14342112731933593, 0.1406262664794922, 0.141287109375, 0.1405135955810547, 0.14050006103515625, 0.1401005096435547, 0.14020806884765624, 0.14039865112304686, 0.14045388793945313, 0.14162940979003907, 0.1403572540283203, 0.14305635070800782, 0.1420557098388672, 0.14294444274902343, 0.14151455688476564, 0.1396676483154297, 0.14026754760742188, 0.1425166778564453, 0.1398062744140625, 0.1406591033935547, 0.1390281982421875, 0.13863331604003906, 0.1399707794189453, 0.14037586975097657, 0.13888067626953124, 0.13822137451171876, 0.14047637939453125, 0.13983021545410157, 0.1390751953125, 0.13967398071289064, 0.13867584228515625, 0.13964309692382812, 0.13910441589355468, 0.13956300354003906, 0.1391997833251953, 0.13955276489257812, 0.139004638671875, 0.13935411071777343, 0.13874330139160157, 0.13884873962402344, 0.13826666259765624, 0.1388400573730469, 0.13828300476074218, 0.1392168884277344, 0.13837312316894532, 0.14120515441894532, 0.1393456573486328, 0.14015866088867188, 0.13940010070800782, 0.14164781188964845, 0.1391063690185547, 0.1395274200439453, 0.1382623291015625, 0.1388143310546875, 0.13848774719238283, 0.14001683044433594, 0.13965107727050782, 0.1398916473388672, 0.14036309814453124, 0.13969065856933593, 0.1391964111328125, 0.14025318908691406, 0.14272825622558594, 0.13979539489746093, 0.1387376708984375, 0.14048870849609374, 0.13947453308105467, 0.13996275329589844, 0.14010368347167967, 0.1397855987548828, 0.1389595184326172, 0.14143283081054686, 0.13912640380859376, 0.13905520629882812, 0.13802726745605468, 0.1383302764892578, 0.13869772338867187, 0.1398874816894531, 0.13879295349121093, 0.14018150329589843, 0.1391306915283203, 0.13938076782226563, 0.1422068176269531, 0.13978175354003905, 0.13918074035644531, 0.13956915283203125, 0.1396961212158203, 0.14187519836425783, 0.14025039672851564, 0.14035968017578124, 0.13970182800292968, 0.13924806213378907, 0.13883970642089843, 0.13908412170410156, 0.1385648956298828, 0.13852925109863282, 
0.13914665222167968, 0.1388552703857422, 0.13881344604492188, 0.13948005676269531, 0.14001654052734375, 0.14013157653808594, 0.1400036163330078, 0.13974771118164062, 0.13978028869628906, 0.13943923950195314, 0.1400984344482422, 0.14428131103515626, 0.14010012817382814, 0.13936819458007813, 0.13977296447753906, 0.13932546997070314, 0.13993260192871093, 0.13974444580078124, 0.14064108276367188, 0.14366105651855468, 0.14121145629882811, 0.14077561950683593, 0.14057839965820312, 0.14073078918457033, 0.14206361389160158, 0.14091673278808595, 0.1392762908935547, 0.13908377075195313, 0.13965869140625, 0.13854981994628907, 0.13990672302246093, 0.13924745178222656, 0.13914572143554688, 0.1386895751953125, 0.13872563171386718, 0.13860073852539062, 0.1385476531982422, 0.1388748779296875, 0.13829859924316407, 0.13863401794433594, 0.13841612243652343, 0.13824546813964844, 0.13946287536621094, 0.13895111083984374, 0.13903872680664062, 0.1411089324951172, 0.13923770141601563, 0.1394093475341797, 0.1390858917236328, 0.13876223754882813, 0.13957530212402344, 0.13830157470703125, 0.1390301513671875, 0.13912908935546875, 0.1397430419921875, 0.13802659606933593, 0.140400634765625, 0.139399169921875, 0.13814579772949218, 0.13857586669921876, 0.13985530090332032, 0.14815699768066407, 0.1386762237548828, 0.14002175903320313, 0.13905509948730468, 0.13933676147460938, 0.13886968994140625, 0.13963877868652344, 0.13938275146484375, 0.1393185272216797, 0.1404587860107422, 0.13868556213378908, 0.14046502685546874, 0.13847142028808593, 0.13834034729003905, 0.1392701416015625, 0.13982710266113282, 0.1399779815673828, 0.13940985107421874, 0.13937234497070314, 0.138361083984375, 0.13909231567382813, 0.13864346313476564, 0.13876223754882813, 0.138176513671875, 0.13919027709960938, 0.13861663818359374, 0.1385020751953125, 0.13803706359863283, 0.1382092742919922, 0.14174681091308594, 0.13972041320800782, 0.13965936279296876, 0.13956300354003906, 0.1384099884033203, 0.13932858276367188, 0.13827072143554686, 0.13934480285644532, 0.13895887756347655, 0.14128512573242188, 0.1397987823486328, 0.14030169677734375, 0.1397655029296875, 0.1396183319091797, 0.13940208435058593, 0.1391143341064453, 0.13828726196289062, 0.13940325927734376, 0.139208251953125, 0.13990956115722655, 0.1391226806640625, 0.13933296203613282, 0.1390451202392578, 0.13985833740234374, 0.1395465545654297, 0.1420793914794922, 0.1427599639892578, 0.14028659057617188, 0.13887910461425781, 0.14074281311035156, 0.13982925415039063, 0.1399493103027344, 0.1391848907470703, 0.14056243896484374, 0.13907562255859374, 0.1416763916015625, 0.14067315673828126, 0.13954867553710937, 0.14016415405273439, 0.13934915161132813, 0.13897296142578125, 0.1411092529296875, 0.14002700805664062, 0.13929682922363282, 0.13884425354003907, 0.13923606872558594, 0.13987942504882814, 0.13904281616210937, 0.1387999725341797, 0.13928054809570312, 0.14027171325683593, 0.1390016326904297, 0.13957273864746095, 0.139227783203125, 0.14021632385253907, 0.13872029113769532, 0.14080256652832032, 0.14050303649902343, 0.13985171508789063, 0.13932170104980468, 0.14044956970214845, 0.13995362854003907, 0.14013229370117186, 0.1395734100341797, 0.13943026733398436, 0.14021641540527344, 0.14095721435546876, 0.13898822021484375, 0.13936041259765625, 0.13892608642578125, 0.14154502868652344, 0.140517822265625, 0.14016717529296874, 0.13966744995117186, 0.13972627258300782, 0.13974520874023438, 0.13937420654296875, 0.1390676727294922, 0.13901078796386718, 0.1388605499267578, 0.139509765625, 0.14007296752929688, 
0.13934707641601562, 0.14140473937988282, 0.1403469696044922, 0.14032969665527345, 0.1410149383544922, 0.14073251342773438, 0.14138983154296875, 0.14357635498046875, 0.14172979736328126, 0.14022285461425782, 0.14133212280273438, 0.13971900939941406, 0.1398389129638672, 0.14196742248535157, 0.1400491180419922, 0.13948924255371092, 0.1397737579345703, 0.14005043029785155, 0.14003996276855468, 0.14001174926757812, 0.1398489532470703, 0.14164044189453126, 0.14010786437988282, 0.14227212524414062, 0.140896484375, 0.14087379455566407, 0.14026138305664063, 0.1402037811279297, 0.14118873596191406, 0.14394227600097656, 0.140548095703125, 0.13988453674316406, 0.13963189697265624, 0.1401630096435547, 0.14008306884765626, 0.1406246337890625, 0.14005850219726562, 0.14048284912109374, 0.14265536499023437, 0.1413623046875, 0.14063923645019533, 0.14041497802734376, 0.14023475646972655, 0.13994557189941406, 0.1405312957763672, 0.14058554077148439, 0.13961651611328124, 0.1396285400390625, 0.14004953002929688, 0.1395569610595703, 0.13952899169921876, 0.13962176513671876, 0.14071369934082031, 0.14167510986328125, 0.14205778503417968, 0.14080419921875, 0.14023260498046874, 0.13913906860351563, 0.14242201232910157, 0.13997669982910158, 0.140769287109375, 0.1397022705078125, 0.13936639404296874, 0.1387315216064453, 0.13951795959472657, 0.1395155792236328, 0.14025759887695313, 0.1392815399169922, 0.13827334594726562, 0.1389790344238281, 0.13874237060546876, 0.1383096923828125, 0.13788563537597656, 0.13955938720703126, 0.1386270751953125, 0.13834854125976562, 0.13994131469726562, 0.1380494689941406, 0.13853945922851563, 0.1377978210449219, 0.13910336303710938, 0.13948728942871094, 0.1388408966064453, 0.14245274353027343, 0.13888681030273436, 0.1386130828857422, 0.13886979675292968, 0.14075990295410157, 0.13916905212402345, 0.13916860961914063, 0.14025523376464843, 0.13974298095703125, 0.1388891143798828, 0.1399340515136719, 0.14019366455078125, 0.1385841979980469, 0.1390603790283203, 0.1382586212158203, 0.1387189178466797, 0.1507911376953125, 0.13932710266113282, 0.13843084716796875, 0.1390645751953125, 0.13791459655761718, 0.13847196960449218, 0.13789324951171875, 0.13839013671875, 0.13782546997070313, 0.13826080322265624, 0.13912025451660157, 0.14199848937988283, 0.13893814086914064, 0.14235621643066407, 0.13955168151855468, 0.13933485412597657, 0.13947775268554688, 0.14004844665527344, 0.1397514190673828, 0.13975523376464843, 0.13873289489746093, 0.13937759399414062, 0.13846710205078125, 0.13922297668457032, 0.13970870971679689, 0.14024295043945312, 0.13976931762695313, 0.14000358581542968, 0.13941548156738282, 0.1425636749267578, 0.1411952667236328, 0.14153245544433593, 0.14099325561523438, 0.14125418090820313, 0.13981462097167968, 0.14025190734863283, 0.13947698974609374, 0.14099058532714845, 0.1408942108154297, 0.14030233764648437, 0.140115966796875, 0.139774169921875, 0.14051715087890626, 0.13920768737792968, 0.13972119140625, 0.13954051208496093, 0.13993142700195313, 0.1400224914550781, 0.14001766967773438, 0.13929881286621093, 0.1395056610107422, 0.1386658935546875, 0.13936422729492187, 0.13924703979492187, 0.13927830505371094, 0.14177772521972656, 0.13922714233398437, 0.13862860107421876, 0.13899388122558592, 0.13911477661132812, 0.13987840270996094, 0.14047555541992188, 0.1420358123779297, 0.13994598388671875, 0.14052908325195312, 0.14471449279785156, 0.14157734680175782, 0.1418797149658203, 0.1400358428955078, 0.1396529541015625, 0.14388275146484375, 0.14033734130859374, 0.14007296752929688, 
0.1399869384765625, 0.13925990295410157, 0.13949261474609376, 0.13980441284179687, 0.14002642822265626, 0.13980511474609375, 0.1391513671875, 0.14250550842285156, 0.1397908172607422, 0.14093081665039062, 0.13946083068847656, 0.13902572631835938, 0.13929341125488282, 0.140420166015625, 0.13968888854980469, 0.13942579650878906, 0.13956915283203125, 0.13960601806640624, 0.140340576171875, 0.13936502075195312, 0.1391841278076172, 0.13895680236816407, 0.13926809692382813, 0.1399102325439453, 0.1395987548828125, 0.1397487030029297, 0.14121849060058594, 0.14201242065429687, 0.14043341064453124, 0.13997789001464844, 0.14048342895507812, 0.13985296630859376, 0.139455322265625, 0.1394910125732422, 0.13965335083007813, 0.14049232482910157, 0.13944479370117188, 0.13941285705566406, 0.14014323425292968, 0.13907763671875, 0.13885545349121095, 0.1391664276123047, 0.13848602294921875, 0.13876412963867188, 0.14300294494628907, 0.1401859588623047, 0.13952386474609374, 0.13934664916992187, 0.1391017303466797, 0.1392539825439453, 0.1394520263671875, 0.13911013793945312, 0.14222604370117187, 0.14005264282226562, 0.13955462646484376, 0.14037635803222656, 0.13883981323242187, 0.13978866577148438, 0.13896675109863282, 0.14088156127929688, 0.13994239807128905, 0.13914437866210938, 0.14393954467773437, 0.1401189727783203, 0.14014204406738281, 0.13990342712402343, 0.13964277648925782, 0.14280508422851562, 0.14624972534179687, 0.14144284057617187, 0.14008341979980468, 0.13993910217285158, 0.14006550598144532, 0.13953814697265626, 0.13969602966308595, 0.13965965270996095, 0.1392865295410156, 0.1394687957763672, 0.13929472351074218, 0.13918617248535156, 0.13942530822753907, 0.13914775085449219, 0.13976374816894532, 0.14012985229492186, 0.1397989501953125, 0.13911654663085937, 0.13908787536621095, 0.13930604553222656, 0.13936038208007812, 0.1392643585205078, 0.13944166564941407, 0.13877900695800782, 0.1399053497314453, 0.13891583251953124, 0.13884967041015625, 0.14346713256835938, 0.14046147155761718, 0.14030812072753907, 0.13930592346191406, 0.1389240264892578, 0.13894210815429686, 0.13861923217773436, 0.13855743408203125, 0.14010552978515625, 0.14129580688476562, 0.13984892272949218, 0.14027810668945312, 0.14065065002441407, 0.14046432495117187, 0.1411068115234375, 0.14104380798339844, 0.14200051879882813, 0.14071603393554688, 0.14373039245605468, 0.14141468811035157, 0.14147378540039063, 0.141082275390625, 0.14109117126464843, 0.14178233337402343, 0.141503173828125, 0.14137667846679688, 0.14176918029785157, 0.1437162628173828, 0.1412960968017578, 0.1406033935546875, 0.14064988708496093, 0.1403213806152344, 0.14114816284179688, 0.14179737854003907, 0.14388787841796874, 0.1417789764404297, 0.141822265625, 0.14192860412597658, 0.14016204833984375, 0.1399833221435547, 0.14031021118164064, 0.13953724670410156, 0.1419562225341797, 0.14046298217773437, 0.13977804565429688, 0.13947494506835936, 0.1396712646484375, 0.1400314178466797, 0.13941641235351562, 0.13953433227539064, 0.13908992004394533, 0.1392431945800781, 0.14029379272460937, 0.14015267944335938, 0.1430597381591797, 0.13988662719726563, 0.13952365112304688, 0.13895106506347657]",tokens/s,7.144474060594673,, 
8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,14385.012736,10142.810112,0.0,9747.562496,9611.730944,s,1,34.31497265625,34.31497265625,0.0,34.31497265625,34.31497265625,34.31497265625,34.31497265625,[34.31497265625],,kWh,0.0007788282409832997,8.590274566197604e-05,0.00029462440236599496,0.0011593553890112706,,MB,4722.757632,10507.71456,0.0,10091.495424,9989.953536,s,10,1.2657408599853515,0.12657408599853515,0.0014093910157059629,0.12633460998535156,0.12765541381835938,0.12900365600585936,0.13008224975585939,"[0.12648461151123047, 0.1268326110839844, 0.12621836853027343, 0.12543926239013672, 0.12524419403076173, 0.12735580444335937, 0.1264508514404297, 0.1259213409423828, 0.13035189819335938, 0.12544191741943359]",tokens/s,2022.5308994367356,kWh,3.729289157172676e-06,4.1116035866514746e-07,2.4903958038480642e-06,6.630845319685888e-06,tokens/kWh,38607445.60576283,MB,4722.757632,10509.811712,0.0,10093.592576,9989.956096,s,10,76.04738525390624,7.6047385253906254,0.028115135723231145,7.601865478515625,7.63665849609375,7.638342919921875,7.639690458984375,"[7.6362841796875, 7.64002734375, 7.606771484375, 7.59695947265625, 7.629376953125, 7.632111328125, 7.59613720703125, 7.57292041015625, 7.584455078125, 7.552341796875]",tokens/s,8.28430849918853,kWh,0.00022130831102241136,2.4411459785056264e-05,0.00011673431068695224,0.00036245408149441984,tokens/kWh,173815.12091199867,,s,630,76.04411956024167,0.12070495168292335,0.0012465179445850338,0.12048220825195313,0.12197726821899414,0.12290295524597168,0.1254197380065918,"[0.12093644714355468, 0.12052684783935547, 0.12114031982421875, 0.12114559936523438, 0.12024076843261719, 0.12042652893066406, 0.12247039794921875, 0.12105907440185547, 0.11997347259521485, 0.1235728988647461, 0.12073308563232422, 0.12119087982177734, 0.120115234375, 0.12042652893066406, 0.12082134246826172, 0.12290505981445313, 0.12082717132568359, 0.12087747192382813, 0.1215511703491211, 0.12336518096923828, 0.12130732727050782, 0.12086624145507813, 0.11998675537109375, 0.12129203033447265, 0.121053955078125, 0.12080947113037109, 0.12175052642822265, 0.12255538940429687, 0.12162627410888673, 0.1309876403808594, 0.11998486328125, 0.12093350219726562, 0.12089024353027343, 0.12006195068359375, 0.12007628631591796, 0.12049215698242187, 0.1212326431274414, 0.11952710723876953, 0.11978777313232422, 0.12054946899414062, 0.12071587371826172, 0.11957453155517578, 0.11993292999267578, 0.12310918426513671, 0.1218309097290039, 0.11992339324951172, 0.12047769927978516, 0.11982201385498047, 0.12151430511474609, 0.12066815948486329, 0.12031807708740234, 0.12014374542236328, 0.12162662506103515, 0.12161148834228516, 0.12151888275146484, 0.12299049377441407, 0.12170454406738282, 0.12071513366699219, 0.12131507110595703, 0.12122354888916016, 0.12232498931884765, 0.12214832305908203, 0.1206747817993164, 0.12229942321777344, 0.12165753936767579, 0.12072425842285156, 0.12099174499511718, 
0.12197682952880859, 0.1211883544921875, 0.12135833740234375, 0.12080947113037109, 0.12072774505615234, 0.12190636444091797, 0.1197492446899414, 0.11997388458251954, 0.12029132843017579, 0.12101773071289063, 0.12034304046630859, 0.12006790161132813, 0.12008806610107423, 0.12207801818847656, 0.12089055633544922, 0.11990220642089844, 0.12062777709960938, 0.12097357177734375, 0.12062265777587891, 0.1205498275756836, 0.12089344024658204, 0.12103065490722656, 0.12200287628173828, 0.12112694549560547, 0.12624444580078126, 0.1206827163696289, 0.12104691314697266, 0.12128342437744141, 0.1218720932006836, 0.1232259521484375, 0.12245152282714844, 0.12116214752197266, 0.12153699493408203, 0.12116928100585937, 0.12054950714111329, 0.12096326446533204, 0.12128050994873046, 0.12099721527099609, 0.12186726379394532, 0.1216669464111328, 0.12133232116699219, 0.1215283203125, 0.12116377258300781, 0.12213862609863281, 0.12441347503662109, 0.12052297973632813, 0.12073395538330078, 0.1220315170288086, 0.1205759048461914, 0.12130374145507812, 0.12244739532470703, 0.12072128295898438, 0.11985497283935546, 0.1197903060913086, 0.1198919677734375, 0.12131737518310547, 0.1203828125, 0.12005996704101562, 0.12562902069091797, 0.12049651336669921, 0.120174560546875, 0.11925299072265624, 0.11983872222900391, 0.12050227355957031, 0.1197768325805664, 0.12025286102294921, 0.1210871353149414, 0.12058029174804688, 0.12023875427246093, 0.12007958221435547, 0.12050307464599609, 0.12152543640136719, 0.12071609497070312, 0.12092415618896485, 0.12148294067382813, 0.12031622314453125, 0.11979679870605468, 0.11986220550537109, 0.1201270751953125, 0.12120105743408204, 0.11987916564941406, 0.11959552001953125, 0.12033971405029296, 0.1204245147705078, 0.12007891082763672, 0.12032627105712891, 0.1209692153930664, 0.120774658203125, 0.12033843231201172, 0.1203076171875, 0.12059043121337891, 0.12049635314941406, 0.11988047790527344, 0.12030668640136719, 0.12090179443359375, 0.12027008056640626, 0.12090633392333984, 0.11986739349365234, 0.1207537612915039, 0.12154716491699219, 0.12094054412841797, 0.12024323272705079, 0.12014281463623047, 0.11995257568359376, 0.12143494415283203, 0.12211952209472657, 0.12065821075439453, 0.12162652587890625, 0.12019145965576172, 0.12042854309082031, 0.12349849700927734, 0.13049568176269533, 0.12055225372314453, 0.12093849945068359, 0.12003533172607422, 0.12061414337158204, 0.12140825653076172, 0.12044422149658203, 0.12101087951660157, 0.12060610961914063, 0.12011580657958984, 0.12172227478027343, 0.12047154998779297, 0.12102877044677735, 0.12364179229736329, 0.12290038299560548, 0.11981414031982422, 0.12044083404541016, 0.12103065490722656, 0.11961737823486328, 0.12001910400390625, 0.1198878402709961, 0.12020089721679687, 0.11991410827636718, 0.11956707000732422, 0.12073369598388672, 0.11995462036132812, 0.12044576263427734, 0.12099174499511718, 0.12387942504882812, 0.11965030670166016, 0.12062483215332032, 0.12080944061279297, 0.11953997039794922, 0.12006819152832031, 0.12116172790527344, 0.11973632049560547, 0.120700927734375, 0.12094185638427735, 0.12071298980712891, 0.12090054321289062, 0.120166015625, 0.1196732177734375, 0.12081472015380859, 0.12027954864501954, 0.12056166076660156, 0.121162109375, 0.1203133773803711, 0.12025494384765625, 0.12021273803710937, 0.12043341064453125, 0.12041385650634766, 0.12217708587646485, 0.12042230224609375, 0.12081446075439453, 0.1203220443725586, 0.12011724853515625, 0.11988719940185547, 0.11987010955810547, 0.12074320220947266, 0.12102524566650391, 
0.1207391357421875, 0.12014598083496093, 0.12039759826660157, 0.12019907379150391, 0.11980076599121094, 0.11972402954101563, 0.12099174499511718, 0.12059974670410156, 0.12116255950927735, 0.12072665405273438, 0.12122364807128906, 0.12068495941162109, 0.120340576171875, 0.12084825897216797, 0.12175769805908203, 0.12599501037597657, 0.12045696258544922, 0.12127462768554688, 0.12070057678222657, 0.12022767639160156, 0.12110694122314453, 0.12465766143798829, 0.12062252807617188, 0.11955462646484374, 0.11983599853515625, 0.11924342346191406, 0.12044083404541016, 0.12002304077148437, 0.12080329895019531, 0.12011280059814453, 0.11973875427246093, 0.12054118347167969, 0.12097740936279297, 0.12075635528564453, 0.12074518585205078, 0.1207384033203125, 0.12399343872070312, 0.12022978973388672, 0.12038739013671874, 0.1201042251586914, 0.1210667495727539, 0.12024179077148438, 0.12044537353515625, 0.12058870697021484, 0.11973744201660157, 0.11992787170410156, 0.12121398162841797, 0.12085327911376953, 0.12023955535888672, 0.12321238708496093, 0.12340025329589843, 0.1209151382446289, 0.12047824096679688, 0.12052851104736328, 0.11967139434814453, 0.12126822662353516, 0.12048588562011718, 0.12127027130126954, 0.12112019348144532, 0.1218414077758789, 0.12148191833496094, 0.12422713470458985, 0.11996598052978516, 0.12059677124023438, 0.12057379150390625, 0.12152028656005859, 0.12085993957519531, 0.12108258819580078, 0.12210176086425781, 0.12171453094482422, 0.12121875, 0.12198121643066406, 0.12071488189697266, 0.1223724136352539, 0.1217702407836914, 0.12202188873291016, 0.1212968978881836, 0.12316252899169922, 0.12113520050048829, 0.12116361236572265, 0.12188790130615235, 0.12252422332763672, 0.12188422393798828, 0.1215447006225586, 0.12178931427001953, 0.12183334350585938, 0.12167167663574219, 0.12115334320068359, 0.12154646301269531, 0.1224803237915039, 0.12275804901123047, 0.12201967620849609, 0.12106768035888672, 0.12126822662353516, 0.12120188903808594, 0.12188671875, 0.12310546875, 0.12288060760498047, 0.1220322265625, 0.1215282211303711, 0.11992195129394531, 0.12068889617919921, 0.12622412872314454, 0.12080611419677735, 0.12105532836914062, 0.11988572692871094, 0.12245536041259765, 0.12089965057373046, 0.12023462677001953, 0.11957987213134766, 0.11952381134033203, 0.12026911926269532, 0.12054729461669922, 0.12026882934570313, 0.12095699310302735, 0.11983385467529296, 0.12065190124511718, 0.12455174255371093, 0.12037939453125, 0.12068867492675782, 0.11992668914794923, 0.12038150024414063, 0.12153849792480469, 0.12094246673583985, 0.12005315399169922, 0.11988166046142579, 0.11942739105224609, 0.11968895721435546, 0.11972073364257813, 0.11997337341308593, 0.11971379089355469, 0.12490735626220703, 0.12194064331054688, 0.12115570831298828, 0.12002623748779297, 0.11907968139648438, 0.11986032104492188, 0.12082579040527344, 0.12014886474609375, 0.11965245056152343, 0.12067635345458984, 0.12082479858398437, 0.1200755844116211, 0.1204804458618164, 0.12036243438720703, 0.12018335723876954, 0.12026265716552734, 0.12046246337890625, 0.12080422210693359, 0.11998003387451171, 0.11966464233398437, 0.12013481903076172, 0.11999318695068359, 0.12061238098144532, 0.12016278076171875, 0.12100137329101562, 0.12034518432617188, 0.12040953826904296, 0.11986492919921875, 0.11989500427246094, 0.12054937744140624, 0.12044601440429688, 0.120531005859375, 0.12054003143310547, 0.12138233947753906, 0.12076099395751953, 0.12067945861816406, 0.12031254577636719, 0.11957263946533203, 0.1212907485961914, 0.12094054412841797, 
0.12033952331542969, 0.12221126556396485, 0.12026470184326171, 0.12041206359863281, 0.12102870178222656, 0.11988956451416016, 0.11977378845214844, 0.1234593276977539, 0.1204735336303711, 0.12113516998291016, 0.12024012756347656, 0.12034877014160156, 0.12618128204345702, 0.12249702453613281, 0.12015577697753907, 0.12044940948486328, 0.12072140502929687, 0.12012973022460938, 0.1200508804321289, 0.1199130859375, 0.12012278747558594, 0.11981884765625, 0.12018013000488281, 0.12022831726074219, 0.11910566711425781, 0.11931648254394531, 0.12052051544189453, 0.12082809448242188, 0.12214284515380859, 0.1204796142578125, 0.1196723861694336, 0.12048397064208985, 0.1200513916015625, 0.12075382232666015, 0.12053276824951172, 0.12000550079345704, 0.11943526458740235, 0.11951308441162109, 0.11965977478027344, 0.11943807983398437, 0.11923455810546875, 0.12327350616455078, 0.11873654174804688, 0.1203776626586914, 0.12051634979248046, 0.11923273468017578, 0.11963938903808594, 0.12034825897216797, 0.11953033447265625, 0.11957987213134766, 0.1201725082397461, 0.11972281646728515, 0.12014726257324218, 0.12026032257080078, 0.11960540771484375, 0.12021331024169922, 0.11956880187988281, 0.11966320037841797, 0.11966464233398437, 0.11982157135009766, 0.11993164825439454, 0.11918131256103516, 0.12023193359375, 0.11950284576416016, 0.11984281921386719, 0.12008585357666016, 0.12006671905517578, 0.12037503814697266, 0.11967513275146484, 0.1200926742553711, 0.11982601928710937, 0.1196253433227539, 0.12016925048828125, 0.11935948944091797, 0.11961516571044922, 0.121653564453125, 0.12012525177001954, 0.12089539337158203, 0.11990854644775391, 0.11968931579589843, 0.1203220443725586, 0.12011929321289062, 0.12022169494628906, 0.12099993896484375, 0.12016435241699219, 0.12067839813232421, 0.12176112365722656, 0.11925981140136718, 0.12039373016357421, 0.12212838745117187, 0.12419276428222656, 0.12163276672363281, 0.12094464111328125, 0.12077568054199218, 0.12043135833740234, 0.12124556732177734, 0.11983712005615234, 0.1209288330078125, 0.120710205078125, 0.12094767761230468, 0.12081484985351562, 0.12153446197509765, 0.1206025619506836, 0.12058294677734376, 0.12111872100830078, 0.12083631896972656, 0.11980786895751953, 0.12003628540039063, 0.12058246612548829, 0.12110684967041016, 0.12276310729980469, 0.11899945831298828, 0.11971379089355469, 0.11993497467041016, 0.11981951904296875, 0.12069500732421876, 0.12100457763671875, 0.12158566284179688, 0.1207227554321289, 0.12036576080322266, 0.12053708648681641, 0.11975475311279297, 0.1196789779663086, 0.12126825714111328, 0.12005888366699219, 0.12034556579589843, 0.11950796508789062, 0.11958927917480469, 0.1199642562866211, 0.12013948822021485, 0.11955238342285156, 0.1195492172241211, 0.12046399688720703, 0.12069174194335938, 0.123478271484375, 0.11956502532958985, 0.11984889221191407, 0.12021526336669922, 0.1200327377319336, 0.11988057708740234, 0.12060671997070313, 0.12110578918457031, 0.11995750427246094, 0.12077120208740234, 0.12049612426757812, 0.12022169494628906, 0.12036656188964844, 0.11981059265136719, 0.1197875213623047, 0.12067142486572266, 0.12016518402099609, 0.12002098846435547, 0.11983439636230468, 0.11994882965087891, 0.12025926208496093, 0.11958067321777344, 0.11980140686035157, 0.12032044982910156, 0.12102098846435547, 0.12123529815673828, 0.11997325134277344, 0.12013958740234375, 0.11978781127929687, 0.11942169952392578, 0.12139622497558594, 0.12052703857421875, 0.12007266998291016, 0.12022966766357422, 0.11962947082519532, 0.12013654327392578, 0.1246740493774414, 
0.120610595703125, 0.11959318542480468, 0.11972342681884765, 0.12091043090820312, 0.12011910247802735, 0.1235027847290039, 0.11964415740966797, 0.11975475311279297, 0.1206839370727539, 0.11958934020996094, 0.120150146484375, 0.11989984130859375, 0.11979590606689453, 0.11912754821777344, 0.11915532684326172, 0.11973632049560547, 0.12029132843017579, 0.11915001678466797, 0.11998806762695312, 0.11948515319824218, 0.12000665283203125, 0.12285951995849609, 0.11932825469970704, 0.11902355194091797, 0.11969522857666015, 0.1187212142944336, 0.1191357421875, 0.11905455780029296, 0.11926585388183594, 0.11857686614990234, 0.11894364929199219, 0.11845407867431641, 0.11928572845458985, 0.11911404418945312, 0.1216880645751953, 0.11921612548828125, 0.11875260925292969, 0.11916710662841797, 0.11868217468261719, 0.11807536315917969, 0.11918950653076171, 0.11968704223632813, 0.11968482971191406, 0.11958512115478516, 0.11995961761474609, 0.11961740875244141, 0.1197733154296875, 0.11948607635498047, 0.1198329620361328, 0.11899494171142579]",tokens/s,8.284664266523826,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,7886.938112,4726.849536,0.0,4324.327424,4324.229632,s,1,19.79137890625,19.79137890625,0.0,19.79137890625,19.79137890625,19.79137890625,19.79137890625,[19.79137890625],,kWh,0.0003650106653957664,4.025606318194709e-05,0.0001188723173199957,0.0005241390458977092,,MB,1703.698432,5058.199552,0.0,4634.70592,4579.227136,s,10,0.4585664939880371,0.04585664939880372,0.0002911707032246318,0.0457652473449707,0.04594910049438477,0.04633104705810547,0.046636604309082036,"[0.046712993621826175, 0.04579827117919922, 0.045708831787109376, 0.04569247817993164, 0.04582495880126953, 0.04586422348022461, 0.045814849853515624, 0.04573222351074219, 0.04572060775756836, 0.04569705581665039]",tokens/s,5582.614590386501,kWh,1.3879453434043173e-06,1.5306503708340506e-07,9.17829854853176e-07,2.4588402353408986e-06,tokens/kWh,104114125.15563771,MB,1707.769856,5058.199552,0.0,4634.70592,4519.089152,s,10,23.733406982421876,2.3733406982421874,0.01144744282825978,2.3749954833984375,2.3837395263671874,2.389226208496094,2.393615554199219,"[2.394712890625, 2.380418701171875, 2.36219775390625, 2.3627783203125, 2.370325927734375, 2.3796650390625, 2.380753662109375, 2.363517822265625, 2.382520263671875, 2.3565166015625]",tokens/s,26.544861446424818,kWh,6.998858802576803e-05,7.719084135744982e-06,3.663747796714828e-05,0.00011434515012866128,tokens/kWh,550963.463943266,,s,630,23.73155454254148,0.037669134194510324,0.0005000768077473787,0.0375581111907959,0.03799884643554687,0.038282036781311035,0.039966547508239754,"[0.037789825439453126, 0.037754878997802735, 0.038003742218017576, 0.03761337661743164, 0.037674880981445315, 0.03800691223144531, 0.037621185302734374, 0.03775920104980469, 0.03775932693481445, 0.037806079864501956, 0.03792486572265625, 0.03817859268188477, 
0.03816265487670899, 0.03768953704833984, 0.03784268951416016, 0.03791606521606445, 0.03799286270141602, 0.03774643325805664, 0.038866977691650394, 0.037769119262695314, 0.037806270599365234, 0.037859710693359375, 0.03763324737548828, 0.03776559829711914, 0.037830974578857424, 0.04101459121704101, 0.037959487915039065, 0.037825054168701175, 0.037798240661621095, 0.03812934494018555, 0.0380700798034668, 0.03804620742797851, 0.037976001739501955, 0.03790367889404297, 0.03781894302368164, 0.03784499359130859, 0.03982150268554688, 0.03881369781494141, 0.03828940963745117, 0.038199295043945314, 0.03822335815429687, 0.03798681640625, 0.03793123245239258, 0.03795769500732422, 0.03795523071289063, 0.037883968353271485, 0.03780176162719726, 0.037806304931640625, 0.037703968048095705, 0.03786428833007813, 0.03798908615112305, 0.03799612808227539, 0.03790496063232422, 0.03788800048828125, 0.037910526275634765, 0.03792486572265625, 0.037908065795898435, 0.03820995330810547, 0.03825600051879883, 0.03800899124145508, 0.03786956787109375, 0.03760790252685547, 0.03764944076538086, 0.03908313751220703, 0.03920659255981445, 0.038887649536132815, 0.038636161804199216, 0.03832627105712891, 0.03827302551269531, 0.03827110290527344, 0.03827289581298828, 0.037773311614990236, 0.037699073791503904, 0.03760575866699219, 0.03762550354003906, 0.037507553100585934, 0.03758489608764649, 0.038150142669677735, 0.037378047943115236, 0.03746582412719727, 0.03778499221801758, 0.03758086395263672, 0.03758777618408203, 0.03765891265869141, 0.037539264678955075, 0.03756675338745117, 0.037406494140625, 0.03748067092895508, 0.037666465759277346, 0.03764031982421875, 0.03764179229736328, 0.03791734313964844, 0.03813542556762695, 0.03762623977661133, 0.03762156677246094, 0.0374571533203125, 0.03746297454833984, 0.03747225570678711, 0.037378047943115236, 0.03746201705932617, 0.03746201705932617, 0.03752707290649414, 0.037574302673339846, 0.037525375366210936, 0.037753215789794924, 0.03746652984619141, 0.03749903869628906, 0.03760902404785156, 0.0376868782043457, 0.03751203155517578, 0.03759487915039063, 0.03735772705078125, 0.03748207855224609, 0.037519775390625, 0.037533790588378906, 0.03751900863647461, 0.037615230560302734, 0.03752214431762695, 0.03761062240600586, 0.04054924774169922, 0.03851996612548828, 0.03773302459716797, 0.03756051254272461, 0.03760332870483398, 0.03756851196289063, 0.03750502395629883, 0.03746815872192383, 0.037459968566894535, 0.037371009826660154, 0.03752374267578125, 0.037335647583007815, 0.03735504150390625, 0.03738262557983398, 0.037401630401611326, 0.037276641845703125, 0.03730022430419922, 0.03727769470214844, 0.03734640121459961, 0.03864873504638672, 0.037942943572998045, 0.039420257568359374, 0.03743539047241211, 0.037557758331298825, 0.03743084716796875, 0.03743596649169922, 0.037668895721435544, 0.037724254608154296, 0.037506847381591796, 0.03746403121948242, 0.03745257568359375, 0.03731222534179687, 0.03741811370849609, 0.03737689590454102, 0.03737955093383789, 0.03733126449584961, 0.0374417610168457, 0.03740777587890625, 0.03739068984985352, 0.037284481048583985, 0.03740671920776367, 0.03731884765625, 0.03733667373657226, 0.037279041290283206, 0.03748751831054688, 0.03738579177856445, 0.037470657348632815, 0.037375999450683595, 0.037478206634521484, 0.03735980987548828, 0.03739852905273437, 0.037512321472167966, 0.03738044738769531, 0.03735504150390625, 0.03759001541137695, 0.03742329788208008, 0.03789753723144531, 0.037427295684814454, 0.03732915115356445, 0.03731846237182617, 
0.03724492645263672, 0.03728828811645508, 0.037572608947753904, 0.0380167350769043, 0.037649791717529295, 0.037371967315673826, 0.037464447021484375, 0.03738284683227539, 0.037451614379882814, 0.037513153076171875, 0.037954334259033204, 0.03778355026245117, 0.03744707107543945, 0.037566944122314455, 0.03746214294433594, 0.03768316650390625, 0.03817504119873047, 0.038081279754638674, 0.037711071014404296, 0.03752092742919922, 0.0375928955078125, 0.037396896362304685, 0.0372674560546875, 0.03837747192382813, 0.03746815872192383, 0.037306655883789064, 0.03717283248901367, 0.03735526275634766, 0.0372985610961914, 0.03739398574829102, 0.03731296157836914, 0.037254207611083986, 0.037311424255371095, 0.03724867248535156, 0.03732118225097656, 0.037848960876464846, 0.03864089584350586, 0.037423873901367186, 0.037384159088134764, 0.03732073593139648, 0.03724662399291992, 0.037156223297119144, 0.03736265563964844, 0.03755417633056641, 0.03729555130004883, 0.037294654846191405, 0.03728319931030273, 0.03732112121582031, 0.03786137771606445, 0.03738851165771485, 0.037394432067871096, 0.037248737335205076, 0.03727987289428711, 0.03729593658447266, 0.03729852676391601, 0.03751059341430664, 0.037335006713867185, 0.03731705474853515, 0.03715907287597656, 0.037230209350585936, 0.03732227325439453, 0.03728854370117188, 0.03727999877929687, 0.03733299255371094, 0.037302272796630856, 0.03728390502929688, 0.03731148910522461, 0.0372457275390625, 0.03965353775024414, 0.03774844741821289, 0.03756175994873047, 0.037555072784423826, 0.037770912170410155, 0.03787436676025391, 0.0379983024597168, 0.03781836700439453, 0.037449726104736326, 0.037461952209472654, 0.0378039665222168, 0.03748876953125, 0.03743030548095703, 0.037370849609375, 0.03753574371337891, 0.03744921493530273, 0.03746572875976562, 0.03737689590454102, 0.03743539047241211, 0.037353473663330077, 0.03747974395751953, 0.03742176055908203, 0.03746227264404297, 0.03778070449829102, 0.03755062484741211, 0.037425151824951174, 0.03741491317749023, 0.037564414978027344, 0.03779507064819336, 0.037481216430664065, 0.03761971282958984, 0.03741491317749023, 0.03785257720947265, 0.03750073623657227, 0.0377147216796875, 0.03744720077514648, 0.037433822631835936, 0.03749273681640625, 0.03755120086669922, 0.040616863250732424, 0.03790787124633789, 0.03777996826171875, 0.0377344970703125, 0.03759503936767578, 0.03758703994750977, 0.0375684814453125, 0.03754512023925781, 0.03759193420410156, 0.03754102325439453, 0.037499744415283205, 0.03756963348388672, 0.03751804733276367, 0.03749417495727539, 0.03747836685180664, 0.03786630249023438, 0.03773440170288086, 0.03758489608764649, 0.037537952423095704, 0.037513057708740236, 0.03755110549926758, 0.03753574371337891, 0.03761971282958984, 0.037602302551269534, 0.037580799102783204, 0.03745737457275391, 0.03762611389160156, 0.03758256149291992, 0.03760915374755859, 0.03756467056274414, 0.03772377777099609, 0.0426929931640625, 0.03747200012207031, 0.03744384002685547, 0.037463169097900394, 0.037360321044921874, 0.03755846405029297, 0.03741491317749023, 0.037384414672851564, 0.037484321594238285, 0.03769548797607422, 0.037765342712402346, 0.03794409561157226, 0.037482528686523436, 0.03753590393066406, 0.03785612869262695, 0.037777473449707034, 0.037623294830322264, 0.03783513641357422, 0.03802239990234375, 0.037900096893310545, 0.03790943908691406, 0.03823616027832031, 0.03776716613769531, 0.038019073486328124, 0.03796582412719727, 0.03798220825195313, 0.03833436965942383, 0.03789401626586914, 0.03784339141845703, 
0.03779257583618164, 0.03773129653930664, 0.03777328109741211, 0.037777439117431644, 0.03768115234375, 0.03749273681640625, 0.03751731109619141, 0.03790028762817383, 0.03785318374633789, 0.03777740859985351, 0.03779379272460937, 0.03781625747680664, 0.03780614471435547, 0.03769705581665039, 0.03762633514404297, 0.037625438690185545, 0.037909950256347656, 0.03767516708374023, 0.03777824020385742, 0.03752345657348633, 0.03753574371337891, 0.03748454284667969, 0.037545089721679685, 0.03743628692626953, 0.03746201705932617, 0.03745548629760742, 0.037335487365722654, 0.03762102508544922, 0.037505535125732424, 0.03761711883544922, 0.037462688446044924, 0.037513248443603514, 0.03764681625366211, 0.04048486328125, 0.03788800048828125, 0.03790028762817383, 0.03805593490600586, 0.037548030853271484, 0.03756617736816406, 0.037571903228759765, 0.03740310287475586, 0.037716545104980466, 0.037695423126220706, 0.037736446380615234, 0.037599231719970705, 0.037692928314208986, 0.038789630889892575, 0.037666175842285154, 0.037563007354736326, 0.03759487915039063, 0.037505279541015624, 0.03755619049072265, 0.03778972625732422, 0.038662143707275394, 0.037754878997802735, 0.03922652816772461, 0.03771478271484375, 0.03760316848754883, 0.03754524612426758, 0.03768000030517578, 0.038817790985107424, 0.037574657440185545, 0.037701217651367185, 0.03743923187255859, 0.03758147048950195, 0.03732489776611328, 0.03743648147583008, 0.03752201461791992, 0.037504638671875, 0.03751119995117187, 0.03777407836914062, 0.0374920654296875, 0.037933216094970704, 0.037566814422607425, 0.03742310333251953, 0.037515262603759765, 0.037571998596191404, 0.03742115020751953, 0.03757881546020508, 0.03741331100463867, 0.03749683380126953, 0.04195894241333008, 0.03758729553222656, 0.03751948928833008, 0.03737782287597656, 0.03748067092895508, 0.03757033538818359, 0.03742307281494141, 0.037656833648681644, 0.037525505065917966, 0.03793305587768555, 0.0374557113647461, 0.037351585388183596, 0.0373770866394043, 0.037600193023681644, 0.03804694366455078, 0.03759958267211914, 0.037383743286132816, 0.03742572784423828, 0.03735993576049805, 0.03740262222290039, 0.037455680847167966, 0.037504833221435545, 0.03740095901489258, 0.037367809295654295, 0.03734841537475586, 0.03742406463623047, 0.03735744094848633, 0.03740390396118164, 0.03751308822631836, 0.0379156494140625, 0.037899871826171876, 0.03781264114379883, 0.03737011337280274, 0.0374126091003418, 0.03752159881591797, 0.03736966323852539, 0.03733411026000977, 0.037254047393798825, 0.03752755355834961, 0.03744588851928711, 0.037465408325195314, 0.03758486557006836, 0.03744406509399414, 0.03753779220581055, 0.03742057418823242, 0.03718201446533203, 0.03761539077758789, 0.037452129364013674, 0.037389217376708986, 0.03727561569213867, 0.03737692642211914, 0.03740403366088867, 0.03746995162963867, 0.03749977493286133, 0.03755417633056641, 0.037424415588378904, 0.037335777282714845, 0.03760537719726562, 0.037556224822998044, 0.037660446166992184, 0.037537857055664064, 0.037607391357421874, 0.037586433410644535, 0.03742790222167969, 0.03739833450317383, 0.03762198257446289, 0.03753763198852539, 0.03744166564941406, 0.037986305236816405, 0.037498878479003905, 0.03782428741455078, 0.037624031066894534, 0.03762979125976563, 0.03781753540039062, 0.03755926513671875, 0.0376360969543457, 0.03749635314941406, 0.037553985595703124, 0.0376162223815918, 0.03871289443969726, 0.039001953125, 0.03774140930175781, 0.037731136322021484, 0.03797702407836914, 0.03845939254760742, 0.03830361557006836, 
0.03808063888549805, 0.03811865615844726, 0.03797603225708008, 0.03766675186157226, 0.03765744018554688, 0.03749641418457031, 0.03765827178955078, 0.037563137054443356, 0.03760083389282227, 0.037697982788085935, 0.03821686553955078, 0.03792351913452149, 0.037777568817138674, 0.03787776184082031, 0.03749184036254883, 0.03767299270629883, 0.03764105606079102, 0.037932769775390625, 0.03757699203491211, 0.03769343948364258, 0.03768940734863281, 0.03747628784179687, 0.03790028762817383, 0.03765167999267578, 0.038253345489501954, 0.03805184173583984, 0.038107425689697265, 0.03753091049194336, 0.03763180923461914, 0.037438079833984374, 0.037926559448242185, 0.03783430480957031, 0.03764713668823242, 0.0378719367980957, 0.04002579116821289, 0.03780918502807617, 0.03774563217163086, 0.03758899307250976, 0.037835872650146485, 0.03788880157470703, 0.03744371032714844, 0.03752755355834961, 0.03745587158203125, 0.03728736114501953, 0.037552703857421876, 0.03750003051757812, 0.03752553558349609, 0.03751926422119141, 0.03735647964477539, 0.03756630325317383, 0.0376833610534668, 0.03758899307250976, 0.03843628692626953, 0.03757833480834961, 0.03758787155151367, 0.037269153594970704, 0.037368160247802734, 0.037330623626708984, 0.03747257614135742, 0.03736134338378906, 0.03728620910644531, 0.03724697494506836, 0.037383872985839846, 0.03735561752319336, 0.03757078552246094, 0.0379268798828125, 0.037326881408691406, 0.03753779220581055, 0.03724492645263672, 0.03730422210693359, 0.03725894546508789, 0.03751356887817383, 0.037254432678222656, 0.03756316757202149, 0.03758448028564453, 0.037480350494384765, 0.03734479904174805, 0.037555168151855466, 0.03749260711669922, 0.03742937469482422, 0.03721571350097656, 0.03722623825073242, 0.037292545318603515, 0.03718694305419922, 0.03715375900268555, 0.03776300811767578, 0.037326847076416016, 0.037244800567626954, 0.03727584075927735, 0.037246463775634765, 0.03725151824951172, 0.03765011215209961, 0.037593631744384765, 0.037580577850341794, 0.03747430419921875, 0.0372305908203125, 0.037384193420410154, 0.03732809448242187, 0.03730031967163086, 0.03737408065795898, 0.03725894546508789, 0.03757555389404297, 0.037619903564453126, 0.03739424133300781, 0.0375252799987793, 0.03786950302124024, 0.03730223846435547, 0.03736812973022461, 0.03724240112304687, 0.03727740859985352, 0.037563137054443356, 0.03727974319458008, 0.037404640197753906, 0.0374186897277832, 0.037284191131591794, 0.037404640197753906, 0.03736988830566406]",tokens/s,26.54693348767581,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,12193.357824,7099.84256,0.0,6704.594944,6690.791936,s,1,28.44133203125,28.44133203125,0.0,28.44133203125,28.44133203125,28.44133203125,28.44133203125,[28.44133203125],,kWh,0.0006235108196625006,6.877042901585516e-05,0.00020887405598800195,0.0009011553046663577,,MB,1414.0416,7313.752064,0.0,6897.532928,6816.50432,s,10,1.1857021408081054,0.11857021408081055,0.0010024997860044512,0.11849817657470704,0.1201054313659668,0.12013938941955567,0.12016655586242676,"[0.12009788513183593, 0.11803987121582031, 0.11808700561523437, 0.11830169677734376, 0.11748175811767578, 0.11901964569091797, 0.11683843231201171, 0.11869465637207031, 0.11896784210205077, 0.12017334747314454]",tokens/s,2159.058259146984,kWh,3.4711949982843576e-06,3.828096586059955e-07,2.307318839317635e-06,6.161323496207988e-06,tokens/kWh,41549514.5089454,MB,1429.864448,7320.04352,0.0,6903.824384,6816.50688,s,10,72.63889843749999,7.26388984375,0.015049420714715026,7.2661413574218745,7.280953271484375,7.285970288085937,7.289983901367187,"[7.2909873046875, 7.26429052734375, 7.27105419921875, 7.246630859375, 7.2679921875, 7.25025634765625, 7.2623486328125, 7.26813818359375, 7.27983837890625, 7.23736181640625]",tokens/s,8.67303901286533,kWh,0.00021200965658713236,2.3385078023517928e-05,9.511884243488237e-05,0.00033051357704553267,tokens/kWh,190612.44189469682,,s,630,72.6361952819824,0.11529554806663876,0.0010446708919572187,0.115090576171875,0.11633906249999999,0.11714389457702637,0.11896890357971192,"[0.11530003356933594, 0.11442550659179687, 0.11449600219726562, 0.11477782440185547, 0.11467190551757812, 0.11597782135009765, 0.11590493011474609, 0.1181503677368164, 0.1160010223388672, 0.11474934387207031, 0.11423603057861328, 0.11417788696289062, 0.1152425308227539, 0.11499724578857422, 0.11604441833496094, 0.12042675018310547, 0.11649612426757812, 0.11655500793457031, 0.11493247985839844, 0.11514998626708985, 0.11507798767089844, 0.11556893157958985, 0.11576290893554687, 0.11464508819580078, 0.11562179565429688, 0.11634483337402343, 0.11534073638916016, 0.11492617797851562, 0.11450511932373048, 0.11523772430419922, 0.11565200042724609, 0.11610710144042968, 0.11600873565673828, 0.11528262329101563, 0.11634473419189453, 0.11496662139892579, 0.11498291015625, 0.11481906890869141, 0.11617485046386719, 0.11482931518554687, 0.11630754852294922, 0.11550761413574219, 0.11574681854248046, 0.11531468963623047, 0.11623388671875, 0.12600972747802736, 0.11497894287109375, 0.11875052642822266, 0.11647567749023438, 0.11615718078613281, 0.11529625701904297, 0.11559321594238281, 0.11422720336914062, 0.11409548950195313, 0.11512464141845703, 0.11666044616699218, 0.11514262390136719, 0.1166060791015625, 0.11510633850097657, 0.11519219207763672, 0.11556454467773437, 0.11493689727783203, 0.11478102111816406, 0.11487741088867187, 0.11472191619873047, 0.11617779541015626, 0.11568742370605468, 0.11547647857666016, 0.11493785858154297, 0.11429248046875, 0.11434355163574218, 0.11523455810546875, 0.11492153930664062, 0.116283203125, 0.117570556640625, 0.11477922821044922, 0.11559152221679687, 0.11497119903564453, 0.11427635192871094, 0.11439513397216797, 0.11502793884277343, 0.11443756866455078, 0.11583958435058593, 0.11552931213378906, 0.114968994140625, 0.1166346206665039, 0.11602812957763672, 0.11464463806152343, 0.11552543640136718, 0.11564064025878906, 0.11542176055908203, 0.11569760131835938, 0.11571119689941406, 0.11521218872070313, 0.11535372924804688, 0.11471894073486329, 
0.11477455902099609, 0.11447641754150391, 0.11445916748046875, 0.11548477172851562, 0.11646566772460938, 0.11871561431884765, 0.1153054428100586, 0.11541814422607422, 0.11486022186279297, 0.11465174102783203, 0.11518966674804687, 0.11540294647216796, 0.11669884490966796, 0.11518915557861328, 0.11437340545654297, 0.11584512329101562, 0.11483920288085937, 0.11444608306884765, 0.11628141021728515, 0.1153986587524414, 0.11510816192626953, 0.11628771209716797, 0.1155455322265625, 0.11538079833984374, 0.11531641387939454, 0.11446918487548828, 0.11434803009033204, 0.11413654327392578, 0.11474179077148437, 0.11548419189453125, 0.11519884490966797, 0.11557833862304688, 0.11564701080322265, 0.11837235260009765, 0.11480802917480469, 0.11720188903808594, 0.11519366455078126, 0.11558297729492187, 0.11497062683105469, 0.11751833343505859, 0.11452825927734375, 0.11466957092285156, 0.11468342590332031, 0.11784758758544922, 0.11503507232666016, 0.11471593475341797, 0.11548336029052735, 0.11580413055419922, 0.11510137939453124, 0.11875977325439453, 0.11483689880371094, 0.11418685150146485, 0.11474329376220703, 0.11525529479980469, 0.11536294555664063, 0.1157161636352539, 0.11509228515625, 0.11497401428222656, 0.11522860717773438, 0.11466194915771484, 0.11441490936279297, 0.11542617797851562, 0.11470162963867188, 0.11561235046386718, 0.1167227554321289, 0.11475244903564454, 0.11542063903808594, 0.11472541046142579, 0.11464704132080078, 0.11466751861572265, 0.11626290893554687, 0.11459693145751954, 0.11620652770996094, 0.11579801940917969, 0.11555987548828126, 0.11446044921875, 0.11532323455810548, 0.11487664031982422, 0.11470281219482421, 0.11524697875976563, 0.1159842529296875, 0.11503119659423829, 0.11449839782714843, 0.11491276550292968, 0.1147479019165039, 0.11485164642333984, 0.1145304946899414, 0.1149333724975586, 0.11499510192871094, 0.11596233367919923, 0.11518726348876954, 0.1189883804321289, 0.11527254486083985, 0.1148098907470703, 0.11480572509765626, 0.11504630279541016, 0.11403385925292969, 0.11465206146240234, 0.11511138916015624, 0.11513910675048829, 0.11455423736572265, 0.11495692443847656, 0.11450367736816407, 0.1146692123413086, 0.1152290267944336, 0.11548262023925782, 0.11578982543945313, 0.11466537475585938, 0.11506674957275391, 0.11436259460449219, 0.11447529602050781, 0.11413270568847657, 0.11427606201171875, 0.11414147186279297, 0.11510578918457032, 0.11496060943603516, 0.11427804565429688, 0.11533222198486329, 0.11493888092041016, 0.11666448211669922, 0.1141451187133789, 0.11750109100341796, 0.11460079956054688, 0.11538745880126954, 0.11566563415527344, 0.11498438262939453, 0.11390975952148437, 0.11476873779296876, 0.11435206604003906, 0.11637267303466797, 0.11480579376220704, 0.11611103820800782, 0.114781982421875, 0.11624838256835937, 0.11534937286376953, 0.11459954833984375, 0.11450777435302735, 0.11481804656982422, 0.11452227020263672, 0.1145588150024414, 0.11580006408691407, 0.11524710083007812, 0.1148436508178711, 0.11507679748535156, 0.1149948501586914, 0.11492607879638672, 0.1148416976928711, 0.11518572998046875, 0.11476150512695313, 0.11867568206787109, 0.11573801422119141, 0.11484630584716797, 0.11462451171875, 0.11594342041015625, 0.11438089752197265, 0.11330960083007813, 0.11523149108886718, 0.11573366546630859, 0.11542790222167969, 0.1154276123046875, 0.11474944305419922, 0.11482854461669922, 0.1148485107421875, 0.1144438705444336, 0.11714396667480469, 0.11630182647705078, 0.11567072296142578, 0.11788726043701171, 0.1154416961669922, 0.11462620544433594, 
0.1146712646484375, 0.11444294738769531, 0.11452178955078125, 0.11503033447265625, 0.11633843231201171, 0.11616860961914062, 0.11966422271728516, 0.11536978912353515, 0.11477903747558593, 0.11474127960205079, 0.11468185424804687, 0.1150013427734375, 0.115797119140625, 0.11456355285644532, 0.11532546997070313, 0.11565052795410156, 0.11422447967529296, 0.1141537628173828, 0.1142188491821289, 0.11428294372558594, 0.11504000091552734, 0.11550508880615235, 0.11590892791748048, 0.11477401733398437, 0.11556422424316407, 0.11508921813964844, 0.114498046875, 0.1143746566772461, 0.11510784149169923, 0.11459359741210938, 0.11510006713867188, 0.11539788818359376, 0.11538220977783203, 0.11490972900390625, 0.11511113739013672, 0.11630016326904297, 0.11515542602539063, 0.11800704193115234, 0.1153617935180664, 0.11457357025146485, 0.11516681671142578, 0.11497465515136719, 0.11519599914550781, 0.11435887908935546, 0.11435846710205078, 0.11938211059570313, 0.11566851043701172, 0.11587948608398438, 0.11559414672851563, 0.11486412811279297, 0.11480604553222656, 0.1143057632446289, 0.11507939147949219, 0.11466294097900391, 0.11474969482421875, 0.11553507232666016, 0.11560755157470703, 0.11511273956298829, 0.11468800354003907, 0.1145789794921875, 0.11466185760498047, 0.11429682922363281, 0.11502496337890625, 0.11581468963623047, 0.11601372528076172, 0.11581849670410156, 0.11576694488525391, 0.11481126403808593, 0.1144258270263672, 0.11421491241455078, 0.11523686218261718, 0.11477401733398437, 0.11584921264648437, 0.11524915313720703, 0.11508943939208985, 0.1150893783569336, 0.11420374298095703, 0.11447388458251953, 0.11598847961425782, 0.11478813171386719, 0.11490531158447266, 0.11530025482177735, 0.11522665405273437, 0.11606204986572266, 0.11476195526123047, 0.1148326416015625, 0.11461504364013672, 0.11632809448242187, 0.115714111328125, 0.11517699432373046, 0.1149849624633789, 0.11546851348876953, 0.117772705078125, 0.11456086730957031, 0.11432726287841796, 0.11463491058349609, 0.11659715270996093, 0.115212158203125, 0.11681372833251953, 0.11538835144042969, 0.11449788665771485, 0.11504828643798828, 0.11428361511230468, 0.11424665832519532, 0.11443814086914063, 0.11428044891357422, 0.11459401702880859, 0.11599030303955078, 0.11507711791992188, 0.11469414520263672, 0.11491526031494141, 0.11366515350341796, 0.1146569595336914, 0.11406777954101563, 0.11620966339111329, 0.11529420471191407, 0.11569097900390625, 0.11585753631591797, 0.11516150665283204, 0.1166192626953125, 0.1149725112915039, 0.11506089782714844, 0.11773542022705077, 0.11578572845458984, 0.11456511688232422, 0.11670118713378906, 0.11466957092285156, 0.11408383941650391, 0.11431526184082032, 0.1150218276977539, 0.11792998504638671, 0.11526780700683593, 0.11665952301025391, 0.11611305236816406, 0.11479532623291015, 0.11514182281494141, 0.11478237152099609, 0.11476233673095704, 0.11493154907226563, 0.11522684478759766, 0.11521024322509765, 0.11600691223144531, 0.11580006408691407, 0.11502191925048828, 0.11544265747070312, 0.11473811340332031, 0.11489446258544922, 0.11488236999511718, 0.11540332794189453, 0.11580973052978516, 0.11656864166259766, 0.11531190490722656, 0.1154849624633789, 0.11474169921875, 0.11482726287841796, 0.1148231658935547, 0.1147894058227539, 0.11486502075195312, 0.11580384063720703, 0.11643331146240235, 0.11472035217285156, 0.11501200103759765, 0.114155517578125, 0.11429273223876953, 0.11445977783203125, 0.11524114990234376, 0.1150030746459961, 0.11604624176025391, 0.11539692687988282, 0.11523260498046875, 0.11465484619140626, 
0.11455149078369141, 0.11431922912597656, 0.11480006408691407, 0.11523689270019531, 0.11590259552001952, 0.11552191925048828, 0.11892121887207031, 0.1152020492553711, 0.11444019317626954, 0.11510784149169923, 0.1150978240966797, 0.11479837036132813, 0.11547200012207032, 0.11530278778076172, 0.11479862213134766, 0.11516105651855468, 0.1146081314086914, 0.11409139251708984, 0.11437734222412109, 0.1147146224975586, 0.11444188690185547, 0.11503651428222657, 0.11565670776367187, 0.11582169342041015, 0.11448614501953125, 0.1142108154296875, 0.11457852935791016, 0.1143609619140625, 0.11462268829345704, 0.1162550048828125, 0.11574044799804688, 0.11515494537353516, 0.11520169830322266, 0.11512380981445312, 0.11839686584472656, 0.11520012664794922, 0.11944172668457032, 0.116129150390625, 0.1164967041015625, 0.11557548522949218, 0.1151488037109375, 0.11549247741699219, 0.11479689788818359, 0.11493379211425782, 0.1168436508178711, 0.11614297485351563, 0.11552092742919921, 0.11570175933837891, 0.11628173065185547, 0.11518531036376953, 0.11494588470458984, 0.11457814025878907, 0.11489405059814453, 0.11391776275634766, 0.11499187469482422, 0.1157245101928711, 0.11525325012207031, 0.1145323486328125, 0.11486617279052734, 0.11455693054199219, 0.11442546844482422, 0.11449609375, 0.11505372619628906, 0.11517523193359375, 0.11619574737548828, 0.117266845703125, 0.11549209594726563, 0.11487596893310546, 0.1147713623046875, 0.11475424194335937, 0.11454774475097657, 0.11512105560302735, 0.11725193786621094, 0.11566102600097657, 0.11614002990722656, 0.11481702423095703, 0.11505868530273437, 0.11491327667236328, 0.11489199829101562, 0.11566716766357422, 0.11633309173583985, 0.11559849548339844, 0.11927417755126953, 0.1157480926513672, 0.1157778549194336, 0.11411929321289062, 0.11422720336914062, 0.11543142700195312, 0.11563593292236328, 0.1164393310546875, 0.1168476791381836, 0.11697401428222656, 0.1173939208984375, 0.11492697906494141, 0.11486790466308594, 0.1149736328125, 0.11570585632324219, 0.11637344360351562, 0.11555232238769532, 0.115504638671875, 0.11520674896240235, 0.11470982360839843, 0.11443587493896484, 0.11461014556884766, 0.11538518524169922, 0.11500543975830078, 0.11642060852050781, 0.11636531066894532, 0.11544684600830078, 0.11583106994628906, 0.11501615905761718, 0.11484384155273437, 0.11452623748779298, 0.11461830139160156, 0.11531874847412109, 0.11593529510498046, 0.11575657653808594, 0.11562544250488281, 0.11472179412841797, 0.11470848083496094, 0.11797503662109375, 0.11549491119384765, 0.11850982666015625, 0.11613884735107421, 0.115880126953125, 0.11568370819091797, 0.11546249389648437, 0.11477811431884766, 0.11424671936035156, 0.11472172546386719, 0.11443225860595703, 0.11664653015136718, 0.1154703369140625, 0.11515058898925781, 0.11509171295166015, 0.11402265930175781, 0.11402384185791016, 0.11424348449707031, 0.11435842895507813, 0.11463459014892578, 0.11623673248291015, 0.11534130859375, 0.1161396484375, 0.1152311019897461, 0.11419843292236329, 0.11403478240966797, 0.11410636901855468, 0.11421900939941407, 0.11532028961181641, 0.11669558715820312, 0.115115234375, 0.11482323455810547, 0.11423567962646484, 0.11454428863525391, 0.11415017700195312, 0.11412226867675782, 0.11443001556396484, 0.11598070526123047, 0.1154378204345703, 0.11452995300292969, 0.11487996673583985, 0.11422579193115234, 0.11403798675537109, 0.11398633575439453, 0.11445145416259765, 0.1137242202758789, 0.11554563140869141, 0.11470111846923828, 0.11480608367919921, 0.11516336059570312, 0.11460025787353516, 
0.11434188842773438, 0.11476787567138672, 0.11440537261962891, 0.11445043182373046, 0.11552745819091798, 0.11581257629394531, 0.1156136932373047, 0.11831008148193359, 0.11445088195800782, 0.11424806213378906, 0.1141739501953125, 0.11436844635009766, 0.11565267181396484, 0.11437641906738281, 0.11532681274414062, 0.11503593444824219, 0.11431587219238282, 0.1146921615600586, 0.11411670684814453, 0.11424460601806641, 0.11462928009033203, 0.11714380645751953]",tokens/s,8.673361779953705,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,18991.865856,9680.32256,0.0,9277.800448,9256.005632,s,1,39.85782421875,39.85782421875,0.0,39.85782421875,39.85782421875,39.85782421875,39.85782421875,[39.85782421875],,kWh,0.0009481576373791539,0.00010458169004561487,0.0003198433114299931,0.0013725826388547619,,MB,2170.888192,9862.774784,0.0,9439.281152,9413.454848,s,10,1.5709284667968748,0.1570928466796875,0.0009790167936061852,0.15696588897705077,0.15816301422119142,0.1585912956237793,0.1589339207458496,"[0.15806784057617188, 0.1570900421142578, 0.15802310180664061, 0.15901957702636718, 0.15660496520996095, 0.15569471740722657, 0.15580303955078126, 0.15690159606933593, 0.1570301818847656, 0.15669340515136718]",tokens/s,1629.6095297196077,kWh,4.593446408723938e-06,5.065726889906215e-07,3.0451760472500426e-06,8.1451951449646e-06,tokens/kWh,31429572.336061273,MB,2183.389184,9875.357696,0.0,9451.864064,9358.127616,s,10,97.74451855468752,9.77445185546875,0.03576339654222028,9.769865722656249,9.82290478515625,9.838889892578125,9.851677978515625,"[9.7633349609375, 9.854875, 9.7711552734375, 9.729509765625, 9.7692578125, 9.7418134765625, 9.74298046875, 9.781765625, 9.7704736328125, 9.8193525390625]",tokens/s,6.445374219604127,kWh,0.00028361039471169156,3.1283777025815956e-05,0.00013058426418955014,0.00044547843592705765,tokens/kWh,141420.98678445476,,s,630,97.74119371032717,0.15514475192115418,0.00171058807896431,0.1547769317626953,0.15677823638916016,0.15819857330322265,0.1623586624145508,"[0.15609423828125, 0.1544153289794922, 0.15576678466796876, 0.15642784118652345, 0.15412454223632813, 0.15515052795410156, 0.15404978942871095, 0.15397760009765624, 0.15437210083007813, 0.15553141784667968, 0.1556334991455078, 0.15565823364257814, 0.1545154266357422, 0.1553424072265625, 0.15497468566894532, 0.1554014434814453, 0.15563014221191407, 0.1579967041015625, 0.1552451171875, 0.15449229431152345, 0.15444851684570313, 0.15442086791992188, 0.1542718048095703, 0.15534521484375, 0.15501925659179688, 0.1549327392578125, 0.1606108093261719, 0.1559306182861328, 0.15473049926757812, 0.15523126220703126, 0.15525167846679688, 0.15393792724609376, 0.1537249298095703, 0.15608966064453125, 0.15415776062011718, 0.15463014221191407, 0.15497433471679686, 0.15474470520019531, 0.1539575653076172, 0.15457363891601564, 0.15461581420898438, 0.15542886352539062, 0.15413247680664063, 
0.1539932098388672, 0.15387251281738282, 0.15813346862792968, 0.15363958740234376, 0.15400483703613282, 0.1547288055419922, 0.15464405822753907, 0.15420489501953125, 0.15531007385253906, 0.1533824920654297, 0.1576280975341797, 0.1555383605957031, 0.15425485229492186, 0.1539033966064453, 0.1540137023925781, 0.15403622436523437, 0.15468089294433593, 0.154482177734375, 0.15449798583984375, 0.15412841796875, 0.1543258819580078, 0.15407513427734376, 0.15429837036132812, 0.15558035278320312, 0.15482870483398437, 0.1553368682861328, 0.15894140625, 0.15472233581542968, 0.154702880859375, 0.15462847900390625, 0.15491104125976562, 0.15585910034179687, 0.1561636199951172, 0.15468374633789062, 0.15486054992675782, 0.15440797424316408, 0.15495164489746094, 0.15555532836914063, 0.15504435729980467, 0.15586679077148438, 0.15675018310546876, 0.15579750061035155, 0.15670045471191407, 0.15677871704101562, 0.15709593200683594, 0.15993008422851562, 0.16317674255371092, 0.16008192443847657, 0.1591742401123047, 0.1620320281982422, 0.1598323211669922, 0.158234375, 0.15730819702148438, 0.1585118408203125, 0.1569259490966797, 0.1563013458251953, 0.15519766235351562, 0.15561228942871094, 0.1566498565673828, 0.15628636169433593, 0.1562530517578125, 0.15743997192382814, 0.15673139953613283, 0.15732298278808593, 0.15534109497070311, 0.1554750061035156, 0.1548297576904297, 0.155182373046875, 0.15492169189453125, 0.1551564178466797, 0.15554771423339844, 0.15647296142578124, 0.1585094451904297, 0.1590786590576172, 0.1566175079345703, 0.15557321166992188, 0.15815475463867187, 0.15636837768554687, 0.15642672729492188, 0.15539736938476562, 0.1550323486328125, 0.15499618530273437, 0.1556260223388672, 0.15411978149414063, 0.1539407958984375, 0.15414886474609374, 0.15515225219726564, 0.1541893768310547, 0.15576223754882812, 0.1543211212158203, 0.15423773193359375, 0.1552711639404297, 0.15460966491699218, 0.15443510437011718, 0.15498696899414063, 0.15470550537109376, 0.15513232421875, 0.15457682800292968, 0.15468141174316405, 0.1555292205810547, 0.1632030029296875, 0.156220703125, 0.15498320007324218, 0.155491455078125, 0.15489459228515626, 0.15422697448730469, 0.1550524444580078, 0.15412979125976561, 0.15444026184082033, 0.1558671417236328, 0.15414813232421876, 0.15401817321777345, 0.1554981689453125, 0.15448304748535155, 0.1543744659423828, 0.1544656982421875, 0.1545523223876953, 0.1563817596435547, 0.1549250946044922, 0.15400550842285157, 0.15426553344726562, 0.15424514770507813, 0.15332765197753906, 0.15323866271972655, 0.15441622924804688, 0.15459715270996094, 0.15404031372070312, 0.15450930786132813, 0.15516822814941406, 0.1541802520751953, 0.15450303649902344, 0.15523788452148438, 0.1541837158203125, 0.15418333435058593, 0.15466770935058594, 0.1588777618408203, 0.15489030456542968, 0.1586851806640625, 0.1551853790283203, 0.15543475341796875, 0.15609245300292968, 0.15505305480957032, 0.15450828552246093, 0.15442460632324218, 0.15526576232910155, 0.1623420867919922, 0.15479849243164062, 0.15452365112304686, 0.1542791290283203, 0.15461865234375, 0.15366108703613282, 0.153710205078125, 0.15434938049316407, 0.15288764953613282, 0.15272966003417968, 0.153176513671875, 0.15372489929199218, 0.1530983428955078, 0.15302607727050782, 0.15377865600585938, 0.15331715393066406, 0.15411016845703124, 0.15383305358886717, 0.1531703338623047, 0.163571044921875, 0.1537379913330078, 0.1528319091796875, 0.15317616271972656, 0.15214582824707032, 0.15265379333496093, 0.152657958984375, 0.15264767456054687, 0.15297325134277343, 
0.15589328002929687, 0.15324185180664063, 0.15562745666503905, 0.15313542175292968, 0.1537393035888672, 0.153415771484375, 0.154285400390625, 0.15421318054199218, 0.15309388732910156, 0.1547244873046875, 0.15510890197753907, 0.15569705200195313, 0.15436412048339843, 0.15381864929199218, 0.15413894653320312, 0.15499711608886718, 0.15461526489257812, 0.15428866577148437, 0.15406898498535157, 0.15423692321777344, 0.155430908203125, 0.15433663940429687, 0.15463430786132812, 0.1548355255126953, 0.15552511596679688, 0.15645440673828126, 0.15877328491210937, 0.15761456298828125, 0.1562499237060547, 0.1542056579589844, 0.15492579650878907, 0.15459280395507813, 0.1568149108886719, 0.15531864929199218, 0.1548252410888672, 0.1546929931640625, 0.15637432861328124, 0.15677818298339843, 0.15587020874023438, 0.15483290100097657, 0.15629318237304687, 0.15666726684570312, 0.15604579162597657, 0.1549988555908203, 0.15505821228027344, 0.15463629150390626, 0.15539820861816406, 0.15475648498535155, 0.15460409545898438, 0.15503759765625, 0.1552049865722656, 0.15493724060058595, 0.15444256591796876, 0.15594496154785156, 0.1551558380126953, 0.1577532501220703, 0.15549104309082032, 0.1548001251220703, 0.15409561157226562, 0.15460147094726562, 0.1543065643310547, 0.1547459259033203, 0.15429507446289062, 0.15465843200683593, 0.15629878234863281, 0.15380787658691406, 0.15862374877929689, 0.15450111389160157, 0.15397584533691405, 0.1547512664794922, 0.15562208557128906, 0.1557954864501953, 0.155942138671875, 0.15436163330078126, 0.15383238220214843, 0.15371852111816406, 0.15430886840820313, 0.1537244873046875, 0.15406326293945313, 0.15462757873535157, 0.15429685974121093, 0.15419183349609375, 0.15413401794433593, 0.15452595520019533, 0.15408087158203124, 0.15387901306152343, 0.15389840698242188, 0.1538527374267578, 0.15447622680664064, 0.15400291442871095, 0.1593639373779297, 0.15762760925292968, 0.15514076232910157, 0.154783203125, 0.15437277221679688, 0.1551985321044922, 0.1561199951171875, 0.1543741455078125, 0.15488729858398437, 0.15417202758789061, 0.15431631469726562, 0.15401017761230468, 0.1549264373779297, 0.15432479858398437, 0.15607417297363282, 0.15385462951660156, 0.15446937561035157, 0.15484019470214844, 0.15407510375976563, 0.15430032348632813, 0.15372198486328126, 0.15431974792480468, 0.15471002197265624, 0.1539066925048828, 0.15488829040527344, 0.1540714569091797, 0.15456460571289063, 0.15412147521972655, 0.1650145263671875, 0.15452467346191406, 0.1561464385986328, 0.1562931213378906, 0.15504383850097656, 0.15465267944335936, 0.15483477783203126, 0.15401094055175782, 0.1541821746826172, 0.1576197509765625, 0.15397148132324218, 0.15514009094238282, 0.1550984649658203, 0.1543275146484375, 0.15324998474121093, 0.15387615966796875, 0.15553158569335937, 0.15423898315429688, 0.15419187927246095, 0.15444569396972657, 0.15325506591796875, 0.15437103271484376, 0.1541898193359375, 0.15379779052734374, 0.15436032104492187, 0.15341007995605468, 0.15378150939941407, 0.15304499816894532, 0.15366201782226563, 0.15390931701660157, 0.15310015869140625, 0.153679931640625, 0.1539436798095703, 0.15405683898925782, 0.15423922729492187, 0.1573369903564453, 0.16209298706054687, 0.15494210815429688, 0.1526456298828125, 0.1527373504638672, 0.15292630004882812, 0.15320841979980468, 0.15446502685546876, 0.1542941436767578, 0.15693574523925782, 0.15282669067382812, 0.15345663452148436, 0.15375155639648438, 0.1537288360595703, 0.1543272247314453, 0.15396893310546875, 0.15326307678222656, 0.1538321990966797, 0.1546790771484375, 
0.15436207580566405, 0.15418357849121095, 0.15350373840332032, 0.15343624877929687, 0.15374745178222657, 0.153712646484375, 0.15358534240722657, 0.154228515625, 0.1540551300048828, 0.15403424072265626, 0.15971849060058593, 0.15436195373535155, 0.15408146667480468, 0.15457958984375, 0.15321702575683593, 0.15290133666992187, 0.15359318542480468, 0.15321359252929687, 0.15309420776367189, 0.15684841918945314, 0.15547564697265626, 0.15408767700195314, 0.15543212890625, 0.15552525329589845, 0.15438018798828124, 0.1540697021484375, 0.15510684204101563, 0.15330108642578125, 0.1531945343017578, 0.15458125305175782, 0.15392959594726563, 0.15497042846679687, 0.15396659851074218, 0.15627468872070313, 0.15441510009765624, 0.1547120666503906, 0.15423814392089844, 0.15391212463378906, 0.15777381896972656, 0.15446336364746094, 0.15768975830078125, 0.15377711486816406, 0.15389030456542968, 0.15429887390136718, 0.15451557922363282, 0.15796768188476562, 0.16016236877441406, 0.15635372924804689, 0.15356748962402345, 0.15425389099121095, 0.1563627471923828, 0.15551712036132812, 0.155287353515625, 0.15580364990234374, 0.15465817260742187, 0.1549707489013672, 0.15335423278808594, 0.15310438537597656, 0.15447203063964843, 0.15438275146484376, 0.15352330017089844, 0.15420509338378907, 0.15479965209960939, 0.1537329864501953, 0.15382179260253906, 0.15367578125, 0.15301632690429687, 0.15410748291015625, 0.15429058837890625, 0.15393894958496093, 0.15505833435058594, 0.15539292907714844, 0.15640931701660157, 0.1584602508544922, 0.15666726684570312, 0.15569381713867186, 0.15535308837890624, 0.15429827880859376, 0.15532450866699218, 0.15538380432128907, 0.15497010803222655, 0.15465180969238282, 0.15547862243652344, 0.156182373046875, 0.15492547607421875, 0.15707254028320314, 0.15396095275878907, 0.16342051696777343, 0.15434751892089843, 0.15556764221191408, 0.15658441162109374, 0.1566740417480469, 0.155504638671875, 0.15496319580078124, 0.15451417541503906, 0.15530581665039062, 0.15404048156738281, 0.15521177673339845, 0.154967041015625, 0.15464118957519532, 0.15594451904296874, 0.1547451171875, 0.15567706298828124, 0.15492279052734376, 0.15519577026367187, 0.15478973388671874, 0.15406454467773437, 0.15473085021972657, 0.15513087463378905, 0.15906425476074218, 0.15815481567382814, 0.15587353515625, 0.15504435729980467, 0.1551381378173828, 0.1559423370361328, 0.15613705444335937, 0.15477066040039061, 0.1540425262451172, 0.15413311767578125, 0.15380274963378907, 0.1554698181152344, 0.1561333770751953, 0.1566793212890625, 0.15520217895507812, 0.1553984375, 0.15535711669921876, 0.15577609252929686, 0.15470399475097657, 0.15579942321777343, 0.15563664245605469, 0.15372309875488283, 0.15356192016601564, 0.15426220703125, 0.15397506713867187, 0.1548431396484375, 0.15569920349121094, 0.1535774688720703, 0.15524249267578125, 0.15345660400390626, 0.15435574340820313, 0.15434474182128907, 0.15385856628417968, 0.1538145294189453, 0.15316450500488282, 0.15443299865722657, 0.15402572631835937, 0.15557096862792968, 0.154355712890625, 0.15465692138671874, 0.15496380615234376, 0.15387391662597658, 0.15397555541992186, 0.1542755889892578, 0.15593881225585937, 0.1548299560546875, 0.15432135009765624, 0.15503199768066406, 0.155363037109375, 0.15465090942382811, 0.1541077423095703, 0.1543497314453125, 0.1553177032470703, 0.15574835205078125, 0.1546174774169922, 0.154076416015625, 0.1554808654785156, 0.15659849548339844, 0.15523500061035156, 0.15672320556640626, 0.15537356567382812, 0.1546279296875, 0.15499075317382813, 0.15434909057617188, 
0.169265625, 0.15703797912597656, 0.1560745849609375, 0.15461581420898438, 0.15482028198242187, 0.15570156860351564, 0.15487020874023438, 0.15925503540039063, 0.15501516723632813, 0.15410733032226562, 0.154932861328125, 0.1547683868408203, 0.154483642578125, 0.15594371032714843, 0.15471417236328125, 0.15484739685058593, 0.15447450256347656, 0.1545891876220703, 0.1548062744140625, 0.15479603576660156, 0.15559677124023438, 0.15556405639648438, 0.15541891479492187, 0.15663398742675783, 0.15679904174804687, 0.15727696228027344, 0.1570918426513672, 0.1595842590332031, 0.15591410827636717, 0.15614169311523438, 0.15569100952148437, 0.1549844512939453, 0.15527049255371095, 0.15654141235351562, 0.1554752960205078, 0.15557289123535156, 0.15841912841796876, 0.155789306640625, 0.15672933959960939, 0.15585916137695313, 0.15562728881835938, 0.15497421264648437, 0.1568522186279297, 0.1569869384765625, 0.15560118103027343, 0.15530409240722656, 0.15637493896484375, 0.1563568572998047, 0.1550575408935547, 0.15524864196777344, 0.15554150390625, 0.156876708984375, 0.15571002197265624, 0.15688447570800781, 0.15637554931640624, 0.15667543029785155, 0.1557490234375, 0.15472579956054688, 0.15513375854492187, 0.15482550048828125, 0.15547126770019531, 0.15519151306152343, 0.15540876770019532, 0.16236543273925783, 0.155893310546875, 0.1547390441894531, 0.15524978637695314, 0.15490556335449218, 0.15495578002929689]",tokens/s,6.445593470723445,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,1096.6016,709.820416,0.0,314.5728,299.62752,s,1,8.29876953125,8.29876953125,0.0,8.29876953125,8.29876953125,8.29876953125,8.29876953125,[8.29876953125],,kWh,3.174359504165902e-05,3.4870759261862695e-06,1.1626398190009435e-05,4.6857069157854725e-05,,MB,1247.06816,797.9008,0.0,381.681664,359.87456,s,10,0.2938719673156738,0.029387196731567384,0.001027390722866223,0.029006208419799805,0.030425178146362305,0.031296924400329586,0.03199432140350342,"[0.030231456756591796, 0.02891900825500488, 0.03216867065429688, 0.0285383358001709, 0.029249759674072267, 0.02894256019592285, 0.028616064071655272, 0.02906985664367676, 0.02890902328491211, 0.029227231979370116]",tokens/s,8711.276626293782,kWh,8.580955272303441e-07,9.463321532899257e-08,3.7207301007649246e-07,1.324801752635829e-06,tokens/kWh,193236459.3348867,MB,1258.438656,820.969472,0.0,404.750336,361.449984,s,10,17.94017346191406,1.794017346191406,0.006884328040200046,1.7932661743164062,1.8034279541015625,1.8042221557617188,1.8048575170898438,"[1.80325146484375, 1.7861607666015624, 1.793760986328125, 1.792058837890625, 1.7860655517578126, 1.7999669189453125, 1.7968050537109375, 1.805016357421875, 1.7927713623046875, 
1.784316162109375]",tokens/s,35.11671731254177,kWh,5.20455033881874e-05,5.740288016313365e-06,2.0339026892123554e-05,7.81248182966243e-05,tokens/kWh,806401.8755320698,,s,630,17.93442261314392,0.02846733748118083,0.0005289263217916553,0.028420432090759278,0.028783866119384764,0.028968376731872556,0.029877254180908207,"[0.028222496032714844, 0.028015584945678712, 0.028149728775024415, 0.028395807266235352, 0.02823756790161133, 0.028222848892211914, 0.028344959259033204, 0.028248064041137694, 0.02818191909790039, 0.028205663681030273, 0.028365055084228517, 0.028327167510986326, 0.02826214408874512, 0.028234495162963866, 0.02812883186340332, 0.02855776023864746, 0.028411199569702148, 0.03115283203125, 0.028357759475708007, 0.028303712844848634, 0.028246559143066407, 0.02976358413696289, 0.029706239700317383, 0.028753215789794923, 0.02852275276184082, 0.028543424606323243, 0.028442623138427735, 0.028526304244995117, 0.02832592010498047, 0.02833228874206543, 0.029394943237304686, 0.02834022331237793, 0.02834432029724121, 0.028543167114257813, 0.02891516876220703, 0.028709215164184572, 0.02860438346862793, 0.028648672103881837, 0.028737855911254884, 0.02850806427001953, 0.028518239974975587, 0.028854303359985352, 0.028860895156860352, 0.028788991928100586, 0.028604415893554686, 0.028672000885009766, 0.02868351936340332, 0.02859699249267578, 0.028548799514770507, 0.02862726402282715, 0.029104127883911132, 0.028815359115600587, 0.029024255752563476, 0.02876185607910156, 0.028574176788330078, 0.02896054458618164, 0.02856118392944336, 0.02882921600341797, 0.028699520111083985, 0.02866975975036621, 0.028598272323608398, 0.028442304611206056, 0.028631359100341796, 0.028447263717651366, 0.02838921546936035, 0.028783296585083006, 0.028396991729736327, 0.028301792144775392, 0.02817228889465332, 0.028184864044189455, 0.028137184143066405, 0.028061920166015625, 0.028069664001464843, 0.027963232040405274, 0.028091615676879882, 0.02834726333618164, 0.028673728942871093, 0.02840025520324707, 0.02846188735961914, 0.028519136428833008, 0.028544479370117188, 0.028336896896362304, 0.028435903549194334, 0.028317695617675782, 0.0282587833404541, 0.028764255523681642, 0.028262496948242188, 0.028233407974243164, 0.028285120010375978, 0.028251935958862304, 0.027902463912963867, 0.027926015853881835, 0.028201120376586914, 0.028435871124267577, 0.028289119720458986, 0.028359039306640625, 0.02855344009399414, 0.02834771156311035, 0.028283584594726564, 0.02824511909484863, 0.028230239868164062, 0.02847158432006836, 0.028370943069458008, 0.028175968170166016, 0.02819660758972168, 0.02835321617126465, 0.02833593559265137, 0.028530048370361327, 0.028121152877807618, 0.028123903274536132, 0.0284117431640625, 0.028401695251464843, 0.02904217529296875, 0.028569631576538086, 0.028414527893066407, 0.028358655929565428, 0.028461055755615236, 0.02840575981140137, 0.028307039260864256, 0.02821776008605957, 0.02861231994628906, 0.028612607955932616, 0.028414239883422853, 0.028423328399658204, 0.028343391418457032, 0.028093215942382812, 0.028544736862182618, 0.02855331230163574, 0.02892576026916504, 0.028586528778076173, 0.028591903686523437, 0.02858620834350586, 0.02867184066772461, 0.028642847061157228, 0.028843711853027344, 0.0286561279296875, 0.02858950424194336, 0.028552032470703124, 0.028639135360717775, 0.02855311965942383, 0.02944393539428711, 0.02953660774230957, 0.02895871925354004, 0.028848127365112306, 0.028612607955932616, 0.030217248916625975, 0.028721408843994142, 0.028506847381591798, 0.028726783752441407, 
0.02912713623046875, 0.028538047790527345, 0.02859913635253906, 0.02874982452392578, 0.028491775512695314, 0.02876518440246582, 0.028804096221923828, 0.028596223831176756, 0.028211200714111328, 0.02817024040222168, 0.027790943145751954, 0.028302783966064452, 0.02819580841064453, 0.02840291213989258, 0.0283287353515625, 0.028298368453979494, 0.028269439697265624, 0.028246015548706056, 0.028299264907836914, 0.02815385627746582, 0.02819891166687012, 0.028073984146118162, 0.02774982452392578, 0.027680896759033204, 0.027789312362670897, 0.027851200103759764, 0.027967487335205078, 0.028104703903198244, 0.027841856002807617, 0.027955904006958007, 0.028360639572143555, 0.02851568031311035, 0.028682975769042968, 0.028463104248046874, 0.028192768096923827, 0.028262304306030273, 0.02812259292602539, 0.028015199661254882, 0.028387359619140625, 0.028163936614990233, 0.028150272369384766, 0.028872255325317384, 0.028690879821777343, 0.028369983673095702, 0.02885523223876953, 0.028375040054321288, 0.028278335571289063, 0.0282935676574707, 0.028294591903686522, 0.028207679748535157, 0.028194496154785156, 0.02838150405883789, 0.028369152069091796, 0.028155519485473634, 0.027977344512939453, 0.028068351745605468, 0.02809062385559082, 0.02802252769470215, 0.028049407958984376, 0.028090368270874022, 0.028026432037353517, 0.02778566360473633, 0.027838623046875, 0.027949119567871095, 0.028143232345581054, 0.02824617576599121, 0.0287825927734375, 0.028657663345336915, 0.028557056427001952, 0.028604095458984374, 0.02849430465698242, 0.02817033576965332, 0.028063488006591798, 0.028186880111694335, 0.028241920471191406, 0.028167327880859374, 0.028322656631469725, 0.028685600280761718, 0.028762847900390624, 0.028624895095825196, 0.028761600494384764, 0.02857561683654785, 0.028578432083129882, 0.029091455459594726, 0.02869900894165039, 0.02865558433532715, 0.02867612838745117, 0.02870854377746582, 0.02869068717956543, 0.028659296035766602, 0.028702720642089844, 0.028656095504760743, 0.028638431549072266, 0.028513023376464844, 0.02844607925415039, 0.02848361587524414, 0.02844121551513672, 0.028651519775390624, 0.0286167049407959, 0.02874736022949219, 0.02863555145263672, 0.028974784851074218, 0.028743999481201172, 0.02862710380554199, 0.028553632736206053, 0.02842812728881836, 0.028308544158935547, 0.02804422378540039, 0.027897855758666993, 0.02803094482421875, 0.027971647262573243, 0.027930591583251955, 0.02791801643371582, 0.028135744094848633, 0.027905984878540038, 0.02819487953186035, 0.027971744537353516, 0.02817625617980957, 0.028176576614379882, 0.02817616081237793, 0.028442623138427735, 0.028565439224243164, 0.028386463165283204, 0.028275903701782228, 0.03287830352783203, 0.02829308891296387, 0.028164127349853515, 0.02815180778503418, 0.0280894718170166, 0.028445472717285158, 0.028321216583251953, 0.028414623260498047, 0.02832908821105957, 0.028525184631347657, 0.028369152069091796, 0.028604320526123047, 0.02832313537597656, 0.028259103775024413, 0.028481536865234375, 0.028601472854614257, 0.028506879806518556, 0.028462495803833008, 0.028307968139648438, 0.02815023994445801, 0.02806559944152832, 0.028389312744140624, 0.02830940818786621, 0.028248031616210936, 0.028397695541381836, 0.028084224700927734, 0.027968671798706053, 0.02797612762451172, 0.028203424453735353, 0.02815795135498047, 0.028225088119506837, 0.028189247131347656, 0.0283022403717041, 0.02846614456176758, 0.028548576354980468, 0.028326431274414064, 0.02824991989135742, 0.028145856857299804, 0.02828495979309082, 0.028355808258056642, 
0.02838400077819824, 0.028393056869506834, 0.02821343994140625, 0.028191680908203124, 0.028283679962158203, 0.029526111602783203, 0.028811328887939452, 0.028628639221191406, 0.02857206344604492, 0.028706079483032228, 0.028729215621948242, 0.02868092727661133, 0.0286046085357666, 0.028603839874267577, 0.028497407913208008, 0.029913984298706054, 0.02872902488708496, 0.02881772804260254, 0.029783231735229492, 0.028592960357666015, 0.028480960845947267, 0.028598848342895507, 0.028509408950805663, 0.028517120361328124, 0.028444704055786134, 0.028477439880371092, 0.028446304321289063, 0.02873504066467285, 0.028630144119262697, 0.02862870407104492, 0.028553216934204102, 0.028858367919921874, 0.029787328720092772, 0.028649887084960936, 0.028678112030029297, 0.029052799224853515, 0.02845155143737793, 0.028466720581054688, 0.028594335556030272, 0.028434591293334963, 0.028720928192138673, 0.028199136734008787, 0.02827574348449707, 0.028317888259887694, 0.02820908737182617, 0.02819568061828613, 0.028248064041137694, 0.028297216415405273, 0.02823151969909668, 0.028046976089477538, 0.02818307113647461, 0.028237823486328126, 0.02842540740966797, 0.02814252853393555, 0.029037952423095703, 0.02911187171936035, 0.028771263122558594, 0.028231008529663086, 0.028234399795532227, 0.028325504302978515, 0.028133472442626952, 0.028207679748535157, 0.028311264038085936, 0.028342271804809572, 0.028118303298950195, 0.028627775192260743, 0.028839616775512694, 0.02833030319213867, 0.028268543243408203, 0.02825823974609375, 0.028270719528198242, 0.028041151046752928, 0.028094463348388672, 0.028314912796020508, 0.02824060821533203, 0.028611648559570314, 0.028271551132202147, 0.028762111663818358, 0.028391424179077147, 0.02828886413574219, 0.028154048919677734, 0.02799203109741211, 0.027930624008178712, 0.027922431945800782, 0.027897823333740236, 0.029745183944702148, 0.028753503799438477, 0.028453279495239257, 0.028516351699829103, 0.02820627212524414, 0.028204992294311525, 0.02820924758911133, 0.028291872024536133, 0.02858598327636719, 0.028497440338134766, 0.0286909122467041, 0.028471296310424804, 0.02840959930419922, 0.02826470375061035, 0.028476959228515626, 0.028342752456665038, 0.028216352462768556, 0.028165088653564454, 0.028256256103515624, 0.02851161575317383, 0.028750463485717772, 0.0285546875, 0.02852921676635742, 0.028700672149658202, 0.029746623992919923, 0.02884383964538574, 0.028583744049072265, 0.028775487899780273, 0.028694400787353514, 0.028844032287597656, 0.028622976303100588, 0.02859110450744629, 0.028553184509277345, 0.028613471984863283, 0.028852287292480468, 0.028688543319702147, 0.028723039627075196, 0.028848127365112306, 0.02890070343017578, 0.028770240783691406, 0.02885843276977539, 0.028652191162109375, 0.02871500778198242, 0.028612287521362304, 0.028666175842285157, 0.028778495788574218, 0.02857561683654785, 0.02830761528015137, 0.028200191497802736, 0.02805014419555664, 0.027929887771606446, 0.028023519515991212, 0.02794067192077637, 0.027906240463256834, 0.02813542366027832, 0.028165887832641602, 0.028360479354858397, 0.028438175201416015, 0.028598783493041992, 0.028994976043701173, 0.028767040252685547, 0.028760160446166992, 0.028669023513793947, 0.028677024841308595, 0.028678144454956055, 0.028606271743774413, 0.028554880142211914, 0.02841865539550781, 0.028313568115234375, 0.02818160057067871, 0.028425119400024415, 0.028325664520263673, 0.028676319122314452, 0.028194816589355468, 0.03595683288574219, 0.031592607498168945, 0.029067007064819336, 0.028891424179077148, 0.028862207412719727, 
0.02851958465576172, 0.028502847671508787, 0.02824835205078125, 0.028099872589111327, 0.02818492889404297, 0.028208383560180662, 0.02835481643676758, 0.028745567321777344, 0.028154624938964843, 0.028192832946777345, 0.029562431335449217, 0.028561792373657226, 0.028170175552368164, 0.02834160041809082, 0.028676864624023437, 0.02840777587890625, 0.028519424438476562, 0.028535167694091798, 0.02846272087097168, 0.028498943328857423, 0.028512191772460938, 0.02858742332458496, 0.028420768737792968, 0.029495296478271486, 0.028307136535644532, 0.0283853759765625, 0.028463327407836914, 0.02854902458190918, 0.02834662437438965, 0.028316959381103516, 0.02847407913208008, 0.028381343841552734, 0.028401119232177734, 0.028475839614868163, 0.028829792022705077, 0.028560415267944336, 0.02845574378967285, 0.02894976043701172, 0.02850454330444336, 0.0284736328125, 0.028483552932739256, 0.02855878448486328, 0.028684896469116213, 0.028587583541870118, 0.02851577568054199, 0.028412864685058593, 0.02852016067504883, 0.028615007400512694, 0.028510175704956054, 0.028797023773193358, 0.028509759902954103, 0.028594560623168945, 0.028539903640747072, 0.02850681686401367, 0.02898975944519043, 0.02858598327636719, 0.028630752563476563, 0.028507776260375976, 0.02871887969970703, 0.031959264755249024, 0.02873616027832031, 0.028598272323608398, 0.029016063690185546, 0.028590080261230468, 0.028432384490966797, 0.02847871971130371, 0.028392127990722656, 0.028354688644409178, 0.02844051170349121, 0.028350656509399413, 0.028267423629760743, 0.028338655471801758, 0.028139968872070313, 0.027885568618774413, 0.027785215377807617, 0.027727424621582033, 0.027675071716308595, 0.02779110336303711, 0.02788582420349121, 0.027813472747802735, 0.02773756790161133, 0.027687231063842774, 0.028137311935424805, 0.028279327392578126, 0.028123615264892578, 0.028280607223510744, 0.028224576950073244, 0.028363712310791017, 0.028420095443725587, 0.028317695617675782, 0.028043392181396485, 0.028196735382080076, 0.028212799072265624, 0.027908672332763673, 0.02778099250793457, 0.02777052879333496, 0.027992416381835937, 0.02778432083129883, 0.027799968719482423, 0.02785327911376953, 0.028321056365966796, 0.027966304779052733, 0.028373952865600585, 0.02832044792175293, 0.028188928604125977, 0.028108320236206054, 0.028111328125, 0.028136831283569336, 0.02813942337036133, 0.02803785514831543, 0.028020671844482422, 0.02802284812927246, 0.02793267250061035, 0.027747583389282227, 0.027874048233032227, 0.027656192779541015, 0.02823686408996582, 0.028213247299194336, 0.028520416259765625, 0.028437471389770506, 0.028358655929565428, 0.028853248596191407, 0.028380128860473634, 0.02826652717590332, 0.028416160583496095, 0.028595775604248048, 0.028356895446777344, 0.02838857650756836, 0.02874345588684082, 0.028349344253540038, 0.02837539291381836, 0.028376703262329103, 0.02895884895324707, 0.028595712661743163, 0.028582399368286132, 0.028716991424560547, 0.028645439147949217, 0.028594175338745118, 0.028582048416137696, 0.028571487426757813, 0.028637184143066406, 0.02855299186706543, 0.028543039321899413, 0.028557472229003907, 0.028622848510742187, 0.028499967575073244, 0.028466400146484376, 0.02854400062561035, 0.02862473678588867, 0.02866374397277832, 0.028521984100341798, 0.028572160720825194, 0.028485631942749022]",tokens/s,35.1279778328788,, 
8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,14028.345344,7835.942912,0.0,7440.695296,7427.899392,s,1,31.845142578125,31.845142578125,0.0,31.845142578125,31.845142578125,31.845142578125,31.845142578125,[31.845142578125],,kWh,0.0007130096238708574,7.86430868783795e-05,0.0002699088270380068,0.0010615615377872437,,MB,1197.400064,8416.854016,0.0,8000.63488,7875.673088,s,10,0.9571953277587891,0.09571953277587891,0.0002780228861242757,0.09565481567382812,0.09612270812988281,0.09613992919921875,0.0961537060546875,"[0.09561958312988281, 0.0955525131225586, 0.09569004821777344, 0.0959486083984375, 0.09580300903320313, 0.09615715026855469, 0.09520767974853515, 0.09550665283203125, 0.09611888122558594, 0.09559120178222656]",tokens/s,2674.4802505399543,kWh,2.9104864475250007e-06,3.2097065990848037e-07,1.9323502807522923e-06,5.163807388185773e-06,tokens/kWh,49575822.79031167,MB,1215.070208,8437.825536,0.0,8021.6064,7976.51712,s,10,46.77368115234375,4.6773681152343745,0.006220704046563695,4.6768447265625,4.6834939453125,4.68686806640625,4.68956736328125,"[4.6902421875, 4.68072509765625, 4.673880859375, 4.669634765625, 4.6815068359375, 4.67028515625, 4.682744140625, 4.6764287109375, 4.67097265625, 4.6772607421875]",tokens/s,13.469113066984505,kWh,0.00013614018957080752,1.5016638126850834e-05,8.281410640524727e-05,0.00023397093410290563,tokens/kWh,269264.2154101552,,s,630,46.77019203186032,0.07423840005057197,0.0007574064742200948,0.07412756729125977,0.07484287872314453,0.0753373062133789,0.07690783737182619,"[0.07384646606445312, 0.07495516967773437, 0.07387904357910156, 0.07421788787841797, 0.0753267822265625, 0.07419551849365234, 0.07395136260986328, 0.07432806396484375, 0.07442617797851563, 0.07405133056640625, 0.07421737670898437, 0.07420572662353515, 0.07417855834960937, 0.07492198181152344, 0.07469612884521484, 0.07372576141357422, 0.07386934661865234, 0.07431375885009765, 0.07379161834716796, 0.07350943756103516, 0.07369932556152343, 0.07396966552734376, 0.07435369873046875, 0.07426137542724609, 0.07407369232177734, 0.07368141174316406, 0.07334706878662109, 0.07310131072998047, 0.07318732452392578, 0.07305548858642578, 0.0737118377685547, 0.074144287109375, 0.0743180160522461, 0.07401862335205078, 0.07528575897216797, 0.07415676879882813, 0.07420317077636719, 0.07468614196777344, 0.07795334625244141, 0.07461478424072265, 0.0746618881225586, 0.07605862426757813, 0.07501414489746094, 0.07449350738525391, 0.0748415985107422, 0.07443052673339844, 0.07450224304199218, 0.07426742553710937, 0.07977708435058593, 0.07535481262207032, 0.07832371520996094, 0.0746393585205078, 0.07452035522460937, 0.07430950164794922, 0.07418675231933594, 0.07441238403320312, 0.07403110504150391, 0.07403520202636718, 0.07472128295898438, 0.07396521759033203, 0.07382806396484375, 0.07371456146240235, 0.07344614410400391, 0.07390223693847656, 0.07424188995361328, 0.07408422088623047, 
0.07394111633300782, 0.07417414093017578, 0.07400441741943359, 0.07421609497070313, 0.0743584976196289, 0.07400563049316407, 0.07415283203125, 0.07396886444091796, 0.07385167694091797, 0.07394416046142578, 0.07433424377441407, 0.0744283218383789, 0.07406230163574219, 0.07406246185302734, 0.0743196792602539, 0.07425759887695313, 0.07445916748046875, 0.07585878753662109, 0.0743724136352539, 0.07499369812011719, 0.07436908721923828, 0.0746615982055664, 0.07434278106689453, 0.07388211059570313, 0.07459225463867188, 0.07414988708496094, 0.07400653076171874, 0.07392870330810547, 0.07387059020996094, 0.07559273529052735, 0.07500895690917969, 0.07461315155029297, 0.07420105743408204, 0.07396803283691407, 0.07390643310546875, 0.0739530258178711, 0.07517183685302735, 0.07425027465820312, 0.07425987243652343, 0.07375929260253906, 0.0736885757446289, 0.07380633544921875, 0.07389794921875, 0.07411100769042969, 0.07467417907714843, 0.07414963531494141, 0.07412556457519531, 0.07416831970214843, 0.07440589141845703, 0.07447756958007813, 0.07455948638916016, 0.07436083221435547, 0.0744120330810547, 0.07423590087890625, 0.07411436462402343, 0.07422045135498047, 0.074297119140625, 0.07485440063476563, 0.07536831665039062, 0.073984130859375, 0.07405165100097656, 0.07450902557373047, 0.07384512329101563, 0.07444483184814453, 0.07397465515136718, 0.07417948913574218, 0.07388159942626953, 0.07373836517333984, 0.07352678680419922, 0.074316162109375, 0.07442022705078125, 0.07405875396728516, 0.07348287963867188, 0.07333039855957031, 0.07363967895507813, 0.07349750518798828, 0.07415558624267578, 0.07420342254638672, 0.07430569458007813, 0.07400380706787109, 0.07385897827148437, 0.07492793273925781, 0.07421228790283203, 0.07466393280029297, 0.0741346206665039, 0.07414390563964844, 0.07412995147705079, 0.07436003112792969, 0.07422806549072265, 0.07419971466064453, 0.07436310577392578, 0.07409347534179687, 0.07435148620605468, 0.07432806396484375, 0.07449190521240234, 0.07434464263916016, 0.07446304321289063, 0.07488864135742188, 0.07543251037597656, 0.07389756774902344, 0.07483433532714844, 0.07320543670654298, 0.07362332916259766, 0.07421129608154296, 0.07558771514892579, 0.07434003448486329, 0.07363452911376953, 0.07515103912353516, 0.07368739318847656, 0.07405487823486329, 0.07471389007568359, 0.07409574127197266, 0.07391337585449219, 0.07386640167236327, 0.07356690979003906, 0.07466950225830078, 0.07503622436523437, 0.07395382690429687, 0.07409712219238282, 0.07379558563232422, 0.07401273345947265, 0.07412745666503906, 0.07425791931152344, 0.0749494400024414, 0.07404051208496094, 0.0738885726928711, 0.07410688018798828, 0.07389798736572266, 0.07429555511474609, 0.07409168243408203, 0.07408290863037109, 0.07374835205078124, 0.07351513671875, 0.07363510131835938, 0.07330889892578125, 0.07366233825683594, 0.074268798828125, 0.07411302185058594, 0.07342243194580078, 0.07373833465576173, 0.07357266998291015, 0.07340985870361329, 0.073614013671875, 0.07383577728271484, 0.07367935943603515, 0.07333283233642578, 0.07369868469238282, 0.07413820648193359, 0.07350905609130859, 0.0737525405883789, 0.07409257507324218, 0.0739202880859375, 0.07380604553222657, 0.07379132843017579, 0.07394486236572266, 0.07385740661621094, 0.07388694763183594, 0.07384758758544922, 0.0740474853515625, 0.07422557067871094, 0.0744090576171875, 0.07675801849365234, 0.07511357116699219, 0.07444351959228515, 0.07445065307617188, 0.07449967956542969, 0.07409954833984375, 0.07430729675292969, 0.07420336151123047, 0.07407939147949219, 
0.07427369689941406, 0.07417826843261718, 0.0734169921875, 0.0734900131225586, 0.074336669921875, 0.07435279846191406, 0.07457164764404296, 0.07446080017089844, 0.07429145812988282, 0.07424348449707031, 0.07730643463134766, 0.07422156524658204, 0.07430976104736328, 0.07433216094970703, 0.07430944061279297, 0.07408150482177735, 0.07401055908203125, 0.07384457397460938, 0.0740167007446289, 0.07429676818847657, 0.07397618865966797, 0.07412374114990235, 0.07407202911376953, 0.07416015625, 0.07488841247558593, 0.07444764709472657, 0.07451551818847656, 0.07437612915039063, 0.07496908569335937, 0.0741949462890625, 0.0748636474609375, 0.074531005859375, 0.07481375885009765, 0.07451491546630859, 0.07425433349609376, 0.07408025360107422, 0.07356396484375, 0.07369337463378907, 0.07386726379394531, 0.07411673736572266, 0.07372838592529297, 0.0737423324584961, 0.07369910430908203, 0.07351660919189452, 0.0735647964477539, 0.07421900939941406, 0.07406003570556641, 0.07398838043212891, 0.07361459350585937, 0.07353148651123047, 0.07358432006835937, 0.07407100677490235, 0.07415596771240235, 0.07391596984863281, 0.07394972991943359, 0.07410070037841797, 0.07381775665283204, 0.07405299377441406, 0.0738987808227539, 0.07460275268554688, 0.07430342102050781, 0.07408640289306641, 0.07546470642089843, 0.07443456268310547, 0.07435465240478516, 0.07429244995117187, 0.07429203033447265, 0.07606476593017578, 0.07413123321533203, 0.07696201324462891, 0.07534591674804687, 0.0743034896850586, 0.07473942565917968, 0.07410307312011719, 0.07410406494140626, 0.0736447982788086, 0.07371981048583984, 0.07393075561523438, 0.07888243103027344, 0.07417804718017579, 0.07379404449462891, 0.07380931091308594, 0.07399689483642578, 0.07405391693115235, 0.07408406066894531, 0.07451443481445312, 0.07414112091064454, 0.07381459045410156, 0.07344918060302734, 0.07369929504394532, 0.073695068359375, 0.07385545349121093, 0.07426457977294922, 0.074176513671875, 0.07410665893554688, 0.07359715270996094, 0.0739368667602539, 0.07409693145751953, 0.07436815643310547, 0.07452528381347656, 0.07426850891113282, 0.07427907562255859, 0.07439961242675781, 0.07439279937744141, 0.07439862060546874, 0.07402301025390624, 0.07428521728515625, 0.07443430328369141, 0.07459996795654297, 0.07455382537841797, 0.07403110504150391, 0.07419513702392579, 0.07434591674804687, 0.07404102325439453, 0.07366726684570313, 0.0737996826171875, 0.07383782196044922, 0.073755615234375, 0.07411033630371094, 0.074, 0.0736848602294922, 0.07330477142333984, 0.0737314224243164, 0.07595507049560547, 0.07426457977294922, 0.07496498870849609, 0.07403110504150391, 0.07401634979248047, 0.07375276947021485, 0.07349612426757812, 0.07384646606445312, 0.07395996856689453, 0.07407830047607422, 0.07382675170898438, 0.07396342468261718, 0.07412041473388672, 0.07451718139648437, 0.07432316589355469, 0.07457369232177734, 0.07511459350585938, 0.07445929718017578, 0.07433462524414063, 0.07575843048095703, 0.07457917022705078, 0.07433296203613281, 0.07442022705078125, 0.07351471710205078, 0.07323062133789063, 0.0732357406616211, 0.07336540985107422, 0.07448834991455078, 0.0761178207397461, 0.07524508666992187, 0.07415609741210938, 0.07446617889404297, 0.07411650848388672, 0.0741013412475586, 0.07453052520751953, 0.07383200073242188, 0.07435132598876953, 0.07457746887207031, 0.07401251220703126, 0.0736960678100586, 0.07347586822509766, 0.07463321685791016, 0.07463868713378906, 0.07433225250244141, 0.074271484375, 0.0746780776977539, 0.07488716888427735, 0.07391027069091796, 0.07427474975585938, 
0.07426054382324218, 0.07403929901123046, 0.0742762222290039, 0.07445974731445312, 0.07532546997070312, 0.07500908660888672, 0.0747734375, 0.07458611297607422, 0.07420873260498047, 0.07446720123291016, 0.07431644439697266, 0.07389321899414063, 0.0740337905883789, 0.0739840316772461, 0.07379894256591797, 0.07381439971923828, 0.0738977279663086, 0.07387811279296876, 0.07387503814697266, 0.07442086029052734, 0.07394461059570312, 0.07379987335205078, 0.07445526123046875, 0.07421731567382812, 0.07400431823730469, 0.07378755187988281, 0.07414169311523437, 0.07447142028808594, 0.07544217681884766, 0.07570022583007813, 0.07466172790527344, 0.07435689544677734, 0.07486585235595702, 0.07456531524658203, 0.07419731140136719, 0.07665821075439454, 0.07592352294921875, 0.07556531524658203, 0.07458956909179687, 0.07424079895019531, 0.074176513671875, 0.07400857543945312, 0.07423766326904296, 0.07421389007568359, 0.07439542388916015, 0.07499571228027344, 0.07430758666992188, 0.07428822326660156, 0.07371663665771484, 0.07371161651611328, 0.07355596923828125, 0.07347200012207031, 0.07429238128662109, 0.07396438598632812, 0.0740126724243164, 0.07369522857666015, 0.07407001495361328, 0.0739546890258789, 0.07369987487792969, 0.07391165161132812, 0.07412767791748047, 0.07377555084228515, 0.07399209594726562, 0.07392041778564454, 0.07370873260498047, 0.07392092895507812, 0.0738842544555664, 0.07443267059326172, 0.07368482971191406, 0.07390729522705078, 0.07389654541015625, 0.07428860473632813, 0.07466070556640625, 0.07428688049316406, 0.07412582397460937, 0.07415369415283203, 0.07412931060791016, 0.074174560546875, 0.0742681884765625, 0.07448828887939453, 0.07443389129638672, 0.07413622283935548, 0.074176513671875, 0.07647859191894531, 0.07513279724121094, 0.0744981460571289, 0.07412726593017578, 0.07385088348388671, 0.07409037017822266, 0.07391203308105469, 0.07350678253173829, 0.0737959976196289, 0.0740884780883789, 0.07426585388183594, 0.07394509124755859, 0.07341862487792969, 0.07359305572509765, 0.07458233642578126, 0.07430115509033203, 0.07398783874511719, 0.0736982421875, 0.07437721252441407, 0.07381196594238282, 0.07391836547851563, 0.07401904296875, 0.0742053451538086, 0.0741250228881836, 0.0742113265991211, 0.07399971008300782, 0.07404611206054687, 0.07420905303955078, 0.07471250915527344, 0.07436908721923828, 0.07426841735839844, 0.07427283477783203, 0.0740832290649414, 0.07385292816162109, 0.07452982330322265, 0.07423792266845704, 0.07411974334716796, 0.07387506866455078, 0.07401529693603516, 0.07383475494384766, 0.07385702514648437, 0.07382179260253906, 0.07393321228027344, 0.0741396484375, 0.07350032043457032, 0.07400240325927734, 0.0736396484375, 0.07394480133056641, 0.07377804565429688, 0.07406390380859375, 0.07570845031738281, 0.07424409484863281, 0.074176513671875, 0.07378329467773438, 0.0741949462890625, 0.07412918090820313, 0.0738736343383789, 0.07375772857666016, 0.0740703353881836, 0.0738617935180664, 0.07413276672363281, 0.07383446502685546, 0.07423667144775391, 0.0739835205078125, 0.07361151885986328, 0.07415580749511719, 0.07434223937988281, 0.07411158752441406, 0.074720703125, 0.07431206512451172, 0.07460269165039063, 0.07497843170166016, 0.07424224090576172, 0.07511248016357422, 0.07416054534912109, 0.07434381103515625, 0.07677519989013672, 0.0758121566772461, 0.07377581024169921, 0.07370925140380859, 0.07489977264404298, 0.07361516571044922, 0.0735888671875, 0.07380588531494141, 0.07448726654052734, 0.07392105865478515, 0.0838287353515625, 0.07399628448486328, 0.07375433349609375, 
0.0735931167602539, 0.07356825256347656, 0.07335836791992187, 0.07353568267822265, 0.07370121765136718, 0.0738971176147461, 0.07378614044189453, 0.07381718444824219, 0.07355545806884765, 0.07642562866210938, 0.0739797134399414, 0.07396371459960938, 0.07396514892578125, 0.07389430236816406, 0.07426399993896485, 0.07393849945068359, 0.07424649810791016, 0.07404541015625, 0.07461942291259765, 0.07416438293457031, 0.07417855834960937, 0.0739205093383789, 0.07439949035644532, 0.07393888092041015, 0.07385043334960938, 0.07393551635742188, 0.07389603424072265, 0.07369075012207031, 0.07351641845703125, 0.07333740997314453, 0.07362928009033202, 0.0740626220703125, 0.0742011489868164, 0.07380786895751953, 0.07372211456298829, 0.07353078460693359, 0.0735215072631836, 0.07340013122558593, 0.07386918640136719, 0.07446150207519531, 0.07427382659912109, 0.07365897369384766, 0.07379154968261718, 0.07398563385009765, 0.07568793487548828, 0.07433296203613281, 0.07414777374267578, 0.07462092590332031, 0.07434361267089844, 0.0748572769165039]",tokens/s,13.470117881295792,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,4392.05888,1721.63072,0.0,1319.108608,1304.104448,s,1,11.15692578125,11.15692578125,0.0,11.15692578125,11.15692578125,11.15692578125,11.15692578125,[11.15692578125],,kWh,0.00012156793935416015,1.340089669116954e-05,3.782891915202202e-05,0.0001727977551973517,,MB,4448.702464,1801.322496,0.0,1377.828864,1350.19776,s,10,0.798412742614746,0.07984127426147461,0.000858906748972448,0.08005551910400391,0.08055615997314453,0.08092567825317383,0.08122129287719726,"[0.08047404479980469, 0.08046963500976563, 0.08012332916259765, 0.08037773132324219, 0.08129519653320312, 0.07914979553222656, 0.07998770904541015, 0.07871635437011719, 0.07836211395263672, 0.07945683288574219]",tokens/s,3206.3616515139506,kWh,2.3467114433001673e-06,2.588013491021797e-07,1.0265008212000382e-06,3.6320136136023855e-06,tokens/kWh,70484317.3057626,MB,4452.651008,1801.322496,0.0,1377.828864,1337.617408,s,10,50.72342724609375,5.072342724609374,0.04995648975200244,5.0726064453125,5.132321435546875,5.133031811523438,5.133600112304688,"[5.13216357421875, 5.12299755859375, 5.1337421875, 5.118171875, 5.06588671875, 5.079326171875, 5.0095849609375, 5.01674267578125, 5.04282763671875, 5.00198388671875]",tokens/s,12.42029638382759,kWh,0.00014829603092044922,1.6357522411764455e-05,5.162759685760032e-05,0.00021628115018981403,tokens/kWh,291287.5206401924,,s,630,50.71836746215824,0.08050534517802889,0.0011417454131090903,0.08067811203002931,0.08161748962402345,0.08218495140075684,0.08432947563171388,"[0.08125276947021484, 0.08333516693115234, 0.08389826965332031, 0.08094525146484376, 0.08084889221191406, 0.08101068878173828, 0.08097154998779296, 0.08069961547851562, 0.0808202896118164, 0.08072585296630859, 0.0809402847290039, 0.08089651489257813, 0.08486640167236328, 0.0812467498779297, 0.08133650970458985, 
0.08102912139892578, 0.08085737609863282, 0.08093606567382812, 0.08103337860107422, 0.08104124450683593, 0.08099881744384765, 0.08071647644042969, 0.08135011291503906, 0.08097747039794922, 0.08156435394287109, 0.08170722961425782, 0.08212384033203125, 0.0814815673828125, 0.08136774444580078, 0.08094121551513672, 0.08135279846191407, 0.08129094696044922, 0.08085091400146484, 0.08117017364501954, 0.0811153564453125, 0.08086975860595703, 0.08072396850585938, 0.08103321838378906, 0.08089497375488282, 0.08125145721435546, 0.08126656341552735, 0.08342642974853516, 0.08318204498291015, 0.08212726593017577, 0.08101068878173828, 0.08085708618164063, 0.08147721862792968, 0.08104370880126953, 0.0809432601928711, 0.08098611450195313, 0.08083360290527344, 0.08576505279541016, 0.08598134613037109, 0.08108220672607422, 0.08130150604248047, 0.08098406219482422, 0.08109670257568359, 0.081080322265625, 0.08101888275146485, 0.08092050933837891, 0.08073836517333985, 0.08110079956054687, 0.08094713592529297, 0.08107622528076172, 0.08099545288085938, 0.08098870086669922, 0.08080553436279297, 0.08113375854492187, 0.08082479858398438, 0.08070732879638672, 0.08114998626708984, 0.08086739349365234, 0.08066700744628906, 0.08110675048828125, 0.08104755401611329, 0.08074227142333984, 0.08121561431884766, 0.08189683532714843, 0.0811915512084961, 0.08161468505859375, 0.08096492767333985, 0.08098611450195313, 0.08107913970947266, 0.08112870025634765, 0.0809991683959961, 0.08247321319580078, 0.08142144012451172, 0.08145337677001953, 0.08125686645507812, 0.08163318634033204, 0.08167369842529297, 0.0814854736328125, 0.08125939178466797, 0.08108013153076171, 0.08160479736328125, 0.0812577896118164, 0.08114985656738281, 0.08175279998779297, 0.08113670349121094, 0.08114892578125, 0.08215961456298829, 0.08288050842285156, 0.08193215942382813, 0.08226419067382812, 0.08126873779296875, 0.0811003189086914, 0.08130156707763672, 0.08175247955322265, 0.081155517578125, 0.081494140625, 0.081336669921875, 0.08137065887451173, 0.08145990753173828, 0.08113324737548828, 0.0815408935546875, 0.08168192291259765, 0.08145807647705078, 0.08102092742919922, 0.08103731536865234, 0.08128950500488281, 0.08106742095947266, 0.0811277084350586, 0.08138524627685546, 0.0810723876953125, 0.0810738525390625, 0.0811600341796875, 0.08101478576660157, 0.08167155456542968, 0.08084134674072266, 0.08065023803710937, 0.08087107086181641, 0.08076502227783203, 0.08068486022949219, 0.08686841583251953, 0.0811863021850586, 0.08097193908691407, 0.08099874877929687, 0.0808256607055664, 0.08068915557861328, 0.08136720275878906, 0.08107676696777344, 0.08093625640869141, 0.0807508773803711, 0.08316553497314454, 0.08086649322509766, 0.08147052764892578, 0.08140576171875, 0.08169660949707032, 0.08097737884521485, 0.08081273651123047, 0.08089590454101563, 0.08127497863769531, 0.08119427490234375, 0.08108515167236328, 0.08158191680908203, 0.08122383880615235, 0.08086121368408203, 0.08176841735839843, 0.08158963012695312, 0.08119769287109375, 0.08124416351318359, 0.08098397064208984, 0.08474041748046875, 0.082712158203125, 0.08250399780273437, 0.08266265869140625, 0.08118335723876953, 0.08151395416259766, 0.08082099151611329, 0.0805906219482422, 0.08099839782714843, 0.08265535736083984, 0.08332787322998046, 0.08261529541015625, 0.08099775695800782, 0.08116607666015625, 0.08169993591308594, 0.08129923248291016, 0.08136294555664063, 0.08078524780273437, 0.0822040023803711, 0.08102278137207031, 0.08089036560058593, 0.08075724792480468, 0.08424988555908203, 0.08081999969482422, 
0.08080070495605468, 0.08059085083007812, 0.08080178833007813, 0.08087741088867187, 0.08065929412841796, 0.08174390411376953, 0.08063385772705078, 0.08075059509277344, 0.0811229476928711, 0.081123779296875, 0.08113990020751953, 0.0816157455444336, 0.081982177734375, 0.08104771423339843, 0.08128511810302734, 0.08128102111816406, 0.08126054382324219, 0.08199523162841797, 0.08099407958984375, 0.08098252868652343, 0.08144716644287109, 0.08137318420410156, 0.08113152313232422, 0.08081423950195313, 0.08076380920410156, 0.0808150405883789, 0.08186784362792969, 0.08116320037841797, 0.08094915008544921, 0.08128521728515625, 0.08128102111816406, 0.08076640319824219, 0.0808023681640625, 0.08081228637695312, 0.08077900695800781, 0.08182924652099609, 0.08159318542480469, 0.08161052703857422, 0.08164351654052734, 0.08327983856201172, 0.08133145904541016, 0.08142829132080077, 0.08163836669921876, 0.08120524597167969, 0.08087519836425781, 0.08111341094970703, 0.08078694152832032, 0.08071968078613281, 0.08080643463134765, 0.081266845703125, 0.08129090881347656, 0.08119058990478516, 0.08110147094726562, 0.08129945373535157, 0.08247705841064454, 0.08100454711914062, 0.08088531494140624, 0.08072032165527344, 0.08083190155029298, 0.08090882873535156, 0.08097811126708984, 0.08141001892089844, 0.08108022308349609, 0.08115200042724609, 0.08151779174804688, 0.08211353302001953, 0.08129004669189453, 0.08107782745361328, 0.08356620788574219, 0.08085568237304687, 0.08079753875732422, 0.08104771423339843, 0.08092243194580079, 0.0810211181640625, 0.08191900634765625, 0.08108512115478515, 0.0811456298828125, 0.08102745819091797, 0.08110012817382813, 0.08105039978027344, 0.08124416351318359, 0.08103241729736328, 0.08079779052734375, 0.08081478118896485, 0.08086323547363282, 0.08105574035644532, 0.08126636505126954, 0.08093113708496094, 0.08104118347167968, 0.08012413024902344, 0.08012595367431641, 0.08026461029052734, 0.07992364501953125, 0.07979023742675781, 0.07939071655273437, 0.07958319854736329, 0.07938050842285156, 0.08072191619873047, 0.08216166687011718, 0.08011571502685547, 0.07984333038330078, 0.0799591064453125, 0.07983612823486329, 0.0796180191040039, 0.07982598114013671, 0.08022726440429688, 0.0793511962890625, 0.08021616363525391, 0.081961181640625, 0.08010269165039062, 0.07982112121582031, 0.07923564910888672, 0.07899327850341797, 0.07964083099365235, 0.07981568145751954, 0.0793364486694336, 0.07926579284667969, 0.07923849487304688, 0.07973750305175781, 0.07907942199707031, 0.0792125473022461, 0.07927913665771484, 0.07962921905517578, 0.08034537506103516, 0.08087734222412109, 0.08019945526123047, 0.08051324462890624, 0.08055836486816406, 0.08107984161376953, 0.08095549011230468, 0.08057091522216797, 0.07982630157470703, 0.07974976348876953, 0.08108608245849609, 0.08128876495361329, 0.08058963012695312, 0.0808260498046875, 0.08179539489746093, 0.08140185546875, 0.0805191650390625, 0.08081203460693359, 0.08064205169677735, 0.08072191619873047, 0.0805191650390625, 0.0803768310546875, 0.08005120086669922, 0.08029798126220702, 0.08046915435791016, 0.08061219024658203, 0.08048435211181641, 0.08104345703125, 0.08083660888671874, 0.08034925079345703, 0.08019737243652343, 0.08091366577148437, 0.0806143341064453, 0.08023165130615234, 0.08316588592529296, 0.08182790374755859, 0.08172547149658203, 0.08135987091064453, 0.080640380859375, 0.0808966064453125, 0.0806338882446289, 0.08033708953857421, 0.08025273895263672, 0.08034832000732423, 0.08076166534423829, 0.08053276824951172, 0.08047014617919922, 
0.08067155456542968, 0.08054460906982422, 0.08074281311035156, 0.08143315124511719, 0.08185804748535157, 0.08096623992919921, 0.08109251403808594, 0.08045916748046875, 0.07968624114990235, 0.07968358612060547, 0.07955379486083984, 0.08436198425292969, 0.07985526275634766, 0.0795033950805664, 0.07967366027832032, 0.0801402587890625, 0.07962403106689453, 0.07936019134521484, 0.07942704010009766, 0.07953164672851562, 0.07976847839355469, 0.08015776062011719, 0.07967513275146484, 0.07942546844482422, 0.0790946273803711, 0.07902566528320312, 0.07893055725097656, 0.07935590362548828, 0.07913037109375, 0.07910848236083984, 0.07906079864501953, 0.07891670227050782, 0.07910294342041016, 0.07947264099121094, 0.07931084442138672, 0.07920223999023437, 0.0794870376586914, 0.07911628723144531, 0.07930879974365235, 0.07926143646240234, 0.07916365051269532, 0.0790909423828125, 0.08022092437744141, 0.07940265655517578, 0.07939926147460938, 0.07963375854492187, 0.07978038024902344, 0.0795767364501953, 0.07949155426025391, 0.0799969253540039, 0.08352767944335937, 0.07967948913574219, 0.07984127807617188, 0.07917801666259766, 0.07972748565673828, 0.07979705810546875, 0.07919321441650391, 0.07892470550537109, 0.0793958740234375, 0.07904112243652343, 0.08068351745605469, 0.08141401672363281, 0.07974297332763672, 0.07965695953369141, 0.0796382064819336, 0.07955680084228516, 0.07935590362548828, 0.08001328277587891, 0.07941705322265626, 0.0791760025024414, 0.0793497314453125, 0.07898860931396484, 0.07903215789794922, 0.07920857238769531, 0.07910694122314453, 0.07921868896484376, 0.07898726654052735, 0.08055398559570312, 0.07984067535400391, 0.07959980773925782, 0.08017453002929688, 0.07898966217041016, 0.07911625671386718, 0.07917021179199218, 0.0790835189819336, 0.07913862609863281, 0.0801847686767578, 0.08003791809082031, 0.07920066833496094, 0.07936646270751953, 0.07915087890625, 0.07982109069824218, 0.07953968048095703, 0.07920687866210938, 0.0790855712890625, 0.08165401458740235, 0.08005923461914062, 0.07988489532470704, 0.07929273223876954, 0.07936614227294922, 0.07941254425048828, 0.07932498931884766, 0.07939161682128906, 0.079351806640625, 0.08001331329345703, 0.07904665374755859, 0.07904569244384765, 0.07948115539550782, 0.07926620483398437, 0.0792660140991211, 0.08154476928710938, 0.08114015960693359, 0.07996217346191406, 0.08067270660400391, 0.08052252960205078, 0.0801123504638672, 0.07970130920410157, 0.08038265228271485, 0.07946649932861329, 0.07923712158203125, 0.07974877166748047, 0.07924508666992187, 0.07924208068847656, 0.07921839904785156, 0.07896473693847657, 0.07933952331542969, 0.07974038696289062, 0.07935440063476562, 0.07919615936279296, 0.07955443572998047, 0.07955219268798829, 0.07934815979003906, 0.07959961700439454, 0.07965286254882813, 0.07924307250976563, 0.07905510711669922, 0.07909286499023438, 0.07911225891113281, 0.079321533203125, 0.07963884735107422, 0.07985971069335937, 0.079285888671875, 0.0795815658569336, 0.07913881683349609, 0.07908541107177734, 0.07930281829833985, 0.08239254760742187, 0.07907997131347656, 0.07883805084228515, 0.07842201232910156, 0.08353145599365235, 0.07915904235839843, 0.07917830657958984, 0.07867596435546875, 0.07970406341552734, 0.07883776092529297, 0.07941529846191406, 0.0796527328491211, 0.07948419189453125, 0.07917180633544922, 0.07900019073486328, 0.0792179183959961, 0.07929523468017578, 0.0794559326171875, 0.07961427307128906, 0.08017427062988282, 0.08018390655517578, 0.08056649780273438, 0.07999231719970704, 0.08041522979736328, 
0.08042291259765624, 0.08016223907470703, 0.08040054321289063, 0.0801419219970703, 0.08451789093017578, 0.08071887969970704, 0.080427490234375, 0.08044131469726562, 0.08038861083984375, 0.08125011444091797, 0.08052352142333985, 0.08029599761962891, 0.0804339828491211, 0.08127689361572266, 0.08298502349853516, 0.08036236572265625, 0.07997235107421875, 0.07987619018554687, 0.08012902069091797, 0.07978076934814453, 0.07981203460693359, 0.08084742736816407, 0.08145101165771484, 0.08033074951171874, 0.07952793884277344, 0.07936614227294922, 0.07928627014160156, 0.0788705291748047, 0.08147353363037109, 0.07912857818603515, 0.07953750610351562, 0.07919667053222657, 0.07946870422363281, 0.07918386840820313, 0.08006655883789063, 0.07965081787109375, 0.07992892456054687, 0.08055411529541015, 0.07925753784179687, 0.07918611145019532, 0.0796173095703125, 0.07966944122314454, 0.07942607879638672, 0.07933542633056641, 0.07963471984863281, 0.0795030746459961, 0.07968899536132812, 0.07909059143066406, 0.0791747817993164, 0.0793832015991211, 0.07887446594238282, 0.07865977478027343, 0.07863295745849609, 0.0787435531616211, 0.07913215637207031, 0.07901392364501954, 0.07892182159423829, 0.07918016052246094, 0.07954637145996094, 0.07955046081542969, 0.07919615936279296, 0.07989635467529296, 0.07907555389404297, 0.07929036712646484, 0.08278643035888672, 0.07958035278320312, 0.07994147491455078, 0.07955500793457031, 0.07887071990966797, 0.07882300567626953, 0.07987264251708984, 0.07982109069824218, 0.07967436981201172, 0.07933379364013672, 0.0791144027709961, 0.07908163452148438, 0.07892092895507813, 0.07936473846435547, 0.07910415649414063, 0.07958019256591797, 0.07972335815429688, 0.07966732788085938, 0.079604736328125, 0.07989871978759766, 0.08002210998535156, 0.07941171264648438, 0.0793372802734375, 0.08023846435546875, 0.07891913604736328, 0.07886438751220703, 0.07883229064941406, 0.07885801696777343, 0.07924518585205079, 0.07876233673095703, 0.07929446411132812, 0.07893196868896485, 0.07906269073486329, 0.07981024169921876, 0.07921049499511719, 0.07918605041503907, 0.08158672332763672, 0.07900931549072265, 0.07894454193115234, 0.07906937408447266]",tokens/s,12.421535461882783,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,6418.624512,3721.330688,0.0,3326.083072,3249.416192,s,1,17.51308203125,17.51308203125,0.0,17.51308203125,17.51308203125,17.51308203125,17.51308203125,[17.51308203125],,kWh,0.00030355860972501125,3.347633576307504e-05,0.0001152642588779923,0.0004522992043660786,,MB,1872.0768,4006.54336,0.0,3590.324224,3521.678336,s,10,0.7537450408935547,0.07537450408935548,0.0015239186641714225,0.07506945419311523,0.07570234146118164,0.07776300926208496,0.07941154350280762,"[0.0750257568359375, 0.07477347564697266, 0.07524441528320312, 0.07415190124511718, 0.07494012451171875, 0.07513983917236328, 0.07520329284667969, 0.07511315155029297, 0.07432940673828126, 
0.07982367706298828]",tokens/s,3396.3739210345634,kWh,2.179116079975385e-06,2.4031599320989613e-07,1.440817902238829e-06,3.860249975424111e-06,tokens/kWh,66316948.80637212,MB,1872.0768,4117.692416,0.0,3701.47328,3608.866816,s,10,46.84605126953126,4.684605126953125,0.007524969231583742,4.68286474609375,4.69220595703125,4.6969560058593745,4.700756044921874,"[4.691150390625, 4.6794951171875, 4.68296044921875, 4.67753759765625, 4.689615234375, 4.7017060546875, 4.68276904296875, 4.6844453125, 4.68272998046875, 4.67364208984375]",tokens/s,13.448305309133985,kWh,0.00013488809747252246,1.4878509955376521e-05,6.468262388536061e-05,0.00021444923131325958,tokens/kWh,293775.8257010113,,s,630,46.8426582336426,0.07435342576768662,0.0008832220590586653,0.07422393417358399,0.07506347274780273,0.07541375885009766,0.07775482261657715,"[0.074025634765625, 0.07351910400390625, 0.07401996612548828, 0.07403593444824219, 0.07385874938964844, 0.07375103759765625, 0.0742747802734375, 0.07384678649902343, 0.07427065277099609, 0.0741827163696289, 0.07476950073242188, 0.074380126953125, 0.07473296356201171, 0.07454163360595703, 0.07522022247314453, 0.07421014404296875, 0.07441379547119141, 0.07455158233642578, 0.07421049499511718, 0.07453984069824218, 0.07430963134765625, 0.0750665283203125, 0.07402508544921875, 0.07376950073242187, 0.07423126220703125, 0.07428169250488281, 0.07420928192138672, 0.07418595123291015, 0.07347625732421875, 0.0739815673828125, 0.0765818862915039, 0.07498748779296875, 0.07441206359863281, 0.07443865966796875, 0.07429529571533203, 0.07381145477294922, 0.07432653045654297, 0.0746455078125, 0.07427468872070313, 0.0772179183959961, 0.07436697387695312, 0.07419625854492187, 0.07437385559082031, 0.07434563446044921, 0.07463314819335938, 0.07422643280029297, 0.0747234878540039, 0.07499088287353516, 0.07479564666748047, 0.07472752380371094, 0.07459772491455079, 0.0750241928100586, 0.07492281341552734, 0.07458518218994141, 0.07457888031005859, 0.07452877044677735, 0.07496089935302734, 0.07479296112060548, 0.07433126068115234, 0.07418675231933594, 0.07471603393554688, 0.07414733123779296, 0.07420329284667969, 0.0741456298828125, 0.07455554962158203, 0.07408025360107422, 0.07369872283935547, 0.07378594970703126, 0.07388979339599609, 0.07396966552734376, 0.07413091278076171, 0.073830078125, 0.07413843536376953, 0.0745607681274414, 0.07404579162597656, 0.07455158233642578, 0.07447100830078125, 0.07401734161376954, 0.07440316772460938, 0.0745719985961914, 0.07415443420410156, 0.07387465667724609, 0.07409072113037109, 0.07444742584228516, 0.07606681823730468, 0.07806156921386719, 0.07460969543457031, 0.07473251342773438, 0.07454499053955078, 0.07431798553466797, 0.07454105377197266, 0.07460201263427735, 0.07524809265136718, 0.07431782531738282, 0.07431961822509765, 0.07375440216064454, 0.07394745635986329, 0.07437248229980468, 0.0740994873046875, 0.07385292816162109, 0.07413747406005859, 0.07356377410888672, 0.07372032165527344, 0.07609139251708984, 0.07507977294921875, 0.07382825469970702, 0.07361945343017579, 0.07405158233642578, 0.0739422378540039, 0.07380867004394531, 0.074176513671875, 0.0739854736328125, 0.07374700927734375, 0.07337718200683593, 0.07352499389648437, 0.07376982116699218, 0.07403833770751952, 0.07419789123535156, 0.07434860992431641, 0.07387340545654297, 0.07397154998779297, 0.07364406585693359, 0.07425241851806641, 0.07430143737792969, 0.07506313323974609, 0.07422978973388672, 0.07497920227050782, 0.07464927673339844, 0.07444841766357421, 0.0741580810546875, 0.0753448028564453, 
0.07468198394775391, 0.07364415740966797, 0.07355526733398438, 0.07313823699951172, 0.07379443359375, 0.07435250854492187, 0.07427903747558594, 0.07423744201660157, 0.07497516632080078, 0.07385759735107422, 0.07343309020996094, 0.07383245086669922, 0.07407170867919922, 0.0748109130859375, 0.07343392181396484, 0.07360262298583985, 0.07376914978027344, 0.07381996917724609, 0.0743603515625, 0.07436784362792968, 0.0742740478515625, 0.07442262268066406, 0.0754277114868164, 0.07364796447753906, 0.07343497467041016, 0.07399862670898437, 0.07400102233886718, 0.07498751831054687, 0.07438336181640624, 0.07447468566894531, 0.07424076843261719, 0.07508589172363281, 0.07641053009033204, 0.07484860992431641, 0.07430758666992188, 0.07481139373779297, 0.07426252746582031, 0.07437516784667969, 0.07437721252441407, 0.07717683410644531, 0.07761673736572265, 0.07488143920898438, 0.0741560287475586, 0.07421952056884766, 0.07348633575439453, 0.07382630157470703, 0.07418470764160157, 0.07419062042236328, 0.07412329864501953, 0.07374649810791016, 0.07381619262695313, 0.0741396484375, 0.074347900390625, 0.0742938232421875, 0.07445510101318359, 0.07384678649902343, 0.07352524566650391, 0.07327948760986328, 0.07411164855957031, 0.07422566223144532, 0.07411436462402343, 0.07441887664794922, 0.07405773162841797, 0.07377510070800782, 0.07455232238769531, 0.07428546905517579, 0.0743788833618164, 0.07450857543945312, 0.07406003570556641, 0.07441846466064453, 0.07388585662841797, 0.0742850570678711, 0.07408640289306641, 0.07469670104980469, 0.07440998077392579, 0.07449600219726563, 0.07441600036621093, 0.0736277084350586, 0.07403321838378907, 0.07385497283935546, 0.07370547485351563, 0.07410892486572265, 0.07386726379394531, 0.07366041564941406, 0.0736727066040039, 0.07387276458740234, 0.07769718170166015, 0.07413807678222656, 0.0738564453125, 0.07369785308837891, 0.07339584350585937, 0.07382624053955078, 0.0740561294555664, 0.0737996826171875, 0.07351868438720703, 0.07371612548828126, 0.07366614532470703, 0.07396393585205079, 0.07400038146972657, 0.07378112030029296, 0.07414176177978515, 0.07431378936767578, 0.07393417358398438, 0.07422415924072266, 0.07428905487060547, 0.07530028533935547, 0.07737014770507812, 0.07453001403808594, 0.07447795104980469, 0.07440201568603516, 0.07450233459472656, 0.074515869140625, 0.07434095764160156, 0.07476751708984375, 0.07412435150146485, 0.07437904357910156, 0.07391961669921875, 0.0742317123413086, 0.07403823852539063, 0.07443154907226562, 0.07427731323242187, 0.07362710571289062, 0.07352988433837891, 0.07362355041503907, 0.07385292816162109, 0.07430143737792969, 0.07427072143554687, 0.07411302185058594, 0.07356617736816407, 0.07405955505371094, 0.073963134765625, 0.07426227569580078, 0.07393315124511719, 0.07435836791992187, 0.0739276123046875, 0.07341983795166016, 0.07369004821777343, 0.07415094757080078, 0.07452105712890625, 0.07432038116455078, 0.07539670562744141, 0.07436089324951171, 0.07550348663330078, 0.07428144073486329, 0.07452588653564453, 0.07505184173583984, 0.07516476440429687, 0.0755080337524414, 0.07444108581542969, 0.0743503646850586, 0.07477260589599609, 0.07414201354980468, 0.07418182373046875, 0.07467501068115234, 0.07437107086181641, 0.07484361267089844, 0.07391900634765625, 0.07411686706542969, 0.07403266906738282, 0.07479145812988282, 0.07396985626220703, 0.07402425384521484, 0.07337779235839843, 0.07327584075927734, 0.07602543640136719, 0.07611158752441406, 0.0743966064453125, 0.07422566223144532, 0.07329792022705078, 0.07310294342041015, 0.0726944351196289, 
0.07388480377197265, 0.07431433868408203, 0.08445545959472656, 0.07465513610839844, 0.07382077026367187, 0.07430095672607422, 0.07443897247314453, 0.07449616241455079, 0.0740997772216797, 0.07508390045166016, 0.07478265380859375, 0.07441907501220703, 0.0740843505859375, 0.07663616180419922, 0.07524518585205078, 0.07516544342041015, 0.07428358459472656, 0.07402706909179688, 0.0741560287475586, 0.07390396881103516, 0.07434051513671874, 0.07387068939208985, 0.0744741439819336, 0.07390364837646485, 0.0741421127319336, 0.07394879913330078, 0.07434230041503906, 0.0744486083984375, 0.07435126495361329, 0.07377458953857421, 0.07366912078857422, 0.07399008178710938, 0.07397193908691406, 0.07436697387695312, 0.07396047973632812, 0.07536911773681641, 0.07472764587402343, 0.07423503875732422, 0.0741976318359375, 0.07462944030761719, 0.07417036437988281, 0.07487010955810547, 0.07473538970947266, 0.07437811279296876, 0.0741212158203125, 0.07459996795654297, 0.07464189147949218, 0.07628089904785157, 0.07577056121826171, 0.07545616149902344, 0.07445769500732421, 0.07463267517089844, 0.07473725128173828, 0.07534467315673828, 0.0750675506591797, 0.0744796142578125, 0.07371981048583984, 0.07430947113037109, 0.0743466567993164, 0.07386930847167969, 0.07406992340087891, 0.07507363128662109, 0.07456905364990235, 0.07446717071533203, 0.07448454284667969, 0.07506739044189453, 0.07868211364746094, 0.07986988830566406, 0.0744940185546875, 0.07438130950927735, 0.07376076507568359, 0.07387728118896485, 0.07419699096679687, 0.07435286712646484, 0.07396147155761719, 0.0739205093383789, 0.07391539001464843, 0.07386316680908203, 0.07396940612792968, 0.07443247985839843, 0.07535577392578124, 0.07456221008300781, 0.07435244750976562, 0.07430889892578126, 0.07395011138916016, 0.07424585723876953, 0.07426889801025391, 0.07433168029785156, 0.07481382751464843, 0.07440809631347656, 0.07391619110107422, 0.07379990386962891, 0.0746470718383789, 0.07390022277832031, 0.07395561981201172, 0.073270751953125, 0.07308108520507813, 0.07384012603759765, 0.07353542327880859, 0.07370838165283203, 0.07367453002929687, 0.07383676910400391, 0.07442966461181641, 0.07407820892333984, 0.07367145538330078, 0.07352476501464844, 0.07472176361083985, 0.07466556549072266, 0.07527056121826171, 0.07399404907226563, 0.07378758239746094, 0.07350067138671874, 0.07375257873535156, 0.07415193939208985, 0.07414086151123046, 0.07393103790283204, 0.0755492172241211, 0.07393827056884765, 0.07430620574951172, 0.07503052520751953, 0.07466146850585938, 0.07433023834228515, 0.07410095977783203, 0.07471520233154297, 0.07444627380371094, 0.07638050842285156, 0.07524784088134766, 0.07445696258544922, 0.07414329528808594, 0.0747844467163086, 0.07528256225585937, 0.07398681640625, 0.07436003112792969, 0.07428377532958984, 0.07701302337646485, 0.07495033264160156, 0.07425651550292969, 0.07438098907470703, 0.07425804901123047, 0.0740495376586914, 0.07437926483154297, 0.07372799682617187, 0.0758497314453125, 0.07370457458496094, 0.07374732971191406, 0.07475609588623047, 0.07407939147949219, 0.07407814025878906, 0.07380655670166016, 0.07452272033691407, 0.07422370910644531, 0.07369728088378906, 0.07393689727783204, 0.07437926483154297, 0.07474176025390625, 0.07514514923095703, 0.07471858978271484, 0.07402470397949219, 0.07489836883544922, 0.07426048278808593, 0.07416146850585938, 0.07399040222167969, 0.07416790771484374, 0.07419785308837891, 0.07384678649902343, 0.07399833679199219, 0.07408201599121093, 0.0741495361328125, 0.07475878143310546, 0.0747540512084961, 
0.07426982116699218, 0.07411183929443359, 0.07500393676757812, 0.07428415679931641, 0.07423065948486328, 0.0739532470703125, 0.07333875274658203, 0.07338336181640626, 0.07363817596435547, 0.07400902557373047, 0.07777836608886719, 0.07384524536132812, 0.07413766479492187, 0.0740986557006836, 0.0736993637084961, 0.07393484497070313, 0.07433971405029297, 0.07434697723388672, 0.07419715118408203, 0.07399542236328124, 0.07419084930419922, 0.0742158432006836, 0.07465414428710937, 0.07443199920654296, 0.07460457611083984, 0.07455996704101563, 0.07473766326904296, 0.0742848663330078, 0.07441824340820312, 0.07514476776123047, 0.07721836853027343, 0.0742336654663086, 0.07326787567138672, 0.07348429107666016, 0.07388678741455078, 0.07407046508789063, 0.07412992095947266, 0.07500800323486329, 0.0753477783203125, 0.07447321319580077, 0.07364153289794922, 0.07411801910400391, 0.07405101013183593, 0.0742130584716797, 0.0738372802734375, 0.07335478210449219, 0.07351062774658203, 0.07367362976074218, 0.07425433349609376, 0.07436601257324219, 0.07418707275390625, 0.07415408325195312, 0.07515097808837891, 0.07374531555175781, 0.07368425750732421, 0.07490207672119141, 0.07489113616943359, 0.0741911392211914, 0.07443641662597657, 0.0753420181274414, 0.07461273956298828, 0.0743353271484375, 0.07475087738037109, 0.07431168365478516, 0.07519369506835938, 0.07417513275146484, 0.07439974212646484, 0.07419904327392578, 0.07532479858398437, 0.07404608154296875, 0.07438745880126953, 0.0735373764038086, 0.07392886352539063, 0.07339328002929688, 0.07403183746337891, 0.07455760192871094, 0.07422108459472657, 0.07404182434082031, 0.07475199890136719, 0.07399833679199219, 0.07426662445068359, 0.07390322875976563, 0.07462137603759765, 0.07398854064941406, 0.07439564514160156, 0.07366041564941406, 0.07447862243652344, 0.07402185821533203, 0.07734473419189453, 0.07484623718261718, 0.07423385620117187, 0.07449766540527344, 0.074074462890625, 0.07517801666259766, 0.07525785827636719, 0.08058236694335938, 0.0787212142944336, 0.07431648254394531, 0.07423129272460938, 0.074276611328125, 0.07448371124267578, 0.0745068130493164, 0.0744587173461914, 0.07435939025878906, 0.07421878051757813, 0.07393746948242187, 0.07370972442626954, 0.07456095886230468, 0.07417817687988282, 0.07367266845703126, 0.07388668823242188, 0.07349043273925782, 0.07318732452392578, 0.07367884826660157, 0.07348799896240234, 0.07392294311523437, 0.07411711883544922, 0.07379702758789063, 0.07372032165527344, 0.07362569427490234, 0.07306985473632813, 0.0738741455078125, 0.07403916931152343, 0.07384835052490234, 0.07354838562011719, 0.07332454681396484, 0.07334083557128906, 0.07368224334716797, 0.07391107177734375, 0.07415798187255859, 0.07417046356201172, 0.0745164794921875, 0.07452262115478515, 0.07437337493896484, 0.074487548828125, 0.07420281219482422, 0.07395542144775391, 0.07523721313476563, 0.0742076187133789, 0.07411481475830078, 0.0741849594116211, 0.07440179443359375, 0.07623407745361328, 0.07419766235351563, 0.07392460632324219, 0.0740079345703125, 0.07380441284179687, 0.07499958038330078, 0.07404361724853516, 0.07352114868164063, 0.07349657440185547, 0.07306390380859375, 0.07362000274658204, 0.07332860565185546, 0.07363791656494141, 0.07379299163818359, 0.07372239685058593, 0.07357440185546875]",tokens/s,13.449279433666547,, 
8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,2016.866304,1252.982784,0.0,857.735168,829.14304,s,1,9.9037060546875,9.9037060546875,0.0,9.9037060546875,9.9037060546875,9.9037060546875,9.9037060546875,[9.9037060546875],,kWh,7.237408021669579e-05,7.9762110518436e-06,2.5385575864012022e-05,0.00010573586713255141,,MB,2069.7088,1542.38976,0.0,1126.170624,1096.740864,s,10,0.8928128356933593,0.08928128356933593,0.0006608523235376482,0.08932579040527344,0.08998932495117187,0.09003573532104492,0.09007286361694336,"[0.09008214569091796, 0.08997901153564453, 0.08905856323242188, 0.08850822448730469, 0.08941951751708985, 0.0879260482788086, 0.08981951904296875, 0.08923206329345704, 0.08985135650634765, 0.08893638610839844]",tokens/s,2867.342289060956,kWh,2.6036028127213455e-06,2.8713099847076336e-07,1.1604655301417716e-06,4.05119934133388e-06,tokens/kWh,63191163.512509495,MB,2073.796608,1565.458432,0.0,1149.239296,1096.743424,s,10,54.44341552734375,5.444341552734375,0.01229974840854743,5.44352294921875,5.4570804199218745,5.463798999023438,5.469173862304688,"[5.45218359375, 5.44805810546875, 5.4460361328125, 5.43594287109375, 5.441009765625, 5.4293740234375, 5.4284873046875, 5.45558740234375, 5.470517578125, 5.43621875]",tokens/s,11.57164725794229,kWh,0.0001584023618814468,1.7472330678598248e-05,6.178527927086001e-05,0.00023765997183090504,tokens/kWh,265084.6060220207,,s,630,54.4407574539184,0.08641390072050549,0.0010975190457818203,0.08617566680908204,0.08732050094604492,0.08799230728149414,0.09161460517883302,"[0.08679596710205079, 0.08654541015625, 
0.08620751953125, 0.08669680023193359, 0.08647078704833984, 0.08680652618408204, 0.0862371826171875, 0.08638873291015625, 0.0860255355834961, 0.08643449401855469, 0.08658729553222656, 0.08620015716552734, 0.08715084838867188, 0.08659932708740234, 0.0880643539428711, 0.08645760345458985, 0.08768911743164062, 0.08693750762939453, 0.09107100677490235, 0.08609423828125, 0.0857456283569336, 0.08613890838623046, 0.08606924438476563, 0.08660582733154297, 0.08645782470703126, 0.08905347442626953, 0.08705443572998046, 0.08610012817382813, 0.08673187255859376, 0.08642176055908203, 0.08589584350585938, 0.0858662109375, 0.0866116485595703, 0.08666297912597656, 0.08682575988769531, 0.08644198608398437, 0.08630671691894531, 0.08627005004882812, 0.08645222473144532, 0.08651366424560547, 0.08695152282714844, 0.08606963348388671, 0.08620604705810547, 0.08622329711914062, 0.08617814636230468, 0.0862327651977539, 0.08665814208984375, 0.08599027252197265, 0.08622812652587891, 0.086395263671875, 0.08592227172851563, 0.0862198715209961, 0.08585004425048828, 0.08627001953125, 0.08603536224365234, 0.08692530822753906, 0.08606924438476563, 0.08604579162597656, 0.08583245086669922, 0.08568195343017578, 0.0865713882446289, 0.08634329223632813, 0.08732096099853516, 0.08675353240966797, 0.09192031860351563, 0.08682291412353516, 0.08732044982910156, 0.08725904083251954, 0.08827846527099609, 0.08769580841064453, 0.08686003112792968, 0.08635196685791016, 0.0859156494140625, 0.08629862213134766, 0.08617715454101563, 0.08597392272949218, 0.08585596466064453, 0.08655020904541015, 0.08590684509277344, 0.08622537231445312, 0.08674678039550782, 0.08654723358154297, 0.08596685028076172, 0.08646656036376953, 0.08603871917724609, 0.08628524780273437, 0.08624166107177735, 0.08637900543212891, 0.08599545288085937, 0.08662022399902344, 0.08658943939208984, 0.08637030029296874, 0.086614013671875, 0.08634681701660156, 0.0866517105102539, 0.08610623931884766, 0.08650927734375, 0.08605519866943359, 0.08607743835449219, 0.08637644958496093, 0.08669593811035156, 0.08665702056884765, 0.08654557037353515, 0.08617382049560547, 0.08605465698242187, 0.0862873306274414, 0.08575590515136719, 0.08577228546142578, 0.08594009399414063, 0.08621478271484374, 0.08597299194335938, 0.08600950622558594, 0.0859668197631836, 0.08616588592529296, 0.08646249389648437, 0.08661808013916016, 0.08611225891113282, 0.08684496307373046, 0.08586470031738282, 0.08639250946044921, 0.08736370849609375, 0.08699126434326172, 0.08570674896240234, 0.08596479797363281, 0.0861143035888672, 0.08599868774414063, 0.08610002899169922, 0.0872872314453125, 0.08660163116455079, 0.08594908905029297, 0.086263427734375, 0.08770992279052735, 0.08619570922851562, 0.0853326416015625, 0.08598528289794923, 0.0858005142211914, 0.08627763366699219, 0.08565795135498047, 0.0917523193359375, 0.08838832092285157, 0.08667759704589843, 0.0860322265625, 0.08593004608154296, 0.08587232208251953, 0.0854686050415039, 0.08579366302490235, 0.08672870635986328, 0.08608258819580078, 0.08658147430419921, 0.08562723541259766, 0.08587715148925781, 0.08625698852539063, 0.08682768249511719, 0.08654029083251953, 0.08752537536621094, 0.08610963439941406, 0.08595852661132812, 0.0870284194946289, 0.08597840118408204, 0.0861662368774414, 0.0863121566772461, 0.08641613006591797, 0.08866387176513672, 0.0866746597290039, 0.08625385284423828, 0.08570687866210938, 0.08588540649414063, 0.08588825225830078, 0.08593417358398438, 0.08636278533935547, 0.08616915130615234, 0.08599814605712891, 0.08615666961669922, 
0.08587532806396485, 0.08582089233398438, 0.0856171875, 0.08596685028076172, 0.08672051239013671, 0.08571686553955078, 0.0912774429321289, 0.08633753967285156, 0.08607510375976563, 0.08569884490966796, 0.08650752258300781, 0.08612035369873047, 0.08622089385986328, 0.08604057312011719, 0.08674527740478516, 0.08625132751464844, 0.08603014373779297, 0.08566989135742188, 0.08658678436279296, 0.08600166320800781, 0.08615817260742188, 0.08589884948730468, 0.0858862075805664, 0.08563177490234375, 0.08673702239990234, 0.08578457641601563, 0.08688025665283203, 0.08633782196044922, 0.08591862487792969, 0.08574816131591798, 0.08685011291503907, 0.08543344116210938, 0.08629273223876953, 0.08611190032958985, 0.08629891204833984, 0.0867476806640625, 0.08615046691894532, 0.08571539306640626, 0.08575142669677735, 0.0858641586303711, 0.08571116638183594, 0.0864692153930664, 0.08663005065917968, 0.08695228576660156, 0.08617779541015624, 0.08558796691894531, 0.08640128326416016, 0.08617916870117187, 0.08598979187011718, 0.08605900573730468, 0.08560617828369141, 0.08638076782226563, 0.08598118591308594, 0.08808448028564453, 0.08635945892333985, 0.08617772674560546, 0.0863238754272461, 0.08567295837402343, 0.08665542602539063, 0.08606329345703125, 0.08630470275878906, 0.08581999969482422, 0.08605270385742188, 0.0865418243408203, 0.08594419097900391, 0.08622348785400391, 0.08575596618652344, 0.08689862060546875, 0.08697551727294922, 0.08759327697753906, 0.08602588653564452, 0.08618495941162109, 0.08627001953125, 0.09024505615234375, 0.08612454223632812, 0.08580912017822266, 0.08622287750244141, 0.08646604919433594, 0.08626432037353515, 0.08644812774658203, 0.09576038360595703, 0.08624486541748047, 0.08614262390136719, 0.08574886322021484, 0.08567180633544921, 0.0853440933227539, 0.08564736175537109, 0.08615923309326172, 0.08672268676757812, 0.08583984375, 0.08976902770996094, 0.08639382171630859, 0.0864677734375, 0.08633350372314454, 0.08720793914794922, 0.08571571350097656, 0.08518150329589844, 0.08569747161865235, 0.08629043579101563, 0.08636006164550782, 0.08566169738769532, 0.08693283081054687, 0.08557839965820313, 0.08594550323486329, 0.08578953552246094, 0.0859279327392578, 0.08573747253417968, 0.08579424285888672, 0.08608739471435548, 0.08590016174316406, 0.08627811431884766, 0.08609308624267578, 0.08593843078613281, 0.08617417907714844, 0.08633673858642578, 0.0866170883178711, 0.0864150390625, 0.0863018569946289, 0.08572819519042969, 0.08642073822021484, 0.0939702377319336, 0.08617056274414063, 0.08629023742675782, 0.08636835479736328, 0.0859768295288086, 0.08561312103271485, 0.08569014739990234, 0.08539663696289063, 0.08604348754882812, 0.08603443145751953, 0.08573725128173829, 0.0855042266845703, 0.08551321411132813, 0.0860231704711914, 0.08632867431640626, 0.08598137664794922, 0.08809654235839844, 0.08577008056640625, 0.0853831024169922, 0.08551248168945312, 0.08553129577636719, 0.08500994873046876, 0.08557839965820313, 0.08562655639648438, 0.086417724609375, 0.08617372894287109, 0.086087646484375, 0.08545587158203125, 0.08617171478271485, 0.08556845092773438, 0.08546918487548828, 0.08610163116455079, 0.08562726593017578, 0.08606310272216797, 0.08556134033203125, 0.08632319641113281, 0.08560435485839844, 0.08568243408203124, 0.085587646484375, 0.08583139038085938, 0.08645053100585938, 0.0854302749633789, 0.0862259521484375, 0.08625251007080079, 0.08554112243652344, 0.08527232360839844, 0.08581484985351563, 0.087019775390625, 0.08626914978027343, 0.08606339263916016, 0.0861943359375, 
0.08633193969726563, 0.08640431976318359, 0.08559081268310546, 0.08655052947998047, 0.08572255706787109, 0.08773894500732422, 0.08662608337402344, 0.08591766357421875, 0.08575926208496094, 0.08561090850830078, 0.08559468841552734, 0.08555519866943359, 0.08797798156738282, 0.08607273864746094, 0.08564595031738281, 0.08584188842773438, 0.08820496368408202, 0.08650582122802734, 0.08548889923095702, 0.08568511962890625, 0.08609779357910156, 0.08647792053222657, 0.08603292846679687, 0.08678403472900391, 0.0959266586303711, 0.08604783630371093, 0.08563302612304688, 0.08536268615722656, 0.08523661041259765, 0.08549375915527344, 0.08538873291015625, 0.0858120346069336, 0.08655232238769531, 0.08594226837158203, 0.08625151824951172, 0.08618502044677734, 0.08576121520996094, 0.08573078155517579, 0.08565094757080079, 0.08554566192626953, 0.08578876495361328, 0.08619213104248047, 0.086329345703125, 0.0859156494140625, 0.08606044769287109, 0.08628489685058593, 0.08581324768066406, 0.08613811492919922, 0.08622156524658203, 0.08562102508544922, 0.08542384338378907, 0.0858419189453125, 0.08561254119873046, 0.08626790618896485, 0.08607350158691406, 0.08538505554199219, 0.08575360107421875, 0.08571286773681641, 0.08600748443603516, 0.08565593719482421, 0.08560176086425782, 0.08568089294433594, 0.08579071807861328, 0.08912467193603515, 0.09313465881347656, 0.08685606384277343, 0.08684748840332031, 0.08619213104248047, 0.08651776123046875, 0.0869552001953125, 0.086067138671875, 0.08590946960449218, 0.08597801971435547, 0.08600707244873047, 0.086001953125, 0.08557202911376953, 0.08599561309814453, 0.08573487854003906, 0.08631110382080077, 0.08620467376708985, 0.08650096130371093, 0.08628585815429687, 0.08568937683105468, 0.0855054702758789, 0.0855712661743164, 0.08556617736816406, 0.08583535766601562, 0.08586246490478516, 0.08617161560058594, 0.08655500793457031, 0.08630009460449219, 0.08587117004394532, 0.08527593231201172, 0.08700387573242188, 0.08605900573730468, 0.08637814331054687, 0.08609417724609375, 0.08650838470458984, 0.08648851013183594, 0.08636678314208984, 0.08635391998291016, 0.08636383819580078, 0.08689081573486328, 0.08611020660400391, 0.08598694610595703, 0.08574755096435546, 0.08613660430908203, 0.08593679809570312, 0.08554710388183594, 0.08608972930908203, 0.08589446258544922, 0.08561529541015625, 0.085728515625, 0.0867701416015625, 0.08713587188720703, 0.08572297668457031, 0.08610419464111328, 0.08559276580810547, 0.08572652435302734, 0.0857545623779297, 0.08654755401611328, 0.08612342071533204, 0.08583372497558593, 0.086165283203125, 0.08624969482421875, 0.08599346923828124, 0.08696627044677735, 0.08600985717773438, 0.08580300903320312, 0.08574156951904296, 0.0877998046875, 0.08697606658935547, 0.08577069091796875, 0.08615849304199219, 0.08596771240234374, 0.08578185272216797, 0.08818720245361328, 0.08780425262451172, 0.08689356994628906, 0.085578369140625, 0.08592620849609375, 0.086412353515625, 0.0867422103881836, 0.08731629180908203, 0.08738323211669922, 0.08672319793701172, 0.08697270202636719, 0.08702738952636718, 0.08763005065917968, 0.08692259216308594, 0.08689427185058594, 0.08788886260986328, 0.08772927856445313, 0.08705667114257812, 0.08851315307617187, 0.0876871337890625, 0.08808448028564453, 0.0876605453491211, 0.08812879943847657, 0.0876981430053711, 0.08767369842529296, 0.08849161529541015, 0.08796591949462891, 0.0869048309326172, 0.08677311706542969, 0.08670066833496094, 0.08716697692871093, 0.08810272216796874, 0.08761974334716797, 0.08720591735839844, 0.08680818939208984, 
0.08760972595214844, 0.08707891082763672, 0.08827494049072265, 0.08695990753173828, 0.08742275238037109, 0.09033773040771484, 0.09274742126464844, 0.08713565063476562, 0.08677267456054688, 0.08712806701660156, 0.08759225463867187, 0.08672892761230469, 0.08662207794189453, 0.08772463989257813, 0.08682701110839844, 0.08598086547851562, 0.08572902679443359, 0.08630738830566406, 0.08762931060791015, 0.0860257568359375, 0.0859268798828125, 0.08627523040771484, 0.0865525131225586, 0.08740137481689453, 0.0865322265625, 0.086355712890625, 0.08660185241699218, 0.08639078521728516, 0.08584358215332032, 0.08584604644775391, 0.08589110565185547, 0.08541830444335938, 0.08604054260253906, 0.08644713592529298, 0.08615132904052734, 0.08546390533447265, 0.08583782196044921, 0.0866890869140625, 0.08591609954833984, 0.08554521942138672, 0.08543436431884766, 0.08578195190429687, 0.08609849548339844, 0.08618598175048828, 0.0865955810546875, 0.08620835113525391, 0.08553692626953124, 0.08556947326660157, 0.08576790618896485, 0.09107516479492188, 0.08641510772705079, 0.08641516876220703, 0.08559808349609375, 0.0856170883178711, 0.08581581115722656, 0.08601404571533203, 0.08714387512207031, 0.08601814270019531, 0.0858642578125, 0.08583638763427734, 0.08540070343017578, 0.08570764923095703, 0.08589228820800782, 0.08556752014160156, 0.08573212432861328, 0.08628018951416015, 0.08569356536865234, 0.08576908874511718, 0.08849203491210937, 0.0870645751953125, 0.0859557113647461, 0.08631795501708985, 0.08837308502197265, 0.0865547866821289, 0.08562601470947266, 0.08543830108642578, 0.08571392059326172, 0.08668156433105469, 0.0866295394897461, 0.08749686431884765, 0.08779440307617188, 0.08654656219482422, 0.08577216339111328, 0.0872264633178711, 0.08595855712890625, 0.08567561340332032, 0.08565760040283203, 0.08687593841552735, 0.08601181030273437, 0.08618601226806641, 0.08698745727539063, 0.08647065734863281, 0.08580857849121094, 0.08628896331787109, 0.0864579849243164, 0.086181884765625, 0.08595439910888672, 0.08627458953857423, 0.0880040283203125, 0.08604524993896484, 0.08608358764648437, 0.08564940643310547, 0.08570880126953125, 0.08599961853027344, 0.0860057601928711, 0.08620236968994141, 0.08756371307373047, 0.0869238052368164, 0.08600169372558594, 0.08610774230957031, 0.08796163177490235, 0.0865153579711914, 0.08561942291259765, 0.08564736175537109, 0.08548761749267578]",tokens/s,11.572212244351402,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,5097.627648,3461.28384,0.0,3066.036224,2865.160192,s,1,13.38390625,13.38390625,0.0,13.38390625,13.38390625,13.38390625,13.38390625,[13.38390625],,kWh,0.00017487062844995005,1.9281217380313427e-05,6.501616312401537e-05,0.00025916800895427883,,MB,5149.663232,3790.536704,0.0,3374.317568,3158.448128,s,10,0.9157994461059572,0.0915799446105957,0.0011892466327708747,0.09108694076538086,0.09342338409423828,0.09351868362426759,0.09359492324829102,"[0.09025955200195312, 0.09040876770019532, 0.09074793243408204, 0.0907008285522461, 0.09142594909667968, 0.09361398315429688, 0.09259849548339844, 0.09199523162841797, 0.09064649963378907, 0.09340220642089844]",tokens/s,2795.37185885545,kWh,2.7058780424000155e-06,2.9840813150666025e-07,1.496266629111287e-06,4.500552803017963e-06,tokens/kWh,56881901.22518561,MB,5153.906688,3790.536704,0.0,3374.317568,3158.450688,s,10,55.28744775390625,5.528744775390625,0.02828313822901441,5.533910888671874,5.561607861328125,5.561866918945313,5.562074165039062,"[5.48099658203125, 5.48805810546875, 5.5044287109375, 5.536615234375, 5.51940771484375, 5.56155029296875, 5.54486474609375, 5.55819384765625, 5.53120654296875, 5.5621259765625]",tokens/s,11.394991550419114,kWh,0.00016187142267759847,1.7854969432405406e-05,6.869442841228868e-05,0.00024842082052229254,tokens/kWh,253601.9318652342,,s,630,55.28498953247067,0.08775395163884239,0.0011452891762173502,0.08761142349243164,0.08863429946899415,0.08943564682006835,0.09132697471618652,"[0.08672249603271484, 0.08921660614013673, 0.08627180480957031, 0.08731305694580078, 0.0873371810913086, 0.08705142211914063, 0.08651136016845704, 0.08661033630371094, 0.08614755249023437, 0.08600479888916016, 0.08675353240966797, 0.08663724517822266, 0.08671340942382813, 0.08705039978027344, 0.08636041259765626, 0.086299072265625, 0.08696361541748047, 0.08683487701416015, 0.08672348785400391, 0.08708096313476563, 0.08619414520263671, 0.08639411163330078, 0.08615299224853516, 0.08633042907714844, 0.08709673309326171, 0.08659552001953125, 0.0863279037475586, 0.08664678192138672, 0.08673075103759766, 0.08678137969970703, 0.0874686050415039, 0.08691232299804688, 0.08670272064208985, 0.08641276550292969, 0.08648764801025391, 0.08669776153564453, 0.08782447814941406, 0.08641081237792969, 0.08690950775146485, 0.08630271911621094, 0.08749228668212891, 0.08859024047851563, 0.08655913543701171, 0.08709939575195312, 0.09019391632080079, 0.08824755096435546, 0.08671040344238282, 0.086995361328125, 0.0865315170288086, 0.08727817535400391, 0.08965074920654297, 0.0903944320678711, 0.0877423324584961, 0.08647776031494141, 0.08700227355957031, 0.08631951904296875, 0.08639859008789062, 0.0870544662475586, 0.08724534606933594, 0.08680770874023437, 0.0864901123046875, 0.0872790756225586, 0.08621724700927734, 0.0864300765991211, 0.08674457550048828, 0.08694953918457031, 0.0866251220703125, 0.0865005111694336, 0.08636252593994141, 0.08686841583251953, 0.08805593872070312, 0.08734047698974609, 0.08655712127685547, 0.08668364715576173, 0.08688758087158203, 0.08662335968017579, 0.08706598663330078, 0.08693135833740234, 0.08662265777587891, 0.08678809356689453, 0.08698191833496094, 0.08695267486572265, 0.08637030029296874, 0.08682003021240234, 0.08649967956542969, 0.0866984634399414, 0.08723865509033203, 0.08686182403564453, 0.0871725082397461, 0.08734146881103516, 0.09051907348632812, 0.08798684692382812, 0.0868106231689453, 0.08767398071289062, 0.08646131134033203, 0.08711500549316406, 
0.08680524444580077, 0.0869191665649414, 0.08658029174804688, 0.08715058898925782, 0.08689759826660157, 0.0867872314453125, 0.08687091064453124, 0.08707711791992187, 0.08726252746582032, 0.087072509765625, 0.08758477020263672, 0.08744003295898438, 0.08725078582763672, 0.08689667510986328, 0.08698483276367187, 0.08744290924072265, 0.08677430725097657, 0.08726707458496094, 0.08734336090087891, 0.0901711654663086, 0.08681494140625, 0.08739408111572265, 0.08768144226074219, 0.08700457763671875, 0.08753603363037109, 0.08715264129638672, 0.08681881713867187, 0.08741248321533203, 0.08664214324951172, 0.08724150085449218, 0.08748851013183594, 0.08652909088134765, 0.08695410919189453, 0.08725177764892578, 0.0868331527709961, 0.08665907287597656, 0.08636316680908203, 0.08964969635009766, 0.08684108734130859, 0.08767507171630859, 0.08820172882080078, 0.08718745422363282, 0.08751280212402343, 0.08670236968994141, 0.08757247924804687, 0.08760115051269532, 0.08708064270019532, 0.08687033843994141, 0.08836710357666015, 0.08680448150634766, 0.08678825378417969, 0.08713187408447266, 0.08694182586669921, 0.08683721923828125, 0.08659964752197266, 0.0871629409790039, 0.08775475311279297, 0.08809471893310547, 0.08714649963378907, 0.08667545318603516, 0.09059667205810547, 0.08697277069091797, 0.08667910766601562, 0.08709606170654297, 0.08641539001464844, 0.08729395294189453, 0.08661395263671876, 0.08662432098388671, 0.0909677734375, 0.08739868927001954, 0.0868384017944336, 0.08705522918701172, 0.08800800323486328, 0.08683904266357421, 0.0872674560546875, 0.0874334716796875, 0.08734982299804687, 0.08711513519287109, 0.09081037139892578, 0.08801340484619141, 0.08753907012939453, 0.08694646453857421, 0.0868325424194336, 0.08708911895751953, 0.08690560150146484, 0.08682460784912109, 0.08802649688720703, 0.08714348602294922, 0.08666703796386718, 0.08761753845214844, 0.08724272155761718, 0.08727263641357422, 0.08740541076660156, 0.08711526489257812, 0.0874639663696289, 0.08712374114990235, 0.08803603363037109, 0.08776627349853515, 0.08742578887939453, 0.08734620666503906, 0.08781104278564453, 0.08759910583496094, 0.08808201599121093, 0.08745820617675781, 0.08749056243896484, 0.08765235137939453, 0.08943001556396485, 0.08964688110351562, 0.08870320129394531, 0.08752742767333985, 0.08788582611083984, 0.08790611267089844, 0.08831916809082031, 0.08848700714111328, 0.08786319732666016, 0.08739984130859375, 0.08759766387939454, 0.08777932739257813, 0.08754291534423828, 0.08910323333740235, 0.08783846282958985, 0.08857830047607422, 0.09074073791503906, 0.08742707061767578, 0.08813702392578125, 0.08794719696044923, 0.08765705871582032, 0.08713375854492188, 0.08719542694091797, 0.0881200942993164, 0.08866560363769531, 0.09160710144042969, 0.08810765075683594, 0.0873388442993164, 0.08696217346191407, 0.08822486114501953, 0.08830818939208984, 0.08768761444091797, 0.08727916717529297, 0.08744547271728516, 0.08770742034912109, 0.08768511962890625, 0.08852345275878906, 0.08725424194335937, 0.08725583648681641, 0.08711539459228515, 0.0873722915649414, 0.08787126159667968, 0.08744931030273438, 0.08794560241699219, 0.08732466888427734, 0.08680413055419922, 0.08766226959228515, 0.08744812774658203, 0.08761148834228516, 0.08736358642578125, 0.08777145385742187, 0.08790460968017579, 0.08782425689697265, 0.0876559066772461, 0.08772281646728515, 0.08731619262695313, 0.087531005859375, 0.08743417358398438, 0.08740585327148437, 0.08739708709716797, 0.08702976226806641, 0.08752947235107422, 0.08790016174316406, 0.08959964752197265, 
0.08764019012451171, 0.08738569641113281, 0.08931375885009765, 0.08813993835449219, 0.08786316680908203, 0.08801702117919921, 0.08744898986816406, 0.08740310668945313, 0.08751500701904297, 0.08792896270751953, 0.09071820831298828, 0.08827699279785156, 0.08729190063476562, 0.08747539520263672, 0.08677401733398438, 0.08715116882324218, 0.08721202850341797, 0.08762163543701172, 0.08744550323486328, 0.08704144287109375, 0.08678256225585937, 0.08739997100830078, 0.08822950744628906, 0.08725385284423828, 0.08747007751464844, 0.087364990234375, 0.08755580902099609, 0.08771389007568359, 0.08752761840820313, 0.08716966247558594, 0.08723865509033203, 0.08765644836425782, 0.08714649963378907, 0.08775475311279297, 0.08766025543212891, 0.08725122833251953, 0.08726732635498047, 0.08761277008056641, 0.0876304931640625, 0.08797529602050781, 0.08717375946044922, 0.08711373138427735, 0.08699884796142578, 0.08758665466308593, 0.08728399658203125, 0.08755001831054687, 0.08751516723632813, 0.08740860748291016, 0.08717842864990234, 0.08839254760742188, 0.08801074981689454, 0.08871116638183593, 0.08743936157226563, 0.0880345916748047, 0.0877677764892578, 0.08830770874023437, 0.08798598480224609, 0.08887519836425781, 0.08826470184326173, 0.08820896148681641, 0.08896969604492187, 0.08988047790527344, 0.08874569702148437, 0.08880982208251953, 0.08880086517333985, 0.08873158264160157, 0.08877481842041016, 0.08920060729980468, 0.08793910217285156, 0.08779193878173829, 0.08765187072753906, 0.08757465362548827, 0.08846371459960937, 0.09420390319824219, 0.0915979232788086, 0.0886341781616211, 0.08780169677734374, 0.08774687957763672, 0.08810006713867187, 0.08765497589111328, 0.08778294372558594, 0.0878086395263672, 0.08815837097167968, 0.08808432006835938, 0.08825241851806641, 0.08764643096923828, 0.08791395568847657, 0.08769725036621094, 0.08778905487060547, 0.08829574584960938, 0.08774518585205078, 0.08753561401367188, 0.08732057952880859, 0.08766368103027344, 0.08805452728271485, 0.08821778869628906, 0.08750406646728516, 0.08784317016601563, 0.08755862426757813, 0.08825856018066407, 0.08786739349365234, 0.08842422485351563, 0.08827942657470703, 0.08799420928955078, 0.08779571533203125, 0.08786688232421876, 0.08917453002929687, 0.08760476684570312, 0.08820076751708984, 0.08793529510498047, 0.08761164855957031, 0.08835485076904297, 0.0876148452758789, 0.08776972961425782, 0.09135222625732421, 0.0876839370727539, 0.08769081878662109, 0.0877531509399414, 0.08740249633789063, 0.08743936157226563, 0.09081609344482422, 0.08972329711914062, 0.08811110687255859, 0.08780595397949219, 0.08749056243896484, 0.08716659545898438, 0.08759539031982422, 0.08794461059570312, 0.08833695983886719, 0.08775273895263672, 0.08736972808837891, 0.08742060852050781, 0.087484130859375, 0.08820387268066407, 0.08757148742675781, 0.08769430541992188, 0.08754176330566406, 0.08720384216308594, 0.08737177276611328, 0.08774166107177735, 0.08764905548095703, 0.09094876861572265, 0.08775308990478516, 0.0875232925415039, 0.0875647964477539, 0.08784512329101563, 0.08724364471435547, 0.08772281646728515, 0.08759302520751953, 0.0875387191772461, 0.08767378997802734, 0.08850844573974609, 0.08829686737060546, 0.0885824966430664, 0.08774272155761718, 0.0876578598022461, 0.08825676727294922, 0.08779199981689453, 0.08738979339599609, 0.08748073577880859, 0.08830156707763671, 0.09102745819091797, 0.08817254638671874, 0.08764559936523438, 0.08774102020263672, 0.08796774291992188, 0.08778125, 0.08846963500976562, 0.08853609466552734, 0.08775369262695312, 
0.08736675262451171, 0.08763689422607422, 0.08759442901611328, 0.08807635498046874, 0.08872716522216798, 0.08831168365478516, 0.08841081237792969, 0.08795337677001953, 0.08818019104003906, 0.0875607681274414, 0.0882339859008789, 0.08775478363037109, 0.0879943389892578, 0.08783052825927734, 0.08850249481201172, 0.09037596893310547, 0.08786265563964844, 0.08787324523925781, 0.08783721923828125, 0.08890751647949219, 0.08787417602539062, 0.08809081268310547, 0.08781804656982421, 0.08863539123535157, 0.08790016174316406, 0.08854243469238281, 0.08834742736816406, 0.08780966186523438, 0.08776105499267578, 0.08798131561279297, 0.08815305328369141, 0.088923583984375, 0.08761135864257813, 0.08738188934326171, 0.0874750747680664, 0.08728521728515624, 0.08759539031982422, 0.08837324523925781, 0.08790214538574219, 0.08708412933349609, 0.08769779205322266, 0.08736418914794922, 0.08785414123535157, 0.08748332977294922, 0.08834662628173828, 0.08718540954589844, 0.0878380126953125, 0.08805856323242188, 0.08794882965087891, 0.0877265625, 0.08813164520263672, 0.08754720306396484, 0.08984844970703125, 0.08886617279052735, 0.08782460784912109, 0.08749097442626953, 0.08988671875, 0.08825651550292969, 0.08795458984375, 0.08866492462158203, 0.08799436950683594, 0.08820243072509766, 0.08767139434814453, 0.08733103942871094, 0.0989881591796875, 0.08893660736083984, 0.08751203155517578, 0.08720665740966797, 0.08752947235107422, 0.0876883544921875, 0.08787439727783203, 0.08763362884521485, 0.08736112213134765, 0.08708985900878906, 0.0873512954711914, 0.08757852935791016, 0.08778342437744141, 0.0881460189819336, 0.08796329498291015, 0.08748281860351563, 0.08745148468017579, 0.08823177337646484, 0.08793660736083984, 0.08758911895751953, 0.08733734130859375, 0.08747417449951173, 0.0876786880493164, 0.08753753662109375, 0.08719833374023438, 0.08713375854492188, 0.0869948501586914, 0.08731065368652344, 0.08781327819824218, 0.08812630462646484, 0.08795516967773437, 0.08710956573486328, 0.08712841796875, 0.08766585540771485, 0.08843462371826172, 0.08758975982666016, 0.08719155120849609, 0.08718268585205079, 0.09164822387695312, 0.0887996826171875, 0.08814966583251953, 0.08808403015136719, 0.08771398162841797, 0.0873778533935547, 0.08696809387207032, 0.08752406311035156, 0.0876525115966797, 0.08738374328613281, 0.08757484436035157, 0.0873587188720703, 0.08726399993896485, 0.0878182373046875, 0.08740367889404296, 0.08811427307128906, 0.09023257446289062, 0.08875417327880859, 0.08800227355957031, 0.0873864974975586, 0.08696221160888672, 0.08701689910888671, 0.08702207946777343, 0.08722220611572265, 0.0878551025390625, 0.08772777557373047, 0.09126515197753907, 0.08815779113769531, 0.08896109008789063, 0.0887603530883789, 0.08806735992431641, 0.08819305419921875, 0.08784966278076171, 0.08771078491210937, 0.08779792022705078, 0.08847171020507813, 0.08754649353027344, 0.08798332977294922, 0.0877281265258789, 0.08887375640869141, 0.08824422454833984, 0.08807218933105469, 0.08774451446533203, 0.08773017883300781, 0.08743321228027344, 0.08777295684814453, 0.08784713745117187, 0.08775625610351563, 0.08839609527587891, 0.08810514831542969, 0.08774454498291015, 0.08944025421142578, 0.08878291320800781, 0.08796915435791015, 0.08835062408447265, 0.08740131378173828, 0.08772819519042968, 0.08798307037353516, 0.08822227478027343, 0.08760956573486328, 0.08786943817138672, 0.08769670104980469, 0.0877791976928711, 0.0891394271850586, 0.08860118103027344, 0.08816166687011719, 0.09099244689941406, 0.08832809448242188, 0.0876038055419922, 
0.08801721954345704, 0.08756018829345703, 0.08806931304931641, 0.0881526107788086, 0.10139228820800782, 0.08885244750976562, 0.08792630767822265, 0.08820211029052734, 0.08812748718261719, 0.08796729278564454, 0.08808902740478515, 0.08823935699462891, 0.08764006042480468, 0.08755043029785156, 0.08841785430908203, 0.08753376007080078, 0.08754176330566406, 0.0880268783569336, 0.08781289672851562, 0.08786534118652344, 0.08767894744873046, 0.08750508880615235, 0.08821887969970703]",tokens/s,11.395498223436944,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,846.467072,565.116928,0.0,169.869312,150.669312,s,1,8.1319296875,8.1319296875,0.0,8.1319296875,8.1319296875,8.1319296875,8.1319296875,[8.1319296875],,kWh,2.2475241483327103e-05,2.4720796588808212e-06,8.348062234014852e-06,3.3295383376222775e-05,,MB,1151.246336,625.934336,0.0,209.7152,193.680384,s,11,0.1753775987625122,0.01594341806931929,0.0001830208789097381,0.01591209602355957,0.016218271255493164,0.016245919227600096,0.016268037605285644,"[0.016158304214477538, 0.01592643165588379, 0.015769344329833984, 0.015905728340148927, 0.015714240074157716, 0.015849472045898438, 0.01591209602355957, 0.015912799835205077, 0.01627356719970703, 0.016218271255493164, 0.015737343788146972]",tokens/s,16056.782735480887,kWh,4.7600317202966865e-07,5.2494784996865563e-08,1.9738234025121626e-07,7.258802972777505e-07,tokens/kWh,352675228.9049172,MB,1162.346496,628.031488,0.0,211.812352,193.682944,s,11,10.631011718749999,0.9664556107954545,0.004707955255336871,0.9647833862304688,0.9734203491210938,0.9755199279785156,0.9771995910644531,"[0.9612041015625, 0.963017578125, 0.9734203491210938, 0.9776195068359375, 0.9675733642578125, 0.9644152221679687, 0.9658329467773438, 0.9645062255859375, 0.962111328125, 0.9647833862304688, 0.9665277099609375]",tokens/s,65.18664623215027,kWh,2.7935242829482866e-05,3.08080831378856e-06,1.0652626339749065e-05,4.16686774830205e-05,tokens/kWh,1511927.0350174126,,s,693,10.624617526054392,0.015331338421434894,0.0002571975798281347,0.01528831958770752,0.015467743682861327,0.01559829750061035,0.016082920684814452,"[0.015103391647338867, 0.015395520210266113, 0.015293439865112305, 0.015240351676940917, 0.015201984405517578, 0.015198271751403808, 0.015108096122741698, 0.015095168113708497, 0.015278719902038574, 0.015388031959533692, 0.015181856155395508, 0.015217151641845703, 0.015165504455566406, 0.015199295997619629, 0.015212544441223145, 0.015178879737854005, 0.015255392074584961, 0.015431039810180664, 0.015291040420532226, 0.015262911796569825, 0.015321887969970704, 0.015157407760620117, 0.015150912284851074, 0.015194144248962402, 0.015167776107788087, 0.015210335731506347, 0.015195232391357422, 0.015143487930297852, 0.015628512382507325, 0.015151167869567871, 0.015534015655517578, 0.015282336235046387, 0.01513372802734375, 0.015101759910583497, 0.01520844841003418, 0.015121567726135253, 
0.015132512092590332, 0.015108096122741698, 0.015118335723876953, 0.015128576278686523, 0.0151364164352417, 0.015270591735839844, 0.015163040161132813, 0.015118335723876953, 0.015151103973388673, 0.01518182373046875, 0.015159104347229004, 0.015199647903442384, 0.015176095962524415, 0.015278464317321777, 0.015263680458068848, 0.015758975982666016, 0.015300160408020019, 0.01531388759613037, 0.015300512313842773, 0.01562828826904297, 0.015278079986572265, 0.015233247756958009, 0.015353568077087402, 0.015361791610717773, 0.015433279991149903, 0.015382911682128906, 0.015280799865722656, 0.015210495948791505, 0.015159296035766602, 0.015250720024108887, 0.015338208198547363, 0.015213600158691406, 0.015199423789978028, 0.015144800186157227, 0.015251423835754395, 0.015199392318725587, 0.015217472076416015, 0.015230463981628419, 0.015234848022460938, 0.01510268783569336, 0.015616000175476074, 0.015124480247497558, 0.015689536094665526, 0.015327327728271485, 0.015276127815246583, 0.015232768058776856, 0.015247615814208984, 0.015183775901794434, 0.015189248085021973, 0.01519702434539795, 0.015193696022033692, 0.015118751525878906, 0.015143936157226562, 0.015176704406738281, 0.015152128219604492, 0.015148032188415527, 0.015408672332763673, 0.01533795166015625, 0.015219840049743652, 0.01514367961883545, 0.015323264122009277, 0.015318143844604492, 0.01525011157989502, 0.015300671577453613, 0.015187328338623046, 0.01519699192047119, 0.015296319961547851, 0.015244447708129882, 0.015260640144348145, 0.015245247840881347, 0.015386688232421874, 0.015284223556518555, 0.015187552452087402, 0.015194527626037598, 0.015306495666503906, 0.015263104438781738, 0.015407999992370606, 0.015248703956604003, 0.015440768241882324, 0.015291423797607421, 0.015257439613342284, 0.01530470371246338, 0.01524227237701416, 0.015463520050048828, 0.01570915222167969, 0.015294303894042969, 0.015346943855285644, 0.015250304222106933, 0.01532096004486084, 0.015720191955566405, 0.015302687644958497, 0.015285696029663086, 0.015344287872314453, 0.015351807594299317, 0.01532271957397461, 0.015348383903503418, 0.015326784133911133, 0.015396832466125488, 0.015339743614196778, 0.015394816398620606, 0.015266880035400391, 0.01608185577392578, 0.01529856014251709, 0.015191424369812011, 0.015264384269714356, 0.015263744354248047, 0.015278207778930663, 0.015333215713500976, 0.015315103530883788, 0.015312095642089844, 0.015286944389343261, 0.01548902416229248, 0.015381888389587402, 0.015315584182739258, 0.01534943962097168, 0.015308352470397949, 0.015481151580810548, 0.015294912338256837, 0.01534329605102539, 0.015288736343383789, 0.015376288414001465, 0.015435520172119141, 0.01525715160369873, 0.015421664237976074, 0.015407487869262696, 0.01533683204650879, 0.015309727668762207, 0.015243071556091309, 0.015234975814819337, 0.015222880363464355, 0.01521664047241211, 0.0167869758605957, 0.018315744400024415, 0.015548383712768555, 0.015445759773254394, 0.015501855850219726, 0.015332223892211914, 0.015397215843200683, 0.015251999855041503, 0.015357952117919921, 0.015422752380371094, 0.015357855796813966, 0.015769696235656737, 0.015634336471557618, 0.015490943908691407, 0.01540940761566162, 0.015311103820800781, 0.015823007583618164, 0.015400927543640136, 0.015328991889953613, 0.01531760025024414, 0.015333375930786132, 0.015277440071105958, 0.015350720405578614, 0.015233983993530273, 0.015158304214477539, 0.015282719612121583, 0.015655360221862792, 0.015238719940185547, 0.015257087707519532, 0.015783072471618653, 0.015260895729064942, 
0.015220352172851562, 0.015248319625854493, 0.015294303894042969, 0.015829312324523927, 0.01815100860595703, 0.015614208221435546, 0.015381952285766602, 0.015341600418090821, 0.015431743621826172, 0.01586390399932861, 0.015642175674438475, 0.015995360374450682, 0.015628640174865722, 0.01547878360748291, 0.015581184387207032, 0.01526748752593994, 0.015352160453796386, 0.015394559860229492, 0.015476448059082031, 0.015362591743469238, 0.015264927864074707, 0.015239744186401368, 0.01563881587982178, 0.015378432273864746, 0.015230976104736327, 0.015322527885437011, 0.015204992294311524, 0.015257375717163086, 0.015228832244873047, 0.015299072265625, 0.015182847976684571, 0.015180704116821288, 0.015283743858337402, 0.015227231979370117, 0.015263903617858886, 0.015275744438171387, 0.015446271896362305, 0.0152674560546875, 0.015251711845397949, 0.015187968254089355, 0.015362048149108886, 0.015348832130432128, 0.015588319778442383, 0.015335359573364259, 0.015544320106506348, 0.015507007598876953, 0.015415712356567383, 0.015417216300964355, 0.015796607971191406, 0.01859971237182617, 0.01674569511413574, 0.015301440238952637, 0.015427552223205566, 0.015243264198303222, 0.015580127716064453, 0.015332159996032714, 0.015313247680664062, 0.015255328178405761, 0.015260831832885742, 0.01531766414642334, 0.015280320167541503, 0.015333375930786132, 0.015237088203430177, 0.015310879707336425, 0.015180800437927246, 0.015407999992370606, 0.015273311614990234, 0.015305536270141602, 0.01526576042175293, 0.015343615531921387, 0.015300288200378417, 0.015355648040771485, 0.015340096473693847, 0.01528217601776123, 0.015339584350585937, 0.015261119842529296, 0.015319711685180664, 0.015398752212524415, 0.015265791893005372, 0.015316927909851074, 0.01545248031616211, 0.015277695655822755, 0.015554847717285157, 0.01544332790374756, 0.015434240341186524, 0.015554176330566407, 0.015606111526489257, 0.015411231994628906, 0.015370207786560059, 0.015286304473876954, 0.015549759864807128, 0.015317279815673828, 0.01567372798919678, 0.015378432273864746, 0.015247360229492187, 0.015224255561828614, 0.015235039710998535, 0.015317279815673828, 0.015323455810546876, 0.015288288116455079, 0.015364031791687011, 0.015288415908813477, 0.015292415618896485, 0.015290047645568848, 0.015298879623413086, 0.015195391654968261, 0.01533414363861084, 0.015253503799438477, 0.015435775756835938, 0.015261695861816407, 0.015497311592102051, 0.015327136039733886, 0.01568563175201416, 0.015333279609680176, 0.015284128189086914, 0.015366239547729492, 0.015335488319396972, 0.015226431846618652, 0.015237407684326172, 0.015400896072387696, 0.01519983959197998, 0.015142911911010743, 0.015172351837158203, 0.01520400047302246, 0.015171584129333495, 0.015278079986572265, 0.015257087707519532, 0.01529203224182129, 0.015263872146606445, 0.015264512062072754, 0.015204352378845215, 0.015197952270507813, 0.015397024154663087, 0.015283807754516602, 0.015241632461547852, 0.015394751548767089, 0.015304415702819825, 0.015274656295776368, 0.01538428783416748, 0.015363743782043457, 0.015274432182312012, 0.0152740478515625, 0.015232928276062012, 0.015234880447387696, 0.015214783668518066, 0.015331487655639648, 0.015239007949829101, 0.015257599830627442, 0.015312383651733399, 0.0152542724609375, 0.015280991554260255, 0.015383456230163574, 0.015386015892028808, 0.015243871688842774, 0.015363295555114747, 0.015309503555297851, 0.015196255683898926, 0.01520035171508789, 0.015279871940612793, 0.015298720359802245, 0.015263936042785645, 0.015259200096130372, 
0.01528384017944336, 0.01547283172607422, 0.015372672080993653, 0.015384639739990234, 0.015370240211486816, 0.01529036808013916, 0.01535110378265381, 0.015372960090637208, 0.015358016014099122, 0.015288064002990723, 0.01537660789489746, 0.015369376182556153, 0.015344703674316406, 0.015338335990905762, 0.015348320007324218, 0.015348064422607421, 0.01558035182952881, 0.01533420753479004, 0.015210847854614257, 0.015219679832458496, 0.015196864128112792, 0.015253631591796876, 0.015239040374755859, 0.015245311737060547, 0.015398624420166016, 0.015249631881713866, 0.015317055702209472, 0.015284416198730468, 0.015363871574401855, 0.015245344161987305, 0.015302656173706054, 0.015245599746704102, 0.015228639602661133, 0.01539891242980957, 0.015339103698730469, 0.015293919563293457, 0.015275168418884277, 0.015256671905517579, 0.015303359985351563, 0.015242752075195312, 0.015241567611694335, 0.015327391624450683, 0.015255552291870117, 0.015230976104736327, 0.015238143920898438, 0.015287551879882812, 0.015302528381347656, 0.015255423545837402, 0.015284223556518555, 0.015224639892578124, 0.015240960121154785, 0.015183712005615234, 0.015262304306030273, 0.015212544441223145, 0.01530031967163086, 0.015255840301513671, 0.015366144180297851, 0.015277600288391114, 0.015345600128173828, 0.015254048347473145, 0.015187968254089355, 0.015209535598754882, 0.015186783790588378, 0.015177439689636231, 0.015260191917419433, 0.015172608375549316, 0.015323904037475586, 0.015345696449279786, 0.015351200103759765, 0.015315615653991699, 0.015380319595336913, 0.015831199645996094, 0.01673040008544922, 0.015359711647033692, 0.01537382411956787, 0.015303232192993165, 0.015253472328186034, 0.015409119606018066, 0.015229215621948242, 0.01593929576873779, 0.015460351943969726, 0.015253536224365235, 0.01525545597076416, 0.015236960411071778, 0.015244383811950684, 0.015219807624816895, 0.015212320327758789, 0.015150272369384765, 0.015291232109069824, 0.015265248298645019, 0.01527660846710205, 0.01526912021636963, 0.015225567817687989, 0.015237119674682617, 0.015195679664611817, 0.015468799591064453, 0.015266016006469726, 0.015211872100830078, 0.015092384338378907, 0.01521459197998047, 0.015332991600036622, 0.01522316837310791, 0.015312352180480958, 0.015294560432434082, 0.015253503799438477, 0.015223360061645508, 0.015834848403930665, 0.015322943687438965, 0.015374688148498536, 0.015232159614562988, 0.01528115177154541, 0.015291584014892579, 0.015348352432250977, 0.015335455894470214, 0.015388671875, 0.015341535568237304, 0.01536841583251953, 0.015373663902282715, 0.01532156753540039, 0.015309951782226562, 0.015288288116455079, 0.015291296005249023, 0.015382528305053711, 0.015347647666931152, 0.015312992095947265, 0.015317055702209472, 0.015347583770751953, 0.015275424003601074, 0.015288031578063965, 0.015299712181091309, 0.015322912216186523, 0.015327263832092285, 0.01526576042175293, 0.015294464111328124, 0.015304736137390137, 0.01526576042175293, 0.015275615692138672, 0.015288736343383789, 0.015249407768249512, 0.015243264198303222, 0.015290271759033204, 0.015405152320861816, 0.015390239715576171, 0.01548755168914795, 0.01535654354095459, 0.01533743953704834, 0.015315072059631347, 0.015275584220886231, 0.015212896347045899, 0.01519820785522461, 0.015307007789611816, 0.015187711715698242, 0.015283488273620605, 0.015210240364074708, 0.015203295707702636, 0.015216768264770508, 0.015205280303955078, 0.015277055740356446, 0.015249664306640626, 0.015298208236694337, 0.015188032150268555, 0.015257599830627442, 0.01520639991760254, 
0.015299967765808106, 0.015254143714904786, 0.015212800025939941, 0.01526144027709961, 0.015341567993164062, 0.015297696113586426, 0.015276896476745605, 0.015259743690490723, 0.015194016456604004, 0.015218688011169433, 0.01516147232055664, 0.015233920097351075, 0.015219136238098145, 0.015370816230773925, 0.01543887996673584, 0.0154552001953125, 0.015388671875, 0.015357952117919921, 0.015309151649475097, 0.01552451229095459, 0.015200703620910645, 0.01516220760345459, 0.015133472442626953, 0.015229887962341308, 0.015287360191345216, 0.015164383888244629, 0.015305888175964356, 0.015323871612548829, 0.015285408020019531, 0.015377568244934083, 0.015217632293701171, 0.015239999771118165, 0.015161343574523926, 0.015142687797546387, 0.015206111907958984, 0.015211008071899413, 0.01523311996459961, 0.015154208183288574, 0.015189984321594237, 0.015245408058166503, 0.015223615646362305, 0.015219903945922852, 0.01552188777923584, 0.015241855621337891, 0.015517696380615235, 0.015749183654785157, 0.01549715232849121, 0.0153121919631958, 0.015284000396728515, 0.015211135864257812, 0.015243328094482422, 0.0151976318359375, 0.015285087585449218, 0.015290111541748046, 0.015265983581542969, 0.015322463989257813, 0.01535638427734375, 0.015359647750854492, 0.015432224273681641, 0.015427359580993652, 0.015235296249389648, 0.015407103538513184, 0.015484928131103515, 0.01528831958770752, 0.015246560096740722, 0.015372096061706543, 0.015245951652526856, 0.015329664230346679, 0.015170623779296875, 0.015221664428710938, 0.015239232063293457, 0.015269824028015137, 0.015265791893005372, 0.015238719940185547, 0.015241663932800293, 0.015267935752868653, 0.015184991836547852, 0.01529535961151123, 0.015338848114013672, 0.015387231826782227, 0.015308032035827637, 0.015554911613464355, 0.015314720153808593, 0.01528876781463623, 0.015241408348083496, 0.015170559883117676, 0.015227904319763183, 0.015229984283447266, 0.015166432380676269, 0.015221887588500977, 0.015211487770080566, 0.015343520164489746, 0.01528217601776123, 0.015474687576293946, 0.01535763168334961, 0.015224479675292968, 0.015212800025939941, 0.015182239532470703, 0.015497440338134766, 0.015400383949279785, 0.015298944473266601, 0.015215840339660645, 0.015211168289184571, 0.01523862361907959, 0.015265983581542969, 0.0152478084564209, 0.015339103698730469, 0.015108096122741698, 0.015294719696044921, 0.015593088150024415, 0.015335552215576172, 0.015292415618896485, 0.015281344413757324, 0.015204480171203613, 0.015273951530456542, 0.01530339241027832, 0.015259231567382812, 0.015214143753051758, 0.015196288108825683, 0.015237536430358887, 0.015300928115844726, 0.015300831794738769, 0.015206175804138184, 0.015239168167114257, 0.015196160316467285, 0.015265439987182617, 0.015298303604125976, 0.015200863838195801, 0.015226335525512695, 0.01529695987701416, 0.015374719619750976, 0.015394304275512695, 0.01536451244354248, 0.015304448127746582, 0.01609516716003418, 0.01540828800201416, 0.01537337589263916, 0.015363903999328613, 0.015361472129821778, 0.015288991928100586, 0.015210399627685547, 0.01530070400238037, 0.01517363166809082, 0.01527830410003662, 0.015282976150512695, 0.015293439865112305, 0.01520639991760254, 0.01520639991760254, 0.01528217601776123, 0.015295488357543945, 0.015311871528625488, 0.015335424423217774, 0.015429632186889648, 0.015471839904785156, 0.015339391708374023, 0.015389599800109864, 0.015531007766723632, 0.015393759727478027, 0.015267295837402344, 0.015368351936340332, 0.01537712001800537, 0.015372063636779785, 0.015378335952758788, 
0.01529036808013916, 0.01548697566986084, 0.015454208374023438, 0.015383808135986329, 0.015480640411376954, 0.015514143943786621, 0.015411616325378418]",tokens/s,65.2258773834051,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,5172.396032,3489.529856,0.0,3154.116608,3134.091264,s,1,14.63770703125,14.63770703125,0.0,14.63770703125,14.63770703125,14.63770703125,14.63770703125,[14.63770703125],,kWh,0.0002302281885458342,2.538882364802772e-05,7.701117271999292e-05,0.00033262818491385486,,MB,1536.049152,3976.06912,0.0,3552.575488,3441.951744,s,10,0.7592968978881836,0.07592968978881837,0.00024984955712885657,0.07589299392700195,0.07623240814208984,0.07624721221923829,0.07625905548095703,"[0.07622911834716797, 0.07564659118652343, 0.07622518157958984, 0.07586921691894531, 0.0761116180419922, 0.07578521728515625, 0.07626201629638672, 0.0759167709350586, 0.0755421142578125, 0.07570905303955078]",tokens/s,3371.5401802905735,kWh,2.2257914802080422e-06,2.454640766438241e-07,1.3470823066213524e-06,3.818337863473219e-06,tokens/kWh,67044878.990131706,MB,1545.662464,3976.06912,0.0,3552.575488,3441.954304,s,10,44.5354130859375,4.45354130859375,0.013278921274472012,4.456549072265625,4.46981279296875,4.470565576171875,4.471167802734375,"[4.44390087890625, 4.4535859375, 4.4696455078125, 4.4599736328125, 4.4596728515625, 4.45418359375, 4.471318359375, 4.43794140625, 4.45891455078125, 4.4262763671875]",tokens/s,14.146045951888313,kWh,0.00012968387308479108,1.4304451126720692e-05,5.5168324016777226e-05,0.000199156648228289,tokens/kWh,316333.9037910723,,s,630,44.533468025207526,0.07068804448445638,0.0006773691138710003,0.07054206466674805,0.07123603820800781,0.07174382591247558,0.07368806449890138,"[0.07025459289550781, 0.07029049682617187, 0.07059894561767578, 0.07102326202392578, 0.07026185607910156, 0.06990735626220704, 0.06997100830078125, 0.07017180633544921, 0.07011510467529297, 0.07136617279052734, 0.07023664093017579, 0.0700125732421875, 0.07078742218017578, 0.07081983947753906, 0.07030303955078125, 0.07023033905029297, 0.07005951690673828, 0.07128153228759766, 0.07025663757324219, 0.07017190551757813, 0.07018905639648437, 0.07010995483398437, 0.06987149047851562, 0.0700396499633789, 0.07003132629394532, 0.06986271667480469, 0.07026969909667968, 0.07070105743408203, 0.07073951721191406, 0.07075475311279297, 0.07049343872070313, 0.07028403472900391, 0.07027097320556641, 0.07032403564453125, 0.07011698913574219, 0.07028205108642578, 0.06999155426025391, 0.07010163116455079, 0.07003750610351563, 0.07019680023193359, 0.07068482971191406, 0.0704361572265625, 0.0708986587524414, 0.07059430694580078, 0.0733671646118164, 0.07072422027587891, 0.07303372955322265, 0.07078227233886719, 0.07025325012207032, 0.0706355209350586, 0.07066124725341796, 0.0701162872314453, 0.07029507446289063, 0.07018946838378906, 0.07118377685546876, 0.07116809844970703, 0.0713016357421875, 0.07134848022460938, 
0.0707704315185547, 0.07078912353515625, 0.07063686370849609, 0.07058092498779298, 0.07047577667236328, 0.0701028823852539, 0.06999136352539062, 0.07046758270263671, 0.07038915252685547, 0.07014822387695313, 0.07068466949462891, 0.070961181640625, 0.07070764923095703, 0.0703795166015625, 0.07042047882080078, 0.07177625274658203, 0.0702562255859375, 0.07035536193847657, 0.07025206756591797, 0.07015676879882812, 0.0703078384399414, 0.07033446502685547, 0.0702955551147461, 0.07085260772705078, 0.07054950714111329, 0.0703135986328125, 0.07048537445068359, 0.0707096939086914, 0.07089209747314452, 0.07110451507568359, 0.07077788543701172, 0.07028144073486328, 0.07038028717041016, 0.07044403076171875, 0.07008563232421874, 0.07033356475830078, 0.07039814758300782, 0.07078163146972656, 0.07067852783203125, 0.07031986999511719, 0.07060643005371094, 0.07077037048339843, 0.07054985809326172, 0.0709249267578125, 0.07040204620361327, 0.07040614318847656, 0.070266845703125, 0.07442845153808594, 0.07058415985107422, 0.07025603485107422, 0.07060959625244141, 0.0705263671875, 0.07079341125488281, 0.07124015808105469, 0.07090972900390625, 0.07090198516845703, 0.07094387054443359, 0.07058460998535156, 0.07068118286132813, 0.07065503692626954, 0.0704151382446289, 0.07043497467041016, 0.0705343017578125, 0.07058678436279296, 0.07069535827636719, 0.07067378997802734, 0.07132224273681641, 0.07430143737792969, 0.07079730987548828, 0.07593708801269532, 0.0719776611328125, 0.07068262481689454, 0.07035430145263671, 0.07021836853027344, 0.07026892852783204, 0.07031603240966797, 0.07011465454101562, 0.070210205078125, 0.07065103912353515, 0.07078157043457031, 0.07100412750244141, 0.07164749145507812, 0.07181721496582032, 0.07049369812011719, 0.07115007781982421, 0.07240294647216797, 0.0705607681274414, 0.07033139038085938, 0.07024639892578124, 0.07023827362060547, 0.07167993927001953, 0.07048105621337891, 0.07052374267578125, 0.07093196868896484, 0.0726451187133789, 0.07078912353515625, 0.07067648315429688, 0.07109017944335938, 0.07123558044433594, 0.07138508605957031, 0.07073737335205078, 0.0708408660888672, 0.07075635528564453, 0.07064514923095704, 0.07098019409179687, 0.07141075134277344, 0.0708371810913086, 0.07131059265136719, 0.07094924926757812, 0.07051712036132812, 0.07057612609863281, 0.07108134460449218, 0.07045900726318359, 0.07086573028564454, 0.07048416137695312, 0.07019929504394531, 0.07027839660644532, 0.07021644592285156, 0.07015219116210937, 0.07030169677734376, 0.07049625396728515, 0.0705638427734375, 0.07071334075927735, 0.0703116455078125, 0.0703115234375, 0.07378537750244141, 0.07123529815673828, 0.07203231811523438, 0.07072681427001953, 0.07160758209228515, 0.07043305969238281, 0.07042237091064453, 0.07047840118408204, 0.0709017562866211, 0.0706921615600586, 0.07111689758300781, 0.07020515441894531, 0.07138710021972657, 0.07042332458496094, 0.07072118377685546, 0.07021135711669922, 0.07330630493164063, 0.07084083557128906, 0.0736904296875, 0.07074793243408203, 0.07036937713623047, 0.07065248107910156, 0.07015245056152344, 0.07046758270263671, 0.07168204498291016, 0.07022182464599609, 0.07041545867919922, 0.07035334777832031, 0.07059235382080079, 0.07111341094970704, 0.07049209594726563, 0.07085651397705078, 0.07047187042236328, 0.0704000015258789, 0.07057373046875, 0.07038515472412109, 0.070216064453125, 0.07035747528076172, 0.07110585784912109, 0.07040070343017578, 0.07038486480712891, 0.07051900482177734, 0.07036685180664062, 0.07018386840820312, 0.07030579376220703, 0.0702525405883789, 
0.07061094665527344, 0.07043395233154297, 0.07023638153076171, 0.07044569396972657, 0.07022528076171874, 0.07036319732666016, 0.07077126312255859, 0.07070105743408203, 0.07152547454833984, 0.07160454559326172, 0.07105801391601563, 0.07140668487548828, 0.071318115234375, 0.07065376281738281, 0.07105587005615234, 0.0742973403930664, 0.07085417938232422, 0.07047135925292969, 0.07045836639404297, 0.07037315368652344, 0.07070489501953126, 0.07101983642578125, 0.0707470703125, 0.07059516906738281, 0.0702938232421875, 0.070517822265625, 0.07045830535888672, 0.07100006103515626, 0.07084236907958984, 0.07043468475341796, 0.07042457580566407, 0.07024652862548827, 0.0703078384399414, 0.07088521575927734, 0.07344060516357422, 0.07078336334228516, 0.07125971221923828, 0.07319232177734375, 0.07104307556152344, 0.07061299133300782, 0.07084441375732421, 0.07044710540771484, 0.07052902221679687, 0.0710544662475586, 0.0704740447998047, 0.07033203125, 0.07044960021972656, 0.07049881744384766, 0.07076274871826171, 0.07028096008300781, 0.07046665954589844, 0.07037020874023438, 0.07068876647949218, 0.07059251403808593, 0.07061913299560547, 0.07111475372314453, 0.07052428436279297, 0.07084095764160156, 0.07278591918945312, 0.07056950378417969, 0.07045782470703126, 0.0706087646484375, 0.07052006530761719, 0.07041522979736328, 0.07041126251220703, 0.07039282989501953, 0.0707295684814453, 0.07095465850830078, 0.07071318054199219, 0.07030646514892579, 0.07029532623291015, 0.0702957763671875, 0.07073792266845703, 0.07036109161376954, 0.07049225616455078, 0.07109990692138672, 0.07410857391357421, 0.07052278137207031, 0.07047388458251953, 0.07158035278320313, 0.07054847717285156, 0.07046348571777344, 0.07065702056884765, 0.0707375717163086, 0.070406494140625, 0.07060070037841797, 0.0706662368774414, 0.07092604827880859, 0.07068701171875, 0.07092633819580078, 0.07046348571777344, 0.07069900512695312, 0.07162681579589844, 0.07068870544433593, 0.07084031677246094, 0.07049830627441406, 0.070508544921875, 0.07027712249755859, 0.07039590454101563, 0.07042867279052735, 0.0706170883178711, 0.07043036651611329, 0.070443359375, 0.07073996734619141, 0.07063756561279297, 0.07055974578857421, 0.0705308837890625, 0.07062137603759766, 0.07053036499023438, 0.07048812866210938, 0.07085324859619141, 0.07063740539550781, 0.07045954895019531, 0.07036313629150391, 0.07053517150878906, 0.0708136978149414, 0.07061913299560547, 0.07089730834960938, 0.07095689392089843, 0.07059097290039062, 0.07050035095214843, 0.07051468658447266, 0.07110451507568359, 0.07052082824707032, 0.07100006103515626, 0.07071241760253906, 0.07049270629882813, 0.07054310607910157, 0.0705684814453125, 0.07066019439697266, 0.07055769348144532, 0.07072518157958985, 0.07060931396484375, 0.07054950714111329, 0.07059862518310547, 0.07060896301269531, 0.07050633239746094, 0.07066435241699219, 0.07079116821289062, 0.07076831817626954, 0.0706053466796875, 0.07061865234375, 0.0710591049194336, 0.07078896331787109, 0.07117424011230469, 0.07100800323486328, 0.07123551940917969, 0.07133692932128906, 0.07120281219482422, 0.0712590103149414, 0.07101849365234375, 0.07111679840087891, 0.07088550567626953, 0.07303068542480469, 0.07136911773681641, 0.07037590026855468, 0.07028937530517577, 0.07056793975830078, 0.07105149078369141, 0.07069468688964843, 0.07055769348144532, 0.07063251495361328, 0.07119913482666015, 0.07104492950439453, 0.07089635467529297, 0.07105449676513671, 0.07102127838134766, 0.07142412567138672, 0.07103257751464843, 0.07090386962890625, 0.07084639739990234, 
0.07038387298583984, 0.07117005157470703, 0.0736822738647461, 0.07117686462402344, 0.07095091247558594, 0.07113542175292968, 0.07236998748779297, 0.07078297424316406, 0.0705084457397461, 0.07335485076904297, 0.07090198516845703, 0.07066038513183594, 0.07063740539550781, 0.07051280212402344, 0.07072358703613281, 0.0706552963256836, 0.07133869171142578, 0.07071743774414062, 0.07050556945800782, 0.0702022705078125, 0.07110176086425782, 0.07081849670410156, 0.07067648315429688, 0.07069900512695312, 0.07060848236083984, 0.0703012466430664, 0.0725123519897461, 0.07054102325439453, 0.07044697570800781, 0.07020175933837891, 0.07091814422607422, 0.07085465240478515, 0.07047513580322265, 0.07081433868408203, 0.07121286773681641, 0.07105554962158203, 0.07048729705810547, 0.07076662445068359, 0.07097625732421875, 0.07044092559814454, 0.07057523345947266, 0.07027104187011719, 0.07056172943115234, 0.07066214752197265, 0.07093452453613282, 0.07048947143554687, 0.07034886169433593, 0.07061686706542969, 0.07043766021728516, 0.07034467315673829, 0.07042256164550781, 0.07112268829345703, 0.07041458892822265, 0.07017014312744141, 0.06997398376464843, 0.07018547058105469, 0.07014012908935546, 0.07058306884765625, 0.0711546859741211, 0.07044239807128906, 0.07217008209228516, 0.07071949005126953, 0.07136460876464844, 0.07063961791992188, 0.07035289764404297, 0.07083171081542969, 0.07037929534912109, 0.0701176986694336, 0.07010950469970703, 0.07026483154296875, 0.07013990020751953, 0.07031398773193359, 0.07071334075927735, 0.07077808380126953, 0.07055570983886719, 0.07032460784912109, 0.07011158752441406, 0.07018701171875, 0.06996173095703125, 0.07043807983398437, 0.07037830352783203, 0.07012099456787109, 0.0703160629272461, 0.0700006103515625, 0.07000937652587891, 0.07013919830322266, 0.0706292495727539, 0.07022822570800781, 0.07073638153076171, 0.07042633819580078, 0.07111539459228515, 0.0703536605834961, 0.07036201477050781, 0.0702786865234375, 0.07044876861572266, 0.07022064208984374, 0.07044915008544922, 0.07018402862548828, 0.07020547485351562, 0.06997081756591797, 0.07032422637939453, 0.07030809783935547, 0.070353759765625, 0.07043708801269531, 0.07008592224121094, 0.07041683197021484, 0.07017501068115234, 0.07067183685302734, 0.07037187194824218, 0.07016966247558594, 0.07027152252197266, 0.07074147033691407, 0.07021868896484375, 0.0702577896118164, 0.07065484619140625, 0.07048336029052735, 0.07083478546142578, 0.07182518768310547, 0.070994140625, 0.07041868591308593, 0.07029734039306641, 0.07318118286132813, 0.07095529937744141, 0.07051554870605468, 0.07033331298828124, 0.07039794921875, 0.07125196838378907, 0.07031129455566407, 0.0710598373413086, 0.07088972473144531, 0.07051459503173828, 0.0706879653930664, 0.07039299011230468, 0.07034960174560546, 0.07016556549072266, 0.07068386840820312, 0.07085327911376953, 0.07057817840576172, 0.07027417755126954, 0.07067021179199219, 0.0701628189086914, 0.07216191864013671, 0.07178173065185547, 0.07108470153808594, 0.07074201965332032, 0.0716344985961914, 0.07246688079833985, 0.07123049926757813, 0.070955810546875, 0.07068077087402344, 0.07099104309082031, 0.07070598602294922, 0.07059046173095704, 0.07053107452392578, 0.07051999664306641, 0.07049295806884766, 0.07063260650634766, 0.07047052764892578, 0.07049359893798827, 0.07068694305419922, 0.07129945373535156, 0.0705184326171875, 0.07053510284423828, 0.07119721221923828, 0.07072345733642578, 0.07077069091796875, 0.07170419311523438, 0.07016448211669922, 0.07031584167480469, 0.0701928939819336, 
0.07016429138183594, 0.07021177673339844, 0.06993571472167968, 0.07096099090576172, 0.0703437728881836, 0.07005305480957032, 0.0700962905883789, 0.07022418975830078, 0.07013497924804687, 0.07026687622070313, 0.07009136199951171, 0.06977158355712891, 0.07055084991455078, 0.07009715270996093, 0.06997545623779297, 0.06998550415039062, 0.06998191833496094, 0.07002726745605468, 0.07075430297851562, 0.07022898864746094, 0.07000371551513672, 0.06979379272460938, 0.07008777618408203, 0.0698971176147461, 0.06996572875976563, 0.06998336029052735, 0.07019618988037109, 0.06998834991455079, 0.07015395355224609, 0.07005548858642578, 0.07188143920898438, 0.07064575958251954, 0.07032422637939453, 0.07060684967041016, 0.07007027435302735, 0.07003955078125, 0.07038925170898437, 0.06997154998779297, 0.06989302062988281, 0.069930908203125, 0.07009721374511718, 0.0699749755859375, 0.0702778549194336, 0.07048406219482421, 0.07029551696777343, 0.07041251373291016, 0.07013155364990234, 0.06997811126708985, 0.07314636993408204, 0.07031398773193359, 0.07047151947021485, 0.07071788787841797, 0.07005974578857421, 0.0699139175415039, 0.0699557113647461, 0.0702078399658203, 0.07106172943115234, 0.07001292419433594, 0.07018489837646484, 0.0699781723022461]",tokens/s,14.146663799985163,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.21.4,,,,0.12.0,,,,MB,19031.883776,9810.345984,0.0,9558.818816,9558.429696,s,1,34.86158203125,34.86158203125,0.0,34.86158203125,34.86158203125,34.86158203125,34.86158203125,[34.86158203125],,kWh,0.0008164202400374885,9.005013364084165e-05,0.0002745374418519908,0.001181007815530321,,MB,6239.5392,10443.685888,0.0,10020.192256,9855.258624,s,10,1.4222306823730468,0.14222306823730468,0.00018526399393422504,0.14226337432861327,0.1424356994628906,0.14246687316894532,0.14249181213378906,"[0.14231196594238282, 0.1421041259765625, 0.14230911254882814, 0.14181199645996093, 0.142498046875, 0.14214218139648438, 0.1422895050048828, 0.1420977325439453, 0.14242877197265624, 0.14223724365234375]",tokens/s,1799.9892926853056,kWh,4.2287607165462756e-06,4.663584646711207e-07,2.797900789043633e-06,7.49301997026103e-06,tokens/kWh,34165129.81628713,MB,6243.790848,10445.78304,0.0,10022.289408,9856.30976,s,10,68.76144091796876,6.876144091796876,0.02386239486544287,6.878005126953125,6.90415712890625,6.907386669921875,6.909970302734375,"[6.9106162109375, 6.8609580078125, 6.87934033203125, 6.876669921875, 6.88435009765625, 6.89791552734375, 6.8729033203125, 6.837279296875, 6.83796875, 6.903439453125]",tokens/s,9.16211166592014,kWh,0.0002006951345172039,2.2137602313013926e-05,0.00010424471262275436,0.0003270774494529722,tokens/kWh,192614.92990533504,,s,630,68.75911467742922,0.10914145186893526,0.0009691064384644023,0.10893747329711914,0.11017671279907228,0.11100705223083496,0.11272734603881837,"[0.10943292999267579, 0.1092613754272461, 0.10953958129882813, 0.10978848266601562, 0.11066844940185547, 0.10995929718017577, 0.10950364685058593, 
0.11064201354980469, 0.10981798553466797, 0.10961014556884766, 0.11004707336425781, 0.10984333038330078, 0.10943283081054687, 0.1094853744506836, 0.10941305541992187, 0.10894745635986328, 0.10902857971191406, 0.10895782470703125, 0.10922870635986329, 0.10914816284179688, 0.10917830657958984, 0.10888249969482422, 0.10958029174804687, 0.10970252990722656, 0.10920780944824218, 0.10949846649169923, 0.1111039047241211, 0.10935334777832031, 0.11616806030273437, 0.10949491119384766, 0.10883679962158203, 0.10948774719238281, 0.10930799865722657, 0.10912957000732422, 0.10893772888183594, 0.10941232299804687, 0.10906012725830078, 0.10951615905761719, 0.10930368041992188, 0.10874912261962891, 0.10871343994140625, 0.1128539810180664, 0.10953497314453126, 0.10910873413085938, 0.10967715454101562, 0.10895069122314453, 0.10912255859375, 0.10994477081298829, 0.1091437759399414, 0.1094208984375, 0.11004108428955078, 0.10985881805419923, 0.11033948516845703, 0.11033763122558594, 0.1094131851196289, 0.10971155548095703, 0.11012448120117188, 0.1091526107788086, 0.11091375732421875, 0.1094471664428711, 0.10938777923583984, 0.10963740539550781, 0.10989385223388672, 0.10878828430175781, 0.10953523254394532, 0.10879385375976562, 0.10887782287597657, 0.10941993713378906, 0.11114147186279297, 0.10931814575195313, 0.10893721771240235, 0.10861936187744141, 0.10863247680664062, 0.10860749053955078, 0.10833100891113281, 0.1082709732055664, 0.10827635192871093, 0.10825894165039063, 0.11153241729736328, 0.10985062408447266, 0.10860953521728516, 0.10885501098632812, 0.10906038665771485, 0.10789068603515625, 0.108548095703125, 0.10825318145751953, 0.1083719711303711, 0.10903692626953125, 0.108763427734375, 0.11213859558105468, 0.10874060821533203, 0.10859110260009766, 0.10821427154541016, 0.10885440063476562, 0.10877375793457031, 0.10873452758789062, 0.10888188934326172, 0.10912358093261719, 0.10871651458740235, 0.10837811279296874, 0.10910105895996093, 0.10808675384521485, 0.1087940444946289, 0.10869590759277344, 0.10908672332763672, 0.10882406616210938, 0.10852812957763672, 0.10842082977294921, 0.10860326385498047, 0.10864067077636719, 0.10833715057373047, 0.10855567932128907, 0.10888374328613282, 0.10863494110107422, 0.10885692596435546, 0.10877110290527343, 0.10979328155517579, 0.10883340454101563, 0.10933657836914062, 0.10978304290771485, 0.10931814575195313, 0.10860284423828125, 0.10869609832763671, 0.10843135833740235, 0.10897357177734375, 0.10841088104248046, 0.10859763336181641, 0.10930502319335937, 0.1088100814819336, 0.10874732971191406, 0.11592540740966797, 0.10908262634277344, 0.10902528381347656, 0.1092833251953125, 0.10878112030029297, 0.10871571350097656, 0.10881088256835937, 0.10857039642333985, 0.10849478149414063, 0.10917724609375, 0.1081098861694336, 0.1091296615600586, 0.10886934661865234, 0.10904399871826172, 0.10935606384277344, 0.1092100830078125, 0.1088721923828125, 0.11095231628417969, 0.10918726348876953, 0.10874054718017578, 0.10880742645263672, 0.11049433898925781, 0.10870387268066406, 0.10919731140136718, 0.10848198699951171, 0.108251708984375, 0.10840678405761718, 0.10897721862792968, 0.10872927856445312, 0.11256012725830078, 0.10875411224365235, 0.10881238555908203, 0.10899632263183594, 0.10880921936035157, 0.10823474884033203, 0.10943456268310547, 0.10998719787597656, 0.10865280151367188, 0.10989433288574219, 0.11279564666748047, 0.11102963256835938, 0.1088620834350586, 0.10864435577392578, 0.10837401580810548, 0.10874470520019532, 0.10873814392089844, 0.1083861083984375, 
0.10964205169677735, 0.10986758422851563, 0.10942025756835938, 0.10860134124755859, 0.10919116973876954, 0.10834941101074219, 0.10845597076416015, 0.10852092742919922, 0.10798246765136718, 0.10841289520263672, 0.10881734466552734, 0.10929910278320312, 0.10845238494873047, 0.10928307342529296, 0.10818790435791016, 0.10824435424804688, 0.10846272277832031, 0.10826751708984375, 0.1089702377319336, 0.10860883331298828, 0.109275390625, 0.10855443572998047, 0.10878940582275391, 0.10839807891845703, 0.10860822296142578, 0.10903564453125, 0.10863410949707031, 0.10963897705078125, 0.1097489242553711, 0.10984652709960938, 0.10981740570068359, 0.1106231689453125, 0.10846214294433594, 0.10913897705078125, 0.10896466827392579, 0.10848470306396485, 0.10926857757568359, 0.10913126373291016, 0.10900806427001954, 0.10962505340576172, 0.10953526306152343, 0.1084591064453125, 0.10850569915771484, 0.11021711730957032, 0.11019097900390624, 0.10916659545898437, 0.10980060577392578, 0.10892681884765625, 0.11183145904541016, 0.10943456268310547, 0.10877046203613282, 0.10921139526367188, 0.10922332763671876, 0.10932665252685547, 0.10929939270019531, 0.10907302093505859, 0.10916633605957031, 0.10934499359130859, 0.10870988464355469, 0.10871398162841797, 0.10994393920898438, 0.10900089263916016, 0.10842182159423829, 0.10886732482910157, 0.10889971160888672, 0.11055382537841797, 0.10867692565917969, 0.10894732666015625, 0.10858544158935547, 0.10870780944824218, 0.10837161254882813, 0.10906047821044922, 0.1113169937133789, 0.108998046875, 0.10962185668945312, 0.10924031829833984, 0.1091923828125, 0.10847727966308594, 0.10911161804199218, 0.10892047882080078, 0.10898432159423828, 0.11239424133300781, 0.10969292449951172, 0.10921984100341797, 0.1093222427368164, 0.10868323516845703, 0.10877545928955078, 0.10878975677490234, 0.10892195129394532, 0.10883475494384766, 0.10898937225341797, 0.10850921630859375, 0.10854166412353515, 0.10886115264892578, 0.10884153747558593, 0.108740478515625, 0.10845935821533204, 0.1091137924194336, 0.10858531188964844, 0.10892390441894531, 0.10974310302734375, 0.10881228637695313, 0.1090334701538086, 0.10883277130126953, 0.10860543823242187, 0.10875904083251953, 0.10970633697509766, 0.10886876678466798, 0.10897599792480468, 0.10877500915527344, 0.10910662078857422, 0.10920022583007813, 0.10996851348876953, 0.10876335906982422, 0.10948623657226562, 0.10931865692138672, 0.10881375885009766, 0.10964640045166016, 0.10954137420654297, 0.10950768280029297, 0.10925567626953125, 0.11284172821044922, 0.10905897521972656, 0.10937548828125, 0.10988134765625, 0.1092279052734375, 0.10934220886230468, 0.1098674545288086, 0.10883296203613281, 0.10914835357666015, 0.11027027130126953, 0.10877133178710938, 0.10902877044677735, 0.1100212173461914, 0.11124326324462891, 0.10984012603759766, 0.10929545593261719, 0.10921148681640624, 0.11132879638671875, 0.1095800018310547, 0.10917356872558594, 0.1096480941772461, 0.10928498840332031, 0.10947990417480469, 0.11034226989746093, 0.11075708770751953, 0.11074342346191406, 0.11153654479980468, 0.11060684967041015, 0.11018022155761718, 0.10955484771728516, 0.10930274963378907, 0.10891846466064453, 0.10981314849853516, 0.11042880249023437, 0.10950204467773438, 0.1103691177368164, 0.11041964721679688, 0.1088662109375, 0.109635009765625, 0.10979590606689453, 0.11307417297363281, 0.10922921752929687, 0.10947657775878906, 0.10872640228271484, 0.10912716674804687, 0.10964838409423829, 0.10858905792236329, 0.10949836730957031, 0.10907577514648438, 0.10865529632568359, 
0.11181670379638672, 0.11033299255371094, 0.10898060607910157, 0.10930438232421875, 0.10989772796630859, 0.10862409973144531, 0.10860726165771484, 0.1090887680053711, 0.10825885009765625, 0.10847996520996094, 0.10861567687988281, 0.11173990631103516, 0.1087548828125, 0.10893452453613281, 0.10856665802001954, 0.10914873504638672, 0.1091747817993164, 0.10846412658691407, 0.10868921661376953, 0.10899584197998047, 0.10866397094726563, 0.10872573089599609, 0.10920275115966797, 0.10865945434570312, 0.10904601287841797, 0.10944687652587891, 0.1084576644897461, 0.10890505981445313, 0.10913996887207031, 0.10859699249267578, 0.10982077026367187, 0.10908262634277344, 0.10903942108154296, 0.11019692993164062, 0.10889746856689453, 0.10844448089599609, 0.10908614349365234, 0.10843309020996093, 0.11125440216064453, 0.1103622055053711, 0.10890691375732423, 0.10861270141601563, 0.10928546905517578, 0.10905683135986328, 0.10816515350341797, 0.10855129241943359, 0.10883277130126953, 0.1077767333984375, 0.10851750183105469, 0.10860489654541015, 0.10813289642333984, 0.11095859527587891, 0.10966150665283203, 0.10870851135253906, 0.10944310760498047, 0.10908048248291016, 0.1082594223022461, 0.10818348693847656, 0.10901920318603515, 0.10832486724853516, 0.10940825653076172, 0.10889596557617187, 0.10833334350585938, 0.10855219268798828, 0.1086750717163086, 0.10886969757080078, 0.10856041717529297, 0.10904156494140625, 0.10836172485351563, 0.10912973022460938, 0.1087242202758789, 0.10823065948486328, 0.11079475402832031, 0.1089966049194336, 0.10836172485351563, 0.10835330963134765, 0.10884534454345703, 0.10843539428710937, 0.10825523376464843, 0.11253292846679687, 0.10859756469726563, 0.11165878295898438, 0.10892336273193359, 0.10859455871582031, 0.10857459259033203, 0.10894518280029297, 0.10905903625488281, 0.10881433868408204, 0.10995916748046874, 0.10864435577392578, 0.1119594268798828, 0.11017632293701173, 0.10870432281494141, 0.10878566741943359, 0.10871807861328125, 0.10777305603027344, 0.10874323272705078, 0.10954380798339844, 0.10849478149414063, 0.10868931579589844, 0.10874889373779296, 0.10812416076660156, 0.10970832061767578, 0.10857772827148438, 0.10840486145019532, 0.10879180908203125, 0.10838006591796875, 0.10829312133789062, 0.10876319885253906, 0.10881529235839844, 0.10847151947021484, 0.10825382232666016, 0.10853094482421875, 0.11029776000976563, 0.10895795440673828, 0.10833100891113281, 0.10792559814453125, 0.10885116577148438, 0.10870368194580078, 0.10824041748046875, 0.10809391784667968, 0.10806585693359375, 0.10812652587890625, 0.10810582733154298, 0.10795174407958984, 0.10768822479248047, 0.10848729705810548, 0.10918707275390625, 0.10870783996582031, 0.1111732177734375, 0.1084952621459961, 0.1082060775756836, 0.10834662628173829, 0.10779289245605468, 0.10743014526367188, 0.10779033660888672, 0.10819174194335937, 0.10771865844726562, 0.10794393920898437, 0.1079930877685547, 0.10778828430175781, 0.10795212554931641, 0.11105177307128906, 0.10871910095214844, 0.10833920288085938, 0.10895769500732422, 0.10836377716064453, 0.11097277069091797, 0.10826153564453125, 0.1077391357421875, 0.10797261047363281, 0.10797452545166016, 0.10799273681640625, 0.10815062713623047, 0.10900489807128906, 0.10840732574462891, 0.10940006256103516, 0.10870355224609375, 0.10888563537597656, 0.10960953521728516, 0.10828201293945312, 0.10831759643554688, 0.1087652816772461, 0.10898108673095704, 0.10801766204833985, 0.10858860778808593, 0.10849734497070312, 0.10825933074951172, 0.10878908538818359, 0.10847702026367187, 
0.1084059829711914, 0.11241149139404297, 0.10881983947753907, 0.10847267150878906, 0.10845212554931641, 0.10840882873535156, 0.10809552001953125, 0.10877948760986328, 0.10810972595214843, 0.10808329772949218, 0.10925603485107421, 0.10893180847167969, 0.10828348541259766, 0.10827811431884765, 0.10795132446289063, 0.10786077117919922, 0.10829788970947266, 0.10799549102783203, 0.1078395538330078, 0.10829545593261719, 0.10805884552001953, 0.10868780517578125, 0.10820390319824219, 0.10847782135009766, 0.10777423858642578, 0.10863804626464844, 0.10804192352294922, 0.10761721801757812, 0.108042236328125, 0.10855606079101562, 0.1078130874633789, 0.10880818939208985, 0.10862384033203125, 0.10844572448730469, 0.10868121337890625, 0.10866687774658203, 0.10800332641601562, 0.10808934020996094, 0.1086579818725586, 0.10827378845214844, 0.10854617309570312, 0.10881468963623046, 0.10848880004882812, 0.10869964599609375, 0.10979122924804688, 0.10784060668945313, 0.10831120300292969, 0.10950287628173828, 0.10897545623779296, 0.11139276885986328, 0.11005545806884766, 0.1092423324584961, 0.10955497741699219, 0.10846076965332031, 0.10876927947998047, 0.10904281616210937, 0.10891910552978516, 0.10874694061279297, 0.10931417846679688, 0.10917708587646484, 0.10951679992675781, 0.11095859527587891, 0.110166015625, 0.1093625259399414, 0.10979721832275391, 0.10925682830810547, 0.10876521301269532, 0.10914195251464844, 0.10918780517578125, 0.10915644836425781, 0.10963651275634766, 0.10944409942626954, 0.10930585479736328, 0.10967049407958984, 0.10994473266601562, 0.10929766082763671, 0.10925055694580078, 0.10983014678955078, 0.11044249725341797, 0.10943836975097657, 0.10920825958251953, 0.10840873718261719, 0.10940787506103515, 0.11097945404052735, 0.10880409240722656, 0.10943215942382813, 0.10963740539550781, 0.10923462677001954, 0.11007635498046875, 0.1092767333984375, 0.10861942291259766, 0.11210422515869141, 0.10931123352050781, 0.10847235107421875, 0.10912841796875, 0.10910829162597656, 0.10928374481201172, 0.11104515075683594, 0.11103846740722656, 0.11015513610839844, 0.11422720336914062, 0.11035507202148437, 0.10906419372558594, 0.10913177490234376, 0.10968838500976562, 0.10891923522949219, 0.10941439819335938, 0.10960281372070313, 0.10872013092041016, 0.10910723114013672, 0.10927919769287109, 0.10870783996582031]",tokens/s,9.162421636106421,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call 
last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 616, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 161, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 283, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = 
old_value.to(device) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.42 GiB is free. Process 115300 has 13.32 GiB memory in use. Of the allocated memory 13.21 GiB is allocated by PyTorch, and 1.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: 
OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,24223.338496,13792.837632,0.0,13390.31552,13325.885952,s,1,48.88960546875,48.88960546875,0.0,48.88960546875,48.88960546875,48.88960546875,48.88960546875,[48.88960546875],,kWh,0.0012165452055749786,0.00013418700366083555,0.0003934894814580092,0.0017442216906938233,,MB,2955.804672,14019.330048,0.0,13595.836416,13509.068288,s,10,2.188134475708008,0.2188134475708008,0.003840627109715578,0.2175714874267578,0.21963419799804687,0.22491002502441404,0.22913068664550781,"[0.2181899871826172, 0.2171780090332031, 0.21755401611328126, 0.2184617919921875, 0.23018585205078124, 0.21826332092285156, 0.21758895874023437, 0.21628834533691407, 0.21742726135253906, 0.21699693298339845]",tokens/s,1169.946375974753,kWh,6.3714201750909134e-06,7.026585575769657e-07,4.1100576358693095e-06,1.118413636853719e-05,tokens/kWh,22889563.535738885,MB,2960.03584,14050.787328,0.0,13627.293696,13437.11232,s,10,124.41144726562501,12.4411447265625,0.053764586755183766,12.4295458984375,12.5216775390625,12.539193261718749,12.55320583984375,"[12.3916240234375, 12.3907666015625, 12.43857421875, 12.556708984375, 12.4526259765625, 12.420517578125, 12.4487021484375, 12.4065908203125, 12.3875517578125, 12.51778515625]",tokens/s,5.063842707776856,kWh,0.00036170330105532005,3.9898063393609525e-05,0.0001616840194437314,0.0005632853838926611,tokens/kWh,111843.83937788308,,s,630,124.4088142852783,0.19747430838933064,0.0019319183734660354,0.1970931167602539,0.1999227020263672,0.2015409126281738,0.20449177673339844,"[0.1963662109375, 0.19910797119140625, 0.19544551086425782, 0.19553689575195313, 0.19727154541015626, 0.19493043518066405, 0.19721586608886718, 0.19669442749023439, 0.1961412811279297, 0.19643907165527344, 0.1957222137451172, 0.19646633911132813, 0.19515193176269532, 0.19678019714355469, 0.19734130859375, 0.1972797393798828, 0.19605708312988282, 0.19533209228515624, 0.19575144958496093, 0.19655728149414062, 0.1958419189453125, 0.19508233642578124, 0.19382275390625, 0.19446783447265625, 0.1957475128173828, 0.19597325134277344, 0.19520941162109376, 0.19451017761230469, 0.19377833557128907, 0.19469517517089843, 0.19591513061523438, 0.19480953979492188, 0.19463693237304688, 0.19647879028320311, 0.19559004211425782, 0.19541615295410156, 0.19726092529296874, 
0.2010873565673828, 0.19816432189941408, 0.19739897155761718, 0.19603018188476562, 0.19730021667480468, 0.19628810119628906, 0.1978492431640625, 0.1977794952392578, 0.19791481018066406, 0.1974169921875, 0.1966790008544922, 0.1970919952392578, 0.1986826171875, 0.19763232421875, 0.1996894989013672, 0.20193724060058593, 0.19935443115234375, 0.1977150421142578, 0.19671072387695313, 0.19657363891601562, 0.19944422912597656, 0.19695834350585936, 0.1977367401123047, 0.19704351806640624, 0.19662696838378907, 0.1974416961669922, 0.19714813232421874, 0.19607606506347655, 0.19556048583984376, 0.20033970642089843, 0.19608650207519532, 0.19687330627441407, 0.19872854614257812, 0.198546875, 0.1972558135986328, 0.1968758087158203, 0.1979601593017578, 0.1981808624267578, 0.1968599090576172, 0.19678720092773438, 0.19758387756347656, 0.1978470458984375, 0.19723068237304686, 0.19612191772460938, 0.1951995849609375, 0.19759263610839845, 0.19793344116210937, 0.19717904663085936, 0.19787794494628907, 0.19983372497558594, 0.19693986511230469, 0.19611648559570313, 0.19634381103515625, 0.19697254943847656, 0.19684147644042968, 0.19530496215820312, 0.19828582763671876, 0.19579708862304687, 0.1964520263671875, 0.19548796081542968, 0.19707225036621093, 0.19698342895507812, 0.19642774963378906, 0.19672434997558594, 0.19595919799804687, 0.19867240905761718, 0.19603248596191405, 0.195274169921875, 0.1961715545654297, 0.19606192016601562, 0.1987108154296875, 0.1971408996582031, 0.1960769958496094, 0.19598947143554687, 0.197015869140625, 0.19616989135742188, 0.1970341796875, 0.19547346496582033, 0.19502490234375, 0.19464991760253905, 0.19429763793945312, 0.19504698181152344, 0.19562173461914062, 0.19573721313476564, 0.19624998474121094, 0.19502079772949218, 0.19472998046875, 0.19617996215820313, 0.19674710083007813, 0.1963725128173828, 0.196712646484375, 0.1971847686767578, 0.19629661560058595, 0.1962977294921875, 0.1954915771484375, 0.1992911376953125, 0.1971261444091797, 0.19743743896484375, 0.19851878356933594, 0.20238925170898436, 0.19642361450195311, 0.19541023254394532, 0.19637452697753907, 0.19634080505371093, 0.20442950439453125, 0.1949908447265625, 0.19613221740722656, 0.196465087890625, 0.19645663452148437, 0.19550799560546875, 0.19642771911621093, 0.1959713592529297, 0.19649116516113282, 0.1964319610595703, 0.19650889587402343, 0.19839059448242188, 0.19646380615234374, 0.19700201416015625, 0.19606732177734376, 0.19531535339355469, 0.20032546997070313, 0.19697254943847656, 0.19687628173828126, 0.19699302673339844, 0.19726124572753906, 0.1969541778564453, 0.1975577850341797, 0.19663510131835937, 0.1975633850097656, 0.1980958709716797, 0.19759103393554686, 0.1973442840576172, 0.19745277404785155, 0.19898573303222655, 0.19856588745117187, 0.19694364929199218, 0.1974990997314453, 0.19749034118652345, 0.19817465209960938, 0.1977225341796875, 0.19721128845214844, 0.19687065124511718, 0.19660989379882812, 0.19801657104492187, 0.19792582702636718, 0.19775401306152343, 0.19940028381347658, 0.19794534301757813, 0.19808665466308595, 0.19940879821777344, 0.20092720031738281, 0.19843775939941405, 0.1984625244140625, 0.19994639587402344, 0.19721090698242189, 0.1980641326904297, 0.19791990661621095, 0.19714646911621095, 0.20300083923339843, 0.19769343566894532, 0.19825765991210936, 0.19790103149414062, 0.19722633361816405, 0.20272787475585938, 0.1976126708984375, 0.2049012451171875, 0.1982750701904297, 0.19758079528808595, 0.20330291748046875, 0.20176223754882813, 0.20046905517578126, 0.19968115234375, 0.1984420166015625, 
0.2045172119140625, 0.19797544860839844, 0.19826544189453124, 0.19708956909179687, 0.19606704711914064, 0.20319436645507813, 0.2006876220703125, 0.198002685546875, 0.19762995910644532, 0.19765423583984376, 0.19856019592285157, 0.20349935913085937, 0.1994910125732422, 0.19901087951660157, 0.19808840942382813, 0.19940953063964845, 0.20337088012695312, 0.19774240112304686, 0.1981114501953125, 0.1975377960205078, 0.19856352233886718, 0.20237705993652344, 0.20320713806152343, 0.19851673889160157, 0.1981071319580078, 0.19993206787109374, 0.2051705322265625, 0.19880345153808593, 0.19850445556640625, 0.19810208129882811, 0.1985361328125, 0.20169113159179688, 0.1981317138671875, 0.19877830505371094, 0.19785171508789062, 0.19772006225585936, 0.20192665100097656, 0.19823426818847656, 0.19748439025878906, 0.1964400634765625, 0.19757778930664063, 0.19728070068359374, 0.19835804748535157, 0.19755488586425782, 0.1974757080078125, 0.20232899475097657, 0.20404127502441408, 0.19829148864746093, 0.19770664978027344, 0.19802056884765626, 0.19674330139160157, 0.20207977294921875, 0.19693043518066405, 0.1976873016357422, 0.19791081237792968, 0.19613990783691407, 0.19771414184570313, 0.1976038055419922, 0.19576797485351563, 0.19581184387207032, 0.1955799102783203, 0.20146995544433594, 0.1968476104736328, 0.1975400390625, 0.1968023681640625, 0.19605520629882814, 0.19449839782714845, 0.19605255126953125, 0.19735498046875, 0.19801350402832033, 0.19771366882324218, 0.20366403198242186, 0.19769290161132813, 0.19769398498535157, 0.20029644775390626, 0.1999742431640625, 0.19736579895019532, 0.19694992065429687, 0.19693772888183594, 0.19753852844238282, 0.2000476531982422, 0.19998179626464843, 0.19832447814941406, 0.19750706481933594, 0.19789404296875, 0.19645599365234376, 0.1967027587890625, 0.19743656921386718, 0.19709423828125, 0.19653826904296876, 0.19602566528320312, 0.19631158447265626, 0.19976626586914062, 0.19670124816894533, 0.1957855987548828, 0.19593836975097656, 0.19812889099121095, 0.1970040283203125, 0.19664691162109374, 0.19643391418457032, 0.19548722839355467, 0.1960738830566406, 0.19617420959472656, 0.19677565002441405, 0.19690217590332032, 0.19651408386230468, 0.19693458557128907, 0.19640013122558594, 0.19675750732421876, 0.195736572265625, 0.1972623748779297, 0.19620040893554688, 0.196853759765625, 0.1981720886230469, 0.19767117309570312, 0.19747459411621093, 0.19810511779785156, 0.19836294555664064, 0.19839581298828124, 0.19823590087890625, 0.2033526153564453, 0.19822592163085936, 0.19708073425292968, 0.19737408447265625, 0.20021987915039063, 0.198761474609375, 0.19685784912109375, 0.19992166137695314, 0.1980088348388672, 0.19663011169433595, 0.19620700073242187, 0.1969336395263672, 0.1975152587890625, 0.19699462890625, 0.19734573364257812, 0.1968926696777344, 0.19709657287597657, 0.19686285400390624, 0.1960793914794922, 0.19682298278808594, 0.19602665710449219, 0.19975372314453124, 0.19627357482910157, 0.1961641845703125, 0.19526860046386718, 0.19860716247558594, 0.1973531494140625, 0.1979883575439453, 0.19763941955566405, 0.19883110046386718, 0.19781961059570313, 0.19783056640625, 0.19642431640625, 0.1970699462890625, 0.19602316284179688, 0.19894216918945312, 0.19523965454101563, 0.19557029724121094, 0.19577378845214843, 0.19671849060058594, 0.19556352233886717, 0.19508114624023437, 0.1952391357421875, 0.19643075561523438, 0.1958335418701172, 0.1958115234375, 0.1951068115234375, 0.19561062622070313, 0.19537152099609376, 0.19506329345703124, 0.19817715454101562, 0.19751948547363282, 
0.19647283935546875, 0.197064697265625, 0.1983950653076172, 0.19917404174804687, 0.19618704223632813, 0.19680050659179688, 0.19679641723632812, 0.19585420227050782, 0.19595481872558593, 0.1980696563720703, 0.19825030517578124, 0.19749507141113282, 0.19711231994628906, 0.197285888671875, 0.1971609649658203, 0.1968308868408203, 0.1969134979248047, 0.201787353515625, 0.19783628845214843, 0.2066417236328125, 0.198920166015625, 0.1974244842529297, 0.1979020233154297, 0.2006942138671875, 0.2024883575439453, 0.19742828369140625, 0.1966399383544922, 0.1975500183105469, 0.19734918212890626, 0.19768524169921875, 0.19658546447753905, 0.19685580444335937, 0.19657933044433593, 0.19620658874511718, 0.1966878662109375, 0.19685353088378907, 0.1969072265625, 0.1971937255859375, 0.19530752563476564, 0.19682917785644533, 0.19729798889160155, 0.19773663330078126, 0.19633766174316405, 0.19539289855957032, 0.19665731811523438, 0.19803996276855468, 0.19744569396972655, 0.19564947509765626, 0.19478022766113282, 0.19962063598632812, 0.19935562133789062, 0.1980966339111328, 0.19896319580078126, 0.1990421142578125, 0.19671136474609374, 0.19634707641601562, 0.19590841674804688, 0.19529420471191405, 0.2054703369140625, 0.1965816345214844, 0.19742311096191406, 0.1971007080078125, 0.19604931640625, 0.19743507385253906, 0.19744435119628906, 0.19808869934082032, 0.1970214385986328, 0.19571533203125, 0.1955653076171875, 0.20073858642578124, 0.19788470458984375, 0.1970946502685547, 0.19824038696289062, 0.19754786682128905, 0.20078799438476563, 0.19856008911132814, 0.1965179901123047, 0.19649740600585938, 0.19595826721191406, 0.19680117797851562, 0.19702056884765626, 0.19687315368652344, 0.1960545654296875, 0.1947993927001953, 0.19501295471191407, 0.19562326049804687, 0.1971988525390625, 0.19727871704101563, 0.1969969940185547, 0.1969144287109375, 0.19748748779296876, 0.1963581085205078, 0.19597724914550782, 0.19521443176269532, 0.1952183380126953, 0.1951680908203125, 0.19669375610351564, 0.19542684936523438, 0.1986824951171875, 0.1971215057373047, 0.1974625244140625, 0.19623631286621093, 0.20133888244628906, 0.19667567443847656, 0.19824298095703125, 0.1965857238769531, 0.19621888732910156, 0.19668992614746095, 0.1958062744140625, 0.19667245483398438, 0.19733261108398437, 0.19933161926269533, 0.19712165832519532, 0.1958973388671875, 0.19598434448242188, 0.19648101806640625, 0.1961793212890625, 0.19678224182128906, 0.19568211364746094, 0.1957341766357422, 0.19918170166015625, 0.19652029418945313, 0.19978729248046875, 0.19684066772460937, 0.19653485107421875, 0.19618153381347656, 0.19578950500488282, 0.19622706604003906, 0.19549798583984376, 0.19545292663574218, 0.19450880432128906, 0.19510179138183595, 0.199212158203125, 0.19576095581054687, 0.19577955627441407, 0.19709078979492187, 0.19694441223144532, 0.19656655883789062, 0.1956248016357422, 0.1954078369140625, 0.1960301513671875, 0.19545391845703125, 0.19609324645996093, 0.19596697998046875, 0.19606387329101563, 0.19635411071777345, 0.19579084777832031, 0.19538534545898437, 0.19998512268066407, 0.19741719055175783, 0.19647010803222656, 0.1965101776123047, 0.2021637420654297, 0.19712355041503907, 0.19637759399414062, 0.19676803588867187, 0.19709046936035157, 0.19638943481445312, 0.19591168212890625, 0.19641987609863282, 0.19752841186523437, 0.19604978942871093, 0.19613658142089843, 0.19661798095703126, 0.19620314025878907, 0.19668377685546876, 0.19586659240722656, 0.19659368896484375, 0.19559590148925782, 0.19612710571289063, 0.207204345703125, 0.19564134216308593, 
0.19506092834472658, 0.19864863586425782, 0.19715248107910155, 0.19721449279785155, 0.19619839477539064, 0.19665625, 0.19522854614257812, 0.196044921875, 0.19607948303222655, 0.1954693145751953, 0.19575398254394533, 0.19531776428222655, 0.19612672424316407, 0.1967796173095703, 0.195950439453125, 0.195740478515625, 0.19820445251464844, 0.1973690185546875, 0.1981908874511719, 0.1975767059326172, 0.19742025756835938, 0.19738272094726564, 0.1982587127685547, 0.19796409606933593, 0.19812051391601562, 0.1976471710205078, 0.20258950805664064, 0.19870790100097657, 0.2003619842529297, 0.20137350463867187, 0.20069366455078125, 0.19902423095703126, 0.19693644714355468, 0.19682492065429688, 0.19708650207519532, 0.2061504669189453, 0.197697509765625, 0.19705654907226564, 0.19825788879394532, 0.20012144470214843, 0.19713401794433594, 0.19686604309082031, 0.19714866638183592, 0.19841421508789062, 0.19690054321289063, 0.19695660400390624, 0.196890625, 0.20159896850585937, 0.19778764343261718, 0.19855360412597656, 0.2023916473388672, 0.1988153533935547, 0.19839619445800782, 0.1994219512939453, 0.19830758666992188, 0.19857843017578125, 0.20064271545410156, 0.19802301025390626, 0.20007344055175783, 0.19901622009277345, 0.19826687622070313, 0.19745791625976564, 0.197514404296875, 0.1990718994140625, 0.19879093933105468, 0.19748751831054687, 0.19735142517089843, 0.20006646728515626, 0.19952890014648436, 0.19926850891113282, 0.20015309143066407, 0.2009169921875, 0.20049510192871095, 0.2008423309326172, 0.20001426696777344, 0.1988817596435547]",tokens/s,5.063949878626485,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,26452.04992,13903.003648,0.0,13507.756032,13505.835008,s,1,53.68265234375,53.68265234375,0.0,53.68265234375,53.68265234375,53.68265234375,53.68265234375,[53.68265234375],,kWh,0.0013594839654375126,0.00014995422307555558,0.0005131501327420018,0.00202258832125507,,MB,1217.015808,14796.3904,0.0,14380.171264,14175.648768,s,10,1.681370559692383,0.16813705596923828,0.0018570011091590974,0.16832992553710938,0.17007421112060547,0.17025881576538085,0.17040649948120118,"[0.16331564331054688, 0.16753407287597658, 0.1692528076171875, 0.1676363525390625, 0.16837705993652344, 0.16782252502441405, 0.16828279113769531, 0.17044342041015625, 0.17003318786621094, 0.16867269897460938]",tokens/s,1522.5673991034837,kWh,4.8628614256249915e-06,5.362798158514407e-07,3.2332248087999875e-06,8.632366050276419e-06,tokens/kWh,29655832.307042003,MB,1234.681856,14838.33344,0.0,14422.114304,14358.052352,s,10,59.112801757812505,5.9112801757812505,0.0234726679418505,5.9043562011718755,5.937619677734375,5.951708764648438,5.9629800341796875,"[5.93448876953125, 5.9657978515625, 5.8935556640625, 5.89496630859375, 5.8959716796875, 5.92111669921875, 5.8934638671875, 5.88408740234375, 5.91274072265625, 
5.91661279296875]",tokens/s,10.657589917343708,kWh,0.00017206384317645744,1.8979360233535417e-05,0.0001142792025343999,0.0003053224059443928,tokens/kWh,206339.26227960468,,s,630,59.108717163085984,0.0938233605763269,0.0010554806569597194,0.0936074562072754,0.09468990707397461,0.095438130569458,0.09734189987182618,"[0.09622608184814453, 0.09662601470947266, 0.09388877105712891, 0.0938532485961914, 0.09452835083007813, 0.09446809387207031, 0.09410355377197266, 0.09430630493164062, 0.09533235168457031, 0.0963583984375, 0.09428118133544922, 0.09403852844238281, 0.0951644515991211, 0.09409471893310548, 0.09427168273925782, 0.0939012451171875, 0.09453772735595703, 0.09508364868164063, 0.09391798400878906, 0.0938025894165039, 0.09370829010009765, 0.09389164733886719, 0.09408608245849609, 0.09418508911132813, 0.09389699554443359, 0.09418761444091797, 0.09410530853271484, 0.09353209686279297, 0.09394620513916016, 0.09456006622314453, 0.09358560180664062, 0.09338050842285156, 0.09423881530761719, 0.09356185913085938, 0.09443571472167969, 0.09355068969726563, 0.09467715454101562, 0.09384384155273437, 0.09425920104980469, 0.0937676773071289, 0.09338470458984376, 0.09336627197265625, 0.09415376281738282, 0.09418851470947266, 0.09380249786376953, 0.09322700500488282, 0.09329571533203125, 0.09344035339355469, 0.0931681900024414, 0.09323929595947265, 0.09394790649414063, 0.09526067352294922, 0.09426124572753906, 0.09429401397705078, 0.0939375991821289, 0.0943839340209961, 0.09428607940673828, 0.09388751983642578, 0.09377616119384766, 0.09434591674804688, 0.09480397033691407, 0.09591584014892578, 0.0935323486328125, 0.09446806335449219, 0.09538969421386718, 0.09454329681396484, 0.09702047729492187, 0.09471743774414063, 0.10125945281982422, 0.09612105560302735, 0.09524214172363281, 0.09418482971191407, 0.0943373794555664, 0.0939012451171875, 0.09433235168457031, 0.09405091094970704, 0.0940052490234375, 0.09468883514404297, 0.09436409759521484, 0.09436723327636719, 0.09422016143798828, 0.09492697906494141, 0.09362188720703125, 0.09420652770996094, 0.09420217895507813, 0.09412326049804688, 0.09613523101806641, 0.0948410873413086, 0.0952713623046875, 0.09452537536621093, 0.0944865951538086, 0.09591193389892579, 0.09517056274414062, 0.09380863952636719, 0.09389814758300781, 0.09453628540039062, 0.09429548645019531, 0.09388809967041016, 0.09499337768554687, 0.09472605133056641, 0.09475827026367188, 0.09423741149902344, 0.09431017303466797, 0.09361392211914063, 0.09327638244628907, 0.09740064239501953, 0.09423088073730469, 0.09444761657714844, 0.0937185287475586, 0.0945469741821289, 0.09503337860107422, 0.09372767639160157, 0.09458483123779297, 0.09432784271240234, 0.09713558197021484, 0.09371647644042969, 0.09447158050537109, 0.09411196899414062, 0.09391142272949218, 0.09399235534667968, 0.09491107177734374, 0.09442713928222657, 0.0941813735961914, 0.09518045043945313, 0.09424726104736328, 0.09406259155273437, 0.09574921417236328, 0.09394464111328125, 0.09359919738769532, 0.09364339447021484, 0.09352336120605469, 0.0935955810546875, 0.09332393646240235, 0.09327378845214844, 0.09311468505859374, 0.09313279724121094, 0.09347650909423828, 0.09324527740478515, 0.09299404907226562, 0.0932105941772461, 0.09322908782958984, 0.09303244781494141, 0.09295667266845703, 0.09333103942871093, 0.09272918701171876, 0.09286643218994141, 0.09355948638916016, 0.09334374237060547, 0.09394790649414063, 0.09349056243896485, 0.09424140930175781, 0.09336585235595703, 0.0931024932861328, 0.0934583969116211, 0.09469955444335937, 
0.09370355224609375, 0.09355942535400391, 0.09390694427490234, 0.09309184265136719, 0.09307071685791016, 0.09316802978515625, 0.09333782196044922, 0.09330806732177735, 0.09318895721435547, 0.09378121948242188, 0.09455286407470703, 0.09424076843261718, 0.0925880355834961, 0.09244041442871094, 0.09306537628173828, 0.0929443817138672, 0.09340518188476563, 0.09390489959716797, 0.09637068939208984, 0.09279078674316406, 0.0934823989868164, 0.09287535858154297, 0.09338470458984376, 0.09367756652832031, 0.09448448181152344, 0.09362022399902344, 0.09348915100097656, 0.09719808197021484, 0.09362432098388672, 0.09307667541503906, 0.09355347442626953, 0.09353536224365235, 0.0930226593017578, 0.09259846496582032, 0.0941588134765625, 0.09305276489257812, 0.09323945617675782, 0.09314412689208984, 0.09280745697021485, 0.09324610900878906, 0.09361817932128906, 0.09282559967041015, 0.09302377319335937, 0.09347734069824219, 0.09351372528076173, 0.09379014587402344, 0.09414252471923829, 0.09309120178222656, 0.09334770965576172, 0.094155517578125, 0.09390223693847656, 0.09406729888916016, 0.09404621124267579, 0.09752780914306641, 0.09374310302734375, 0.09367961883544922, 0.09403392028808594, 0.09351535797119141, 0.09330934143066406, 0.09300518035888672, 0.09322560119628906, 0.09313645172119141, 0.09581613159179687, 0.09304678344726562, 0.09311641693115234, 0.09321670532226563, 0.09294582366943359, 0.0932768325805664, 0.09650745391845703, 0.09281375885009766, 0.09281449890136718, 0.0932168960571289, 0.09322364807128906, 0.09285427093505859, 0.09273958587646484, 0.09305702209472656, 0.09297020721435546, 0.09327490997314453, 0.09287474822998047, 0.09342499542236328, 0.09320246124267578, 0.09381314849853516, 0.09316070556640625, 0.09390563201904296, 0.09365325164794921, 0.09316761779785156, 0.09343180847167969, 0.09367113494873047, 0.09317814636230469, 0.09344393920898438, 0.09406185913085938, 0.09366822052001954, 0.09350144195556641, 0.09436774444580077, 0.0934277114868164, 0.09443488311767578, 0.09344249725341797, 0.09380448150634765, 0.0937470703125, 0.0932371826171875, 0.09352825927734375, 0.09317298889160157, 0.09455193328857422, 0.09369900512695313, 0.09321782684326171, 0.09302870178222657, 0.09416153717041016, 0.0934645767211914, 0.09360492706298829, 0.0937008285522461, 0.09387849426269532, 0.09365299224853516, 0.09378157043457032, 0.09310662078857422, 0.0984247055053711, 0.09353836822509766, 0.0941937255859375, 0.09368316650390625, 0.09338448333740235, 0.09308601379394531, 0.09434925079345703, 0.09620326232910156, 0.09329459381103515, 0.09312620544433593, 0.09317011260986328, 0.0928786849975586, 0.09287875366210938, 0.09349097442626954, 0.09354287719726563, 0.09325894165039063, 0.09322579193115234, 0.09342361450195312, 0.09364457702636719, 0.09310844421386719, 0.09357926177978515, 0.09289113616943359, 0.09329049682617188, 0.09315500640869141, 0.09387245178222656, 0.09396633911132812, 0.0937000961303711, 0.09359910583496094, 0.09326860809326172, 0.09341334533691406, 0.09347894287109375, 0.09352601623535156, 0.0932042236328125, 0.0934238739013672, 0.09344409942626954, 0.09305907440185547, 0.09343590545654297, 0.0930561294555664, 0.09295756530761719, 0.092837890625, 0.09274534606933593, 0.09276201629638672, 0.09354227447509765, 0.09321433258056641, 0.09464643096923828, 0.09427027130126953, 0.09388646697998047, 0.09328598022460938, 0.093108642578125, 0.09358131408691406, 0.0935997085571289, 0.09409334564208985, 0.1065902099609375, 0.09362022399902344, 0.09439571380615235, 0.09365363311767579, 
0.09374214172363281, 0.09366591644287109, 0.09395180511474609, 0.09372319793701171, 0.0935251235961914, 0.09298623657226562, 0.09413017272949219, 0.0936099853515625, 0.09432876586914063, 0.09343392181396484, 0.0937492446899414, 0.09355059051513671, 0.09401036834716797, 0.09404307556152344, 0.09396640014648437, 0.09385081481933594, 0.09379923248291015, 0.09415225219726563, 0.09382342529296875, 0.09396428680419922, 0.09418675231933593, 0.09334976196289063, 0.09350233459472657, 0.09395334625244141, 0.09409811401367188, 0.09376972961425781, 0.0936607666015625, 0.09334150695800782, 0.093787841796875, 0.09420278167724609, 0.09369583892822266, 0.09407504272460937, 0.09373004913330078, 0.09341795349121093, 0.09308134460449219, 0.09301990509033203, 0.09315408325195312, 0.09585868835449218, 0.09433087921142579, 0.09358745574951172, 0.09399501037597656, 0.09418752288818359, 0.09326182556152343, 0.09325542449951171, 0.09326412963867188, 0.09304473876953125, 0.09326182556152343, 0.09368370819091797, 0.09382297515869141, 0.0954777603149414, 0.09410765075683594, 0.09428355407714843, 0.09340898895263672, 0.10031568145751953, 0.09615360260009766, 0.0939163818359375, 0.09366812896728516, 0.09371238708496094, 0.09355264282226562, 0.09368339538574219, 0.09345260620117188, 0.09343590545654297, 0.09329459381103515, 0.09333548736572266, 0.09302960205078124, 0.09302716827392578, 0.09338841247558594, 0.09343801879882813, 0.09284754943847656, 0.09302278137207032, 0.09263340759277344, 0.09346662139892578, 0.09331423950195313, 0.09246514892578125, 0.09325833892822266, 0.09312483215332032, 0.09282355499267578, 0.0929106216430664, 0.0924167709350586, 0.09297062683105468, 0.09358601379394531, 0.09347071838378906, 0.09359564971923828, 0.09378345489501953, 0.09405856323242187, 0.09323948669433593, 0.0940670394897461, 0.09381385803222657, 0.09351465606689453, 0.09293004608154297, 0.09312854766845703, 0.09679468536376953, 0.09356905364990234, 0.09338681793212891, 0.0930709457397461, 0.09325775909423828, 0.09304307556152344, 0.09322700500488282, 0.09270454406738281, 0.09313862609863281, 0.09241654205322265, 0.09578233337402343, 0.09305760192871093, 0.09354576110839843, 0.09302671813964844, 0.09307552337646484, 0.09325145721435547, 0.09295500946044923, 0.0926740493774414, 0.09278463745117188, 0.09339647674560547, 0.0933473892211914, 0.09316242980957032, 0.09517056274414062, 0.09341929626464844, 0.09391702270507812, 0.09418470764160156, 0.09379097747802734, 0.09281126403808594, 0.09296281433105469, 0.09340694427490234, 0.09325596618652343, 0.0932489242553711, 0.0926009292602539, 0.09340518188476563, 0.09307734680175782, 0.09310428619384765, 0.09279488372802734, 0.09340239715576172, 0.09322569274902344, 0.09324748992919922, 0.09271910095214844, 0.09284947204589844, 0.09306947326660156, 0.09292649841308594, 0.09327942657470703, 0.09303327941894532, 0.09321842956542968, 0.09316105651855469, 0.09359849548339844, 0.09496166229248047, 0.09413836669921875, 0.09341133117675782, 0.09341133117675782, 0.09447334289550781, 0.09333631896972656, 0.09318617248535156, 0.09292390441894531, 0.09348710632324218, 0.09282755279541016, 0.0930137939453125, 0.09280915069580079, 0.09568895721435547, 0.0922108154296875, 0.09289078521728515, 0.0925192642211914, 0.09291522979736327, 0.0929755859375, 0.09308979034423828, 0.09552025604248048, 0.09345619201660156, 0.09287248229980469, 0.09314972686767578, 0.0931036148071289, 0.0930487060546875, 0.09340415954589844, 0.09345555114746094, 0.09333638763427735, 0.09393494415283203, 0.09314166259765624, 
0.09349894714355468, 0.09361369323730469, 0.0933978271484375, 0.09444454193115234, 0.09372128295898438, 0.09331270599365235, 0.09475545501708985, 0.09415679931640625, 0.09369599914550782, 0.09425315093994141, 0.09310899353027344, 0.09285222625732421, 0.09264947509765625, 0.0930298843383789, 0.09324390411376954, 0.09373081970214844, 0.093189697265625, 0.09316121673583984, 0.09304338836669922, 0.09276150512695312, 0.09289174652099609, 0.09329177856445313, 0.09289952087402344, 0.09311283111572266, 0.09534233856201171, 0.09405471801757813, 0.09388851165771485, 0.09357926177978515, 0.09392127990722657, 0.0934169921875, 0.09340156555175781, 0.09382093048095703, 0.09365039825439453, 0.09317977905273438, 0.09302082824707031, 0.09626969909667969, 0.09349724578857421, 0.09315715026855469, 0.09313581085205078, 0.09640656280517577, 0.09479267120361329, 0.09401344299316407, 0.09448006439208985, 0.09425132751464843, 0.09396009826660157, 0.09398486328125, 0.09392947387695312, 0.09427942657470703, 0.09496601867675782, 0.09395574188232422, 0.09418582153320312, 0.09425299072265625, 0.09465782165527344, 0.0949271011352539, 0.09395362854003907, 0.09395449829101563, 0.09336268615722657, 0.0952279052734375, 0.09402982330322265, 0.09391420745849609, 0.09334877014160156, 0.09487503814697265, 0.09386418914794922, 0.09360406494140625, 0.09378828430175781, 0.09497510528564453, 0.09344403076171875, 0.09293714904785157, 0.09415679931640625, 0.09450476837158203, 0.09353759765625, 0.09324384307861328, 0.09402665710449219, 0.094455810546875, 0.09439231872558594, 0.09319219207763672, 0.09333299255371094, 0.09387059020996094, 0.0944736328125, 0.09355721282958984, 0.0933315200805664, 0.0942490234375, 0.0940475845336914, 0.09339151763916016, 0.09380863952636719, 0.09801312255859375, 0.09385171508789063, 0.09328844451904297, 0.094455810546875, 0.09448348999023437, 0.09398985290527344, 0.09355574035644532, 0.09694041442871094, 0.09403453063964844, 0.0937000961303711, 0.0930508804321289, 0.09442463684082031, 0.09329641723632813, 0.09425552368164063, 0.09358316802978515, 0.0941789779663086, 0.09346867370605469, 0.09319709014892578, 0.09413120269775391, 0.09377689361572265, 0.09331001281738281, 0.09403692626953125, 0.09321180725097657, 0.09389469146728516, 0.09385862731933593, 0.09461321258544922, 0.09370626831054688, 0.09395635223388672, 0.09309798431396485, 0.09452947235107421, 0.09375341033935547, 0.09371555328369141, 0.09332339477539063, 0.09502595520019531, 0.09410150146484375, 0.09406003570556641, 0.0942003173828125, 0.09452668762207031, 0.09318070220947265, 0.09284812927246094, 0.09406668853759766, 0.09377129364013671, 0.09295625305175781, 0.09378224182128907, 0.09535350036621094, 0.09318985748291016, 0.09323548889160156, 0.09321881866455078, 0.09288838195800782, 0.09301382446289062]",tokens/s,10.658326389689304,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,MB,1092.112384,608.043008,0.0,205.520896,177.265664,s,1,8.2373134765625,8.2373134765625,0.0,8.2373134765625,8.2373134765625,8.2373134765625,8.2373134765625,[8.2373134765625],,kWh,2.356280774162466e-05,2.5919201425356697e-06,7.209450212014579e-06,3.336417809617491e-05,,MB,1329.881088,710.803456,0.0,287.309824,257.382912,s,10,0.27590617561340336,0.027590617561340337,0.00014455517763817656,0.02758148765563965,0.02776354808807373,0.027825741863250732,0.027875496883392335,"[0.02751312065124512, 0.027458784103393554, 0.027586879730224608, 0.027749727249145508, 0.02757609558105469, 0.02763590431213379, 0.027631168365478517, 0.027529279708862306, 0.0273372802734375, 0.027887935638427733]",tokens/s,9278.51648955855,kWh,8.201658365638235e-07,9.044938334331752e-08,3.155456644213707e-07,1.2261608843285116e-06,tokens/kWh,208781737.5940797,MB,1343.377408,731.774976,0.0,308.281344,257.58208,s,10,16.47860791015625,1.6478607910156249,0.006680413561662069,1.6496075439453124,1.6568090454101563,1.6573290832519532,1.6577451135253907,"[1.6515277099609376, 1.6441129150390625, 1.6509481201171874, 1.6566934814453125, 1.650267578125, 1.648947509765625, 1.65784912109375, 1.6367396240234375, 1.6407335205078124, 1.640788330078125]",tokens/s,38.23138480112222,kWh,4.909606642968518e-05,5.414988021790756e-06,1.6263384265378885e-05,7.077443871685481e-05,tokens/kWh,890151.884524895,,s,630,16.473763818740842,0.026148831458318804,0.0004964440954890817,0.026083744049072266,0.02633921012878418,0.026541895961761475,0.028079526805877695,"[0.026025760650634767, 0.026221887588500976, 0.026104736328125, 0.026198015213012696, 0.02603113555908203, 0.026102399826049803, 0.026075231552124024, 0.02902150344848633, 0.026713056564331053, 0.026328800201416015, 0.026087263107299804, 0.026086048126220705, 0.02629747200012207, 0.02607142448425293, 0.025996768951416015, 0.02604070472717285, 0.026029632568359374, 0.027521087646484376, 0.02600956726074219, 0.025995328903198243, 0.025950592041015626, 0.026084896087646484, 0.026257535934448243, 0.025997312545776367, 0.025948896408081054, 0.025962656021118163, 0.025941375732421876, 0.025925664901733397, 0.026016191482543947, 0.02596659278869629, 0.026013952255249023, 0.02609270477294922, 0.026105440139770508, 0.026018815994262694, 0.02614067268371582, 0.026871679306030273, 0.026129888534545898, 0.02600422477722168, 0.027264575958251953, 0.026806400299072265, 0.02630291175842285, 0.026131744384765624, 0.02618822479248047, 0.02603628730773926, 0.026012992858886717, 0.02592038345336914, 0.025982784271240233, 0.02613043212890625, 0.026050559997558592, 0.02611359977722168, 0.02614255905151367, 0.02642905616760254, 0.026198720932006835, 0.026047008514404297, 0.02599241638183594, 0.026083871841430663, 0.02608278465270996, 0.026108448028564452, 0.026034175872802736, 0.025980928421020507, 0.026043968200683595, 0.02637664031982422, 0.026212352752685547, 0.025800832748413084, 0.02589743995666504, 0.025823392868041993, 0.025812992095947264, 0.025949695587158202, 0.02583420753479004, 0.025849632263183594, 0.025796607971191408, 0.025878528594970703, 0.02591916847229004, 0.025915136337280275, 0.02587411117553711, 0.02589583969116211, 0.025912511825561525, 0.02585475158691406, 0.026077184677124023, 0.030115968704223634, 0.02635251235961914, 0.02590742492675781, 0.02587113571166992, 0.025855775833129882, 0.026089120864868164, 0.02596086311340332, 0.025882368087768556, 0.026034591674804687, 0.026574848175048828, 0.02602191925048828, 
0.025976064682006837, 0.025901920318603517, 0.025841535568237303, 0.025874080657958983, 0.026155359268188478, 0.025956352233886718, 0.02603011131286621, 0.0260229434967041, 0.0262030086517334, 0.02603424072265625, 0.026066944122314452, 0.025987071990966795, 0.026001407623291017, 0.026124351501464842, 0.026224224090576172, 0.0259520320892334, 0.02594160079956055, 0.025952735900878907, 0.025926015853881837, 0.026054784774780272, 0.026302463531494142, 0.02608742332458496, 0.02603615951538086, 0.026105920791625978, 0.026016063690185547, 0.025992160797119142, 0.02602057647705078, 0.026249216079711913, 0.02666700744628906, 0.02656198310852051, 0.0260882568359375, 0.02616703987121582, 0.026109952926635743, 0.026119775772094726, 0.026139135360717772, 0.02599519920349121, 0.025688831329345702, 0.025974336624145507, 0.025869855880737303, 0.026000288009643553, 0.026029983520507814, 0.025962175369262694, 0.025864063262939455, 0.025881120681762695, 0.025989120483398437, 0.025998432159423827, 0.02588764762878418, 0.025953664779663085, 0.025905792236328124, 0.026169343948364256, 0.026066944122314452, 0.02614271926879883, 0.02615091133117676, 0.026048511505126954, 0.02611942481994629, 0.026022655487060547, 0.026023935317993165, 0.026076576232910157, 0.026301183700561524, 0.026095455169677734, 0.0261079044342041, 0.02613983917236328, 0.026151744842529297, 0.02612460708618164, 0.026136255264282225, 0.02619308853149414, 0.026562847137451173, 0.026137088775634764, 0.029804576873779298, 0.026148672103881835, 0.02619001579284668, 0.026093183517456056, 0.02613859176635742, 0.026486656188964845, 0.026075679779052733, 0.02626924705505371, 0.026132831573486327, 0.02618502426147461, 0.026060895919799806, 0.026145248413085936, 0.026097888946533202, 0.02612838363647461, 0.026085376739501953, 0.026346559524536132, 0.026079519271850586, 0.026063520431518553, 0.02614681625366211, 0.026214399337768556, 0.02608064079284668, 0.026234848022460937, 0.026187711715698243, 0.02619260787963867, 0.026145023345947267, 0.026199968338012695, 0.02645590400695801, 0.027025312423706056, 0.026486879348754884, 0.026302463531494142, 0.026458112716674805, 0.025741823196411134, 0.02635379219055176, 0.027359359741210936, 0.026277631759643556, 0.0260665283203125, 0.026036544799804686, 0.026179744720458985, 0.02625529670715332, 0.026557952880859374, 0.026315231323242188, 0.026353023529052735, 0.02631747245788574, 0.02629852867126465, 0.026273056030273436, 0.026430240631103517, 0.026070816040039062, 0.02677350425720215, 0.02625436782836914, 0.02625619125366211, 0.026209440231323242, 0.02617414474487305, 0.02622496032714844, 0.026144479751586912, 0.02611347198486328, 0.026130655288696288, 0.02618841552734375, 0.026143775939941407, 0.026174432754516603, 0.026161151885986327, 0.026203199386596678, 0.02614566421508789, 0.026349632263183594, 0.02605084800720215, 0.026195680618286133, 0.02627084732055664, 0.026339584350585938, 0.02600204849243164, 0.026252735137939454, 0.026419551849365234, 0.026409151077270508, 0.02627587127685547, 0.026288063049316406, 0.026122400283813477, 0.026054559707641603, 0.026144447326660155, 0.02676358413696289, 0.02690217590332031, 0.0265567684173584, 0.026447872161865234, 0.02675712013244629, 0.026392000198364258, 0.026147392272949217, 0.02614067268371582, 0.02617046356201172, 0.026296960830688478, 0.026208320617675782, 0.026239200592041014, 0.026172800064086912, 0.026425567626953125, 0.02663007926940918, 0.026141151428222657, 0.026155071258544924, 0.02629216003417969, 0.02605500793457031, 0.026181631088256836, 
0.02633113670349121, 0.02621343994140625, 0.026184640884399413, 0.02625027275085449, 0.026116512298583985, 0.02619843292236328, 0.026161312103271484, 0.02612838363647461, 0.026077472686767578, 0.026148416519165038, 0.0260501766204834, 0.026112543106079102, 0.026170560836791992, 0.02621232032775879, 0.026119007110595702, 0.02610371208190918, 0.026050399780273438, 0.0260897274017334, 0.026099136352539062, 0.026308639526367188, 0.026245248794555663, 0.02645167922973633, 0.026351423263549806, 0.02635977554321289, 0.025990047454833985, 0.026101696014404298, 0.026201183319091798, 0.02614806365966797, 0.026181472778320312, 0.026163360595703126, 0.02618297576904297, 0.026116544723510743, 0.026201759338378906, 0.026050912857055665, 0.02627916717529297, 0.026121152877807616, 0.026119007110595702, 0.026084320068359375, 0.02628118324279785, 0.026149152755737304, 0.02660812759399414, 0.026081279754638673, 0.026214239120483398, 0.026243200302124025, 0.026217599868774415, 0.026143775939941407, 0.026150016784667968, 0.026119935989379884, 0.02612668800354004, 0.02626736068725586, 0.02603059196472168, 0.02614726448059082, 0.026107648849487304, 0.026147071838378905, 0.026091775894165038, 0.026207359313964843, 0.02618047904968262, 0.026234207153320314, 0.026280351638793945, 0.02654412841796875, 0.026505216598510743, 0.025775808334350586, 0.025981664657592773, 0.026077152252197266, 0.026140031814575197, 0.025941791534423827, 0.026029983520507814, 0.02612521553039551, 0.026255392074584962, 0.026202112197875976, 0.02626905632019043, 0.02615769577026367, 0.02596249580383301, 0.026052608489990234, 0.025997312545776367, 0.026059904098510743, 0.026057600021362304, 0.02609369659423828, 0.025992191314697266, 0.026065919876098635, 0.026001407623291017, 0.02666409683227539, 0.026088031768798828, 0.026245248794555663, 0.026402816772460938, 0.026177536010742186, 0.02627203178405762, 0.026258943557739257, 0.026120288848876953, 0.026086847305297853, 0.026157663345336913, 0.026085472106933592, 0.026204511642456053, 0.02612803268432617, 0.02624867248535156, 0.026153663635253906, 0.0262326717376709, 0.026390560150146486, 0.026308576583862306, 0.026101760864257813, 0.02612633514404297, 0.02628316879272461, 0.026251167297363282, 0.026177791595458983, 0.02616419219970703, 0.026124256134033203, 0.026202911376953124, 0.026139616012573242, 0.026228736877441407, 0.02613654327392578, 0.026173280715942382, 0.026220352172851562, 0.026183231353759766, 0.02622572708129883, 0.02615475273132324, 0.026277183532714844, 0.02624732780456543, 0.026270368576049804, 0.02619100761413574, 0.026298912048339843, 0.026120256423950196, 0.026257055282592773, 0.02619375991821289, 0.026241567611694334, 0.025942304611206054, 0.026217632293701172, 0.026147647857666014, 0.026064640045166017, 0.027159744262695313, 0.027579200744628905, 0.026456064224243164, 0.026283456802368165, 0.026130176544189452, 0.02628895950317383, 0.026219999313354492, 0.026312768936157225, 0.026026527404785157, 0.02600259208679199, 0.02715932846069336, 0.026128576278686522, 0.026013439178466796, 0.02599737548828125, 0.026036096572875977, 0.026255359649658205, 0.02619024085998535, 0.026095327377319337, 0.026129728317260743, 0.02603660774230957, 0.02594643211364746, 0.026029439926147462, 0.0259931526184082, 0.02607174491882324, 0.026154304504394533, 0.025998016357421876, 0.026050048828125, 0.02602035140991211, 0.026193920135498046, 0.02606710433959961, 0.026054496765136718, 0.027905855178833008, 0.03434140777587891, 0.026261152267456053, 0.026058944702148437, 0.02601103973388672, 
0.02595408058166504, 0.026083616256713866, 0.025953983306884764, 0.02602684783935547, 0.02609334373474121, 0.025950496673583984, 0.02598886489868164, 0.026339168548583983, 0.026407072067260742, 0.026130495071411134, 0.026167295455932618, 0.026105791091918944, 0.02603523254394531, 0.026223583221435545, 0.026064128875732423, 0.02595916748046875, 0.025980127334594726, 0.02596329689025879, 0.02589823913574219, 0.025981695175170898, 0.025943231582641602, 0.02609552001953125, 0.025985408782958984, 0.025612287521362305, 0.02602016067504883, 0.02584364891052246, 0.025853696823120116, 0.0258287353515625, 0.025890527725219728, 0.0259019832611084, 0.025817087173461914, 0.02587388801574707, 0.02582953643798828, 0.025831520080566408, 0.0260032958984375, 0.025780672073364257, 0.025954336166381837, 0.02581705665588379, 0.025806655883789064, 0.026142112731933592, 0.025952640533447265, 0.025917184829711913, 0.02595292854309082, 0.02600044822692871, 0.02594207954406738, 0.026539167404174804, 0.026054431915283203, 0.0260382080078125, 0.025929279327392578, 0.02602003288269043, 0.026040672302246094, 0.02594540786743164, 0.025887231826782226, 0.02594620704650879, 0.025878528594970703, 0.026447999954223634, 0.025918560028076174, 0.02592438316345215, 0.026042335510253905, 0.0259072322845459, 0.025964479446411132, 0.02597603225708008, 0.026163839340209962, 0.025929952621459963, 0.02601523208618164, 0.02588252830505371, 0.02591744041442871, 0.025919456481933594, 0.025977184295654297, 0.025983072280883788, 0.026091712951660156, 0.025948160171508788, 0.026058656692504883, 0.025960416793823243, 0.026007680892944335, 0.026208255767822267, 0.026372095108032227, 0.02615910339355469, 0.025884544372558594, 0.026030431747436522, 0.025841503143310546, 0.025989055633544922, 0.025903104782104492, 0.02591948890686035, 0.026086687088012695, 0.025961183547973634, 0.02569830322265625, 0.025984031677246094, 0.02613734436035156, 0.026095935821533203, 0.026038175582885743, 0.025933216094970703, 0.02606492805480957, 0.025854400634765625, 0.02595193672180176, 0.025964160919189454, 0.02581587219238281, 0.02589286422729492, 0.025877824783325197, 0.02589286422729492, 0.026008352279663086, 0.026156864166259765, 0.026026079177856445, 0.025923391342163087, 0.02583103942871094, 0.026046815872192382, 0.02591152000427246, 0.025926944732666015, 0.025919872283935545, 0.02597635269165039, 0.02593417549133301, 0.025815391540527345, 0.02588912010192871, 0.025806623458862303, 0.025849952697753906, 0.025861312866210937, 0.028150463104248048, 0.02830134391784668, 0.026025983810424806, 0.025817087173461914, 0.025968639373779297, 0.025786367416381836, 0.025946271896362304, 0.025919456481933594, 0.02582489585876465, 0.025835775375366212, 0.026062496185302736, 0.025983552932739257, 0.026392351150512694, 0.02594927978515625, 0.02590403175354004, 0.026040319442749024, 0.02587648010253906, 0.02592758369445801, 0.02586591911315918, 0.025886943817138672, 0.025907392501831054, 0.02601487922668457, 0.025842464447021485, 0.025929792404174805, 0.02590924835205078, 0.026003040313720704, 0.026087583541870116, 0.026196063995361327, 0.026252832412719727, 0.026259967803955078, 0.02624028778076172, 0.025955167770385743, 0.0260994873046875, 0.028841440200805663, 0.02597875213623047, 0.02591200065612793, 0.02581657600402832, 0.02585433578491211, 0.02610367965698242, 0.026183935165405275, 0.02596249580383301, 0.025882623672485353, 0.02579199981689453, 0.02615872001647949, 0.025956672668457033, 0.026286399841308594, 0.02592793655395508, 0.025996768951416015, 
0.026012447357177733, 0.026014656066894532, 0.02599603271484375, 0.026132543563842772, 0.025904512405395506, 0.026069696426391602, 0.02588431930541992, 0.025999488830566405, 0.02586182403564453, 0.02623945617675781, 0.02598297691345215, 0.026001407623291017, 0.025884767532348633, 0.026038175582885743, 0.025845535278320314, 0.025855487823486328, 0.025821792602539063, 0.025933311462402343, 0.025885311126708985, 0.026069215774536133, 0.026013471603393554, 0.02593903923034668, 0.025992000579833984, 0.025929824829101562, 0.02590224075317383, 0.025876735687255858, 0.02597702407836914, 0.025882047653198244, 0.025864864349365236, 0.026302783966064454, 0.02584377670288086, 0.026001312255859374, 0.026248767852783204, 0.02619990348815918, 0.025976831436157227, 0.02619475173950195, 0.026076992034912108, 0.026021951675415038, 0.025903039932250977, 0.02593401527404785, 0.02584761619567871, 0.02603343963623047, 0.026471136093139648, 0.02612838363647461, 0.025923391342163087, 0.025906719207763673, 0.025909343719482423, 0.025930335998535156]",tokens/s,38.24262669611063,, 8bit-bnb-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,,MB,1885.319168,1057.947648,0.0,662.700032,622.833664,s,1,9.176845703125,9.176845703125,0.0,9.176845703125,9.176845703125,9.176845703125,9.176845703125,[9.176845703125],,kWh,5.983104099579274e-05,6.592579648255514e-06,2.243474016999647e-05,8.885836081404472e-05,,MB,1932.9024,1181.679616,0.0,765.46048,735.57504,s,10,0.5673114814758301,0.05673114814758301,0.0006719552344458886,0.056575199127197266,0.05756223526000977,0.057724654388427735,0.05785458969116211,"[0.05568246459960938, 0.05743948745727539, 0.05628220748901367, 0.0578870735168457, 0.056626686096191405, 0.0559666862487793, 0.056914398193359375, 0.056462623596191405, 0.05652371215820313, 0.05752614212036133]",tokens/s,4512.512232857158,kWh,1.6440276262641052e-06,1.8130722293314727e-07,7.553095930112511e-07,2.5806444422085035e-06,tokens/kWh,99200027.64151283,MB,1937.088512,1194.262528,0.0,778.043392,751.3984,s,10,35.474713867187496,3.54747138671875,0.007734135746039402,3.5455054931640624,3.55517255859375,3.5604150146484375,3.5646089794921876,"[3.541433837890625, 3.544659912109375, 3.554007568359375, 3.53791064453125, 3.565657470703125, 3.54635107421875, 3.541282470703125, 3.5521962890625, 3.54877880859375, 3.542435791015625]",tokens/s,17.75912844170172,kWh,0.00010247511134373794,1.130311947542708e-05,4.0495640074390033e-05,0.00015427387089355504,tokens/kWh,408364.68051980337,,s,630,35.46810222244262,0.05629857495625813,0.0007064865954460904,0.0562174072265625,0.05679550514221192,0.05721389503479004,0.05880051448822022,"[0.05526563262939453, 0.05572528076171875, 0.05762736129760742, 0.056774879455566404, 0.056012641906738284, 0.05588940811157227, 0.05678540802001953, 0.056677440643310546, 0.05599878311157227, 0.056285152435302736, 0.05600102233886719, 0.05904579162597656, 0.05614412689208984, 0.057524223327636716, 
0.057877727508544925, 0.0562861442565918, 0.05600649642944336, 0.055981632232666015, 0.05604191970825195, 0.05592268753051758, 0.05595340728759766, 0.05602479934692383, 0.055433662414550784, 0.055285633087158205, 0.05556835174560547, 0.056395774841308595, 0.05536547088623047, 0.05553308868408203, 0.055761505126953125, 0.05616249465942383, 0.05604131317138672, 0.05590156936645508, 0.05593363189697265, 0.05576512145996094, 0.055772319793701175, 0.055417022705078124, 0.05543993759155273, 0.05603263854980469, 0.05643686294555664, 0.05625692749023437, 0.055820289611816405, 0.055640064239501956, 0.05687686538696289, 0.05662464141845703, 0.055684799194335936, 0.0555445442199707, 0.055936897277832034, 0.056560222625732424, 0.05610886383056641, 0.056264129638671875, 0.055823104858398434, 0.05585036849975586, 0.05596614456176758, 0.05609062576293945, 0.05607628631591797, 0.05652684783935547, 0.05629536056518555, 0.05670691299438477, 0.05638780975341797, 0.05668044662475586, 0.05739708709716797, 0.056750431060791015, 0.05677587127685547, 0.05590249633789063, 0.05670064163208008, 0.05655094528198242, 0.0565968017578125, 0.05629465484619141, 0.0572545280456543, 0.056054080963134766, 0.056063201904296874, 0.05610134506225586, 0.056037216186523436, 0.05608607864379883, 0.060066688537597654, 0.05804032135009766, 0.056575679779052736, 0.056258846282958984, 0.055932960510253905, 0.056559616088867185, 0.05605971145629883, 0.0559617919921875, 0.05638265609741211, 0.056288063049316404, 0.05572544097900391, 0.055569023132324216, 0.055569984436035155, 0.05528639984130859, 0.05545964813232422, 0.05568921661376953, 0.05630752182006836, 0.05616966247558594, 0.05626544189453125, 0.05573007965087891, 0.056004543304443356, 0.05686489486694336, 0.05598998260498047, 0.05576870346069336, 0.05581727981567383, 0.05618678283691406, 0.05610691070556641, 0.05641020965576172, 0.05595091247558594, 0.05601443099975586, 0.056099681854248046, 0.056094688415527345, 0.056428577423095705, 0.05631292724609375, 0.05634121704101563, 0.05629990386962891, 0.056749855041503906, 0.056369182586669925, 0.056411903381347654, 0.057159934997558594, 0.056155807495117185, 0.05706159973144531, 0.056419681549072266, 0.05627328109741211, 0.056244640350341796, 0.05615734481811523, 0.055769214630126955, 0.05592160034179688, 0.05559888076782227, 0.05556803131103515, 0.05586956787109375, 0.056053600311279296, 0.05600531387329102, 0.05574431991577149, 0.055769088745117185, 0.05882166290283203, 0.056545761108398436, 0.056116767883300785, 0.05613881683349609, 0.05610432052612305, 0.05621334457397461, 0.056148670196533204, 0.05633638381958008, 0.05575299072265625, 0.055780223846435543, 0.055712608337402346, 0.05591839981079102, 0.05614400100708008, 0.056256065368652346, 0.05762911987304688, 0.057290912628173825, 0.05648169708251953, 0.05640758514404297, 0.05631206512451172, 0.056793312072753906, 0.05623756790161133, 0.05622809600830078, 0.056219806671142576, 0.05657379150390625, 0.05657596969604492, 0.05746051025390625, 0.05660675048828125, 0.05679766464233398, 0.05663948822021484, 0.056446975708007815, 0.05671446228027344, 0.056539936065673826, 0.05677081680297852, 0.05677155303955078, 0.056580894470214846, 0.056825599670410155, 0.057229217529296876, 0.05709865570068359, 0.05769209671020508, 0.05600604629516601, 0.05650697708129883, 0.05630361557006836, 0.0571146240234375, 0.05543945693969726, 0.05572099304199219, 0.0559788818359375, 0.05596521759033203, 0.055900768280029295, 0.056446369171142576, 0.05781142425537109, 0.05669907379150391, 
0.05601670455932617, 0.0556844482421875, 0.05567299270629883, 0.055502689361572266, 0.056119968414306644, 0.05571964645385742, 0.05602102279663086, 0.05617625427246094, 0.05611119842529297, 0.0563485107421875, 0.055932960510253905, 0.055539840698242186, 0.05535878372192383, 0.05522211074829102, 0.0558616943359375, 0.056254814147949216, 0.0560722541809082, 0.05567273712158203, 0.055814239501953126, 0.05584076690673828, 0.055357440948486325, 0.05652070236206055, 0.05643264007568359, 0.05650636672973633, 0.05874873733520508, 0.056953025817871095, 0.05687705612182617, 0.056209247589111326, 0.05631129455566406, 0.05652492904663086, 0.05611775970458984, 0.05657193756103516, 0.05633865737915039, 0.05629884719848633, 0.05625062561035156, 0.056323486328125, 0.05598287963867187, 0.056257919311523436, 0.055758560180664066, 0.05594345474243164, 0.05583536148071289, 0.05569910430908203, 0.056172576904296875, 0.055862560272216796, 0.056470462799072266, 0.056027137756347656, 0.05603033447265625, 0.056021888732910155, 0.05616857528686523, 0.05575811386108399, 0.05590095901489258, 0.056156158447265625, 0.05593683242797851, 0.055777278900146485, 0.05621500778198242, 0.05631817626953125, 0.05606361770629883, 0.05656643295288086, 0.05600259017944336, 0.05608857727050781, 0.056051712036132816, 0.05591603088378906, 0.05632275390625, 0.05560710525512695, 0.05568022537231445, 0.05567567825317383, 0.05541616058349609, 0.055892478942871096, 0.05602278518676758, 0.05629510498046875, 0.05843628692626953, 0.056631488800048826, 0.05613116836547852, 0.05656326293945312, 0.05694550323486328, 0.05689984130859375, 0.0567825927734375, 0.05654732894897461, 0.05663875198364258, 0.0566952018737793, 0.056377281188964845, 0.056420703887939454, 0.05641836929321289, 0.056534591674804686, 0.056103199005126954, 0.05613087844848633, 0.05640047836303711, 0.05628863906860351, 0.05611536026000977, 0.056594688415527346, 0.05580563354492187, 0.055970558166503905, 0.05670297622680664, 0.05624745559692383, 0.06635810852050782, 0.05611110305786133, 0.05620956802368164, 0.05604502487182617, 0.0560316162109375, 0.056061183929443356, 0.057243457794189455, 0.05599942398071289, 0.056051712036132816, 0.05630265426635742, 0.055796417236328125, 0.055549312591552734, 0.05530713653564453, 0.05586329650878906, 0.05531238555908203, 0.05679017639160156, 0.05628195190429688, 0.056387008666992186, 0.05617523193359375, 0.05634799957275391, 0.05650908660888672, 0.056772415161132815, 0.05601705551147461, 0.05602659225463867, 0.056449535369873044, 0.056545280456542966, 0.05661491012573242, 0.056635265350341794, 0.05638361740112305, 0.056506591796875, 0.056403743743896485, 0.06072662353515625, 0.05719516754150391, 0.05693993759155273, 0.056662113189697265, 0.0569126091003418, 0.0564890251159668, 0.056703071594238284, 0.056652000427246094, 0.05623235321044922, 0.05613302230834961, 0.055217952728271485, 0.05583647918701172, 0.05610902404785156, 0.05637129592895508, 0.056043487548828125, 0.05579814529418945, 0.05647564697265625, 0.05532380676269531, 0.055479137420654294, 0.05581414413452149, 0.055979839324951174, 0.05617273712158203, 0.05623311996459961, 0.055892223358154296, 0.05568368148803711, 0.05520793533325195, 0.05528163146972656, 0.05536550521850586, 0.05828623962402344, 0.05612518310546875, 0.056282848358154294, 0.056351264953613284, 0.05620121765136719, 0.055737632751464844, 0.05607916641235351, 0.05599555206298828, 0.05741027069091797, 0.05667638397216797, 0.05648998260498047, 0.05659830474853516, 0.05627734375, 0.05633001708984375, 
0.056069889068603516, 0.056420574188232424, 0.056242496490478515, 0.05664303970336914, 0.05635712051391602, 0.05870191955566406, 0.05695398330688477, 0.056539104461669924, 0.05663199996948242, 0.05641836929321289, 0.056659423828125, 0.05634076690673828, 0.056596446990966796, 0.057444801330566404, 0.056598464965820314, 0.056465377807617186, 0.05698569488525391, 0.0580824966430664, 0.05647411346435547, 0.056043617248535155, 0.05600185775756836, 0.05595568084716797, 0.05595808029174805, 0.05622822570800781, 0.05611289596557617, 0.055943168640136716, 0.055965023040771486, 0.05553014373779297, 0.05598751831054687, 0.05618153762817383, 0.05605292892456055, 0.05582166290283203, 0.05621033477783203, 0.056033184051513675, 0.056182880401611325, 0.055760894775390625, 0.05580361557006836, 0.05596092987060547, 0.05585359954833984, 0.05610947036743164, 0.05590995025634766, 0.056062400817871096, 0.05590774536132812, 0.05589052963256836, 0.055919776916503905, 0.055962753295898435, 0.055854816436767575, 0.056104991912841795, 0.056885215759277345, 0.056569854736328126, 0.05637907028198242, 0.056677921295166016, 0.05610063934326172, 0.05605478286743164, 0.0560225601196289, 0.056062049865722656, 0.05617907333374023, 0.056233985900878906, 0.05634038543701172, 0.05624358367919922, 0.056226558685302734, 0.05628067016601562, 0.0567704963684082, 0.05606787109375, 0.05600937652587891, 0.05596979141235352, 0.05669887924194336, 0.056301376342773435, 0.05619036865234375, 0.056510814666748045, 0.05575929641723633, 0.05628124618530273, 0.05599948883056641, 0.055974494934082034, 0.05615846252441406, 0.056309471130371096, 0.05625263977050781, 0.05711635208129883, 0.05728704071044922, 0.05608441543579101, 0.05692627334594726, 0.0555068473815918, 0.055550048828125, 0.05599814224243164, 0.05681388854980469, 0.05623993682861328, 0.05635910415649414, 0.056240127563476565, 0.055860897064208985, 0.05718460845947266, 0.05605574417114258, 0.05609392166137695, 0.056162208557128904, 0.05623855972290039, 0.05595286560058594, 0.056056224822998046, 0.05596377563476562, 0.05658012771606445, 0.056065601348876955, 0.05616419219970703, 0.05628780746459961, 0.056869056701660155, 0.05651846313476563, 0.05607628631591797, 0.05643199920654297, 0.05639641571044922, 0.05657190322875977, 0.056448478698730466, 0.05640758514404297, 0.05651968002319336, 0.05646764755249024, 0.05654230499267578, 0.05617532730102539, 0.056354686737060546, 0.056616127014160154, 0.05606063842773437, 0.05664521789550781, 0.05620374298095703, 0.05618700790405273, 0.0569727668762207, 0.05640975952148437, 0.05633331298828125, 0.05646121597290039, 0.05624812698364258, 0.05620915222167969, 0.05618719863891602, 0.056223743438720705, 0.056403358459472655, 0.0563721923828125, 0.05627897644042969, 0.05648774337768555, 0.05679091262817383, 0.05679526519775391, 0.059418014526367184, 0.0568276481628418, 0.056462047576904296, 0.056289409637451174, 0.05650022506713867, 0.05613363265991211, 0.055811393737792966, 0.0556201286315918, 0.05585321426391601, 0.05601887893676758, 0.05610707092285156, 0.0561868782043457, 0.056281089782714844, 0.05633638381958008, 0.05613158416748047, 0.05608652877807617, 0.05633219146728516, 0.0563590087890625, 0.056240127563476565, 0.056306720733642575, 0.056228832244873045, 0.056338432312011716, 0.056586238861083986, 0.05632624053955078, 0.055942337036132814, 0.05624803161621094, 0.056395809173583986, 0.05559091186523438, 0.055695358276367186, 0.05589347076416016, 0.05659292984008789, 0.05610671997070313, 0.055427425384521486, 0.055535457611083985, 
0.055967487335205075, 0.05588620758056641, 0.057097599029541014, 0.056156768798828124, 0.056452766418457034, 0.056392318725585935, 0.056245952606201174, 0.05647577667236328, 0.05605337524414063, 0.05619535827636719, 0.055944480895996095, 0.055998497009277344, 0.05608428955078125, 0.05846499252319336, 0.05639759826660156, 0.05644905471801758, 0.05599795150756836, 0.056750431060791015, 0.05620172882080078, 0.05630316925048828, 0.05596819305419922, 0.05629747009277344, 0.05829033660888672, 0.056446815490722654, 0.05698675155639649, 0.05739334487915039, 0.05594591903686524, 0.056207359313964846, 0.05629494476318359, 0.05615811157226563, 0.05632883071899414, 0.05637273788452148, 0.056301151275634766, 0.057432926177978516, 0.056346622467041016, 0.05689260864257813, 0.056420833587646484, 0.05638588714599609, 0.05633433532714844, 0.0562852783203125, 0.056354721069335936, 0.05612972640991211, 0.05621126556396484, 0.055799137115478514, 0.055543838500976564, 0.05584751892089844, 0.0558263053894043, 0.056475807189941406, 0.05626675033569336, 0.056282176971435546, 0.056140735626220704, 0.056831390380859374, 0.05637139129638672, 0.05521587371826172, 0.05617484664916992, 0.056220897674560545, 0.05605257415771484, 0.056154048919677735, 0.05607424163818359, 0.05607769775390625, 0.055604991912841795, 0.055433406829833984, 0.05541328048706055, 0.05587907028198242, 0.05595827102661133, 0.056016895294189455, 0.056371070861816405, 0.05610079956054687, 0.0562355842590332, 0.05640569686889649, 0.05683091354370117, 0.05616131210327149, 0.05647062301635742, 0.056317054748535156, 0.056267295837402344, 0.056412384033203124, 0.05632220840454102, 0.0563218879699707, 0.05624627304077148, 0.05636310577392578, 0.056473182678222655, 0.0562465934753418, 0.056578144073486325, 0.05634652709960938, 0.056784896850585936, 0.05904703903198242, 0.056922016143798826, 0.0565689582824707, 0.05637923049926758, 0.05630361557006836, 0.05633446502685547, 0.05602249526977539, 0.05600710296630859, 0.05605782318115234, 0.05599132919311523, 0.05635171127319336, 0.05593241500854492, 0.05601897430419922, 0.05593132781982422, 0.056070369720458986, 0.056212894439697264, 0.056209823608398435, 0.05615756988525391, 0.05631203079223633, 0.05631340789794922, 0.05655420684814453, 0.056068225860595705, 0.05563699340820313, 0.05573324966430664, 0.056118305206298826, 0.05621427154541016, 0.05629359817504883, 0.0562213134765625, 0.05631606292724609, 0.05589436721801758, 0.05606387329101563]",tokens/s,17.762438938764653,,