config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.quantization_config.llm_int8_threshold,config.backend.quantization_config.load_in_8bit,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,
report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency,config.backend.quantization_config.load_in_4bit 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, 
softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. 
Tried to allocate 1.45 GiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14672.269312,7846.428672,0.0,7451.181056,7445.507072,s,1,32.763345703125,32.763345703125,0.0,32.763345703125,32.763345703125,32.763345703125,32.763345703125,[32.763345703125],,kWh,0.0007489382572000029,8.260589899749046e-05,0.0002828680040719983,0.0011144121602694916,,MB,1351.00416,7997.423616,0.0,7581.20448,7570.843648,s,10,1.2404065856933593,0.12404065856933592,0.00030640996927625664,0.12393275070190429,0.12430681533813477,0.12457982215881347,0.12479822761535644,"[0.12392790222167968, 0.12415155029296875, 0.12395753479003906, 0.12372345733642579, 0.1239375991821289, 0.12389715576171875, 0.12390732574462891, 0.12485282897949218, 0.12424614715576172, 0.12380508422851562]",tokens/s,2063.8394132428907,kWh,3.6786270893749892e-06,4.056642756230992e-07,2.4400644520500062e-06,6.524355817048094e-06,tokens/kWh,39237590.22018294,MB,1371.017216,8010.006528,0.0,7593.787392,7514.46784,s,10,73.00622705078125,7.3006227050781245,0.016843565972124462,7.300080810546875,7.32080166015625,7.3236291015625,7.3258910546875,"[7.310501953125, 7.32017333984375, 7.32645654296875, 7.31858203125, 7.2788623046875, 7.3018740234375, 7.29828759765625, 7.2871904296875, 7.2786982421875, 7.2856005859375]",tokens/s,8.629400880582258,kWh,0.0002144734256918747,2.3657444612008882e-05,0.00010982166424615,0.00034795253455003364,tokens/kWh,181059.1783200273,,s,630,73.00248457336426,0.11587695964026072,0.00110443629695133,0.11570410919189453,0.11685232391357421,0.11739402809143067,0.11972997146606446,"[0.11452957153320313, 0.11483833312988281, 0.11656124877929687, 0.11619590759277344, 0.11579801940917969, 0.11623744201660156, 0.11662425231933594, 0.11568879699707031, 0.11590313720703126, 0.11606221008300781, 0.11550310516357422, 0.11642403411865235, 0.11569538879394531, 0.11946640014648438, 0.11668527984619141, 0.11578368377685547, 0.1155125732421875, 0.11620771026611328, 0.11576182556152344, 0.11564543914794922, 0.11673193359375, 0.11538022613525391, 0.11539759826660156, 0.11620297241210938, 0.11548316955566407, 0.11542034912109375, 0.11580604553222656, 0.11577632141113281, 0.11681517028808594, 0.11605487823486328, 0.1160478744506836, 0.11687036895751954, 0.11608246612548828, 0.11632128143310547, 0.11547443389892578, 0.11624038696289063, 0.11539826965332031, 0.11659506988525391, 0.11608380889892578, 0.11556515502929687, 0.11602333068847656, 0.11582825469970703, 0.1155917739868164, 0.11728233337402344, 0.11528256225585938, 0.11529148864746094, 0.11691677093505859, 0.11581862640380859, 0.11657981109619141, 0.11656031799316406, 0.11620156860351563, 0.11576521301269531, 0.11589430236816406, 0.11626697540283203, 0.11675651550292969, 0.11668275451660157, 0.11625676727294922, 0.11612569427490234, 0.11578691101074219, 0.11580038452148438, 0.11547293090820313, 
0.11570175933837891, 0.11536895751953125, 0.11687737274169922, 0.11508806610107422, 0.11549465942382812, 0.11538022613525391, 0.11739981079101562, 0.11721932983398438, 0.11541814422607422, 0.11634505462646484, 0.11545062255859374, 0.11627254486083985, 0.11633724975585938, 0.11598438262939453, 0.11570508575439453, 0.11619609832763672, 0.11603753662109376, 0.11586975860595704, 0.11719388580322265, 0.11563097381591797, 0.11904204559326172, 0.11523043060302735, 0.1154985580444336, 0.1155939483642578, 0.11538019561767578, 0.11484162902832032, 0.1147146224975586, 0.11590198516845703, 0.11548925018310546, 0.11686707305908203, 0.11655577850341797, 0.11528806304931641, 0.11583001708984375, 0.1160947494506836, 0.11587200164794922, 0.11686780548095703, 0.11605197143554688, 0.11841741180419922, 0.11738861083984375, 0.11623465728759766, 0.11661891174316406, 0.11627372741699218, 0.11578374481201172, 0.11564236450195313, 0.11643084716796875, 0.11521228790283203, 0.11620556640625, 0.11628134155273437, 0.1159331817626953, 0.11620352172851563, 0.11607244873046875, 0.11576729583740235, 0.11539250946044922, 0.1168506851196289, 0.11611039733886719, 0.11683936309814454, 0.11711270141601562, 0.11536192321777344, 0.11720294189453125, 0.11638169860839843, 0.1161871337890625, 0.11709645080566407, 0.11624447631835938, 0.11681177520751954, 0.116748291015625, 0.11527318572998047, 0.11543401336669921, 0.11540995025634766, 0.11503043365478516, 0.11558560180664063, 0.11524079895019532, 0.11697577667236328, 0.11695254516601562, 0.11592963409423829, 0.11561779022216796, 0.11573407745361328, 0.11544416046142578, 0.11647369384765625, 0.1158043212890625, 0.11612911987304687, 0.11678543853759765, 0.11621932983398438, 0.11657305908203125, 0.11668262481689454, 0.11643309020996094, 0.11674009704589844, 0.11842912292480469, 0.11768070220947266, 0.11574877166748047, 0.11583853149414063, 0.11601692962646484, 0.11547519683837891, 0.11855046081542969, 0.11601926422119141, 0.11558707427978515, 0.11647984313964843, 0.11621392059326172, 0.11643907165527344, 0.11696028900146484, 0.11560374450683594, 0.11595843505859375, 0.11663359832763671, 0.11612569427490234, 0.11711443328857422, 0.11586342620849609, 0.12008096313476563, 0.117136962890625, 0.11636573028564454, 0.11649846649169922, 0.11624652862548829, 0.11608882904052735, 0.115525634765625, 0.11608198547363281, 0.1155038070678711, 0.11671961975097657, 0.11607202911376953, 0.11635689544677734, 0.11618303680419922, 0.11603321838378906, 0.11581321716308594, 0.11521647644042969, 0.11638486480712891, 0.11721820831298828, 0.11633574676513672, 0.11634108734130859, 0.11624880218505859, 0.11627142333984375, 0.11613798522949219, 0.11616416168212891, 0.11554962921142578, 0.11656089782714844, 0.11655958557128906, 0.11545629119873047, 0.11870003509521485, 0.11551948547363282, 0.11541241455078124, 0.11497325134277343, 0.1163076171875, 0.11509590148925782, 0.11628463745117187, 0.11531753540039062, 0.11491680145263672, 0.11540316772460937, 0.11536124420166016, 0.11543385314941407, 0.1155423355102539, 0.11577932739257812, 0.1160660171508789, 0.11579996490478515, 0.11607920074462891, 0.11622841644287109, 0.11564972686767579, 0.1157801284790039, 0.11557273864746094, 0.11568476867675781, 0.11533168029785157, 0.1160970230102539, 0.11662739562988281, 0.11583209228515624, 0.11573725128173828, 0.11552985382080078, 0.11511398315429687, 0.11516108703613281, 0.11540275573730469, 0.11520614624023437, 0.11580006408691407, 0.11532083129882813, 0.11495423889160156, 0.11555225372314454, 0.11504169464111329, 
0.11504227447509766, 0.11609970855712891, 0.11823030090332032, 0.11625468444824219, 0.1170862045288086, 0.11584998321533203, 0.11606221008300781, 0.12886015319824218, 0.11599180603027344, 0.11555916595458984, 0.11538960266113281, 0.11507183837890625, 0.11600873565673828, 0.11629180908203125, 0.1157918701171875, 0.11571405029296875, 0.11593059539794921, 0.11589647674560546, 0.116271484375, 0.11578892517089844, 0.12510502624511718, 0.11464498901367187, 0.11496979522705078, 0.11554835510253907, 0.11577571105957031, 0.11553833770751953, 0.11566079711914062, 0.11619328308105469, 0.11679948425292969, 0.11659056091308594, 0.11601923370361328, 0.11529420471191407, 0.11530400085449219, 0.11545849609375, 0.11508274841308594, 0.11535411071777343, 0.11777606201171875, 0.11600313568115235, 0.11548652648925781, 0.11446495819091797, 0.11589836883544923, 0.11531999969482422, 0.1149714584350586, 0.11472473907470702, 0.1153148193359375, 0.11522787475585937, 0.1162402572631836, 0.11573289489746094, 0.11688396453857422, 0.11598783874511719, 0.11566758728027343, 0.1160060806274414, 0.11558380889892578, 0.11530976104736328, 0.11490982055664062, 0.11540908813476562, 0.11472383880615235, 0.11536630249023437, 0.1152518081665039, 0.11517513275146485, 0.11479273223876953, 0.11486729431152344, 0.11498384094238281, 0.11565007781982421, 0.11554045104980469, 0.11450748443603516, 0.11555458831787109, 0.11536383819580077, 0.11473101043701171, 0.11535529327392578, 0.11622434997558594, 0.1161124496459961, 0.11739846038818359, 0.1167831039428711, 0.11582054138183594, 0.1159188461303711, 0.1151119384765625, 0.11510578918457032, 0.11470579528808594, 0.11499378967285157, 0.115525634765625, 0.11549462127685547, 0.11498086547851563, 0.11529379272460938, 0.11411670684814453, 0.11467472076416016, 0.11737187194824218, 0.1153611831665039, 0.11579596710205078, 0.11557743835449219, 0.11555379486083985, 0.12084889221191407, 0.11642259216308594, 0.11577667236328125, 0.11585775756835938, 0.11622252655029297, 0.115736572265625, 0.11689299011230468, 0.11626566314697266, 0.11529734039306641, 0.11509446716308594, 0.11529216003417969, 0.11589753723144532, 0.11549983978271484, 0.11558911895751953, 0.11799552154541015, 0.11575462341308594, 0.11486991882324218, 0.11506723022460938, 0.11505427551269531, 0.11476573181152344, 0.11488092803955079, 0.115089599609375, 0.11552086639404296, 0.11547325134277343, 0.1158818588256836, 0.1151878433227539, 0.11618029022216797, 0.11619193267822266, 0.11604563140869141, 0.1158852767944336, 0.11848802947998047, 0.11654659271240235, 0.11593212890625, 0.11509760284423828, 0.11547853088378907, 0.11558297729492187, 0.1151072006225586, 0.11537471771240235, 0.11601110076904297, 0.11583888244628907, 0.11573043060302735, 0.11558204650878906, 0.1155367660522461, 0.11564220428466797, 0.11663318634033203, 0.11507977294921876, 0.11521024322509765, 0.11493775939941406, 0.11575033569335938, 0.11785282897949219, 0.1165660171508789, 0.11696537780761719, 0.11634073638916016, 0.11640831756591796, 0.11642675018310547, 0.11637964630126953, 0.11514243316650391, 0.11537612915039062, 0.11561583709716797, 0.11594560241699219, 0.11534646606445312, 0.11527164459228516, 0.11720953369140626, 0.11546422576904297, 0.115392578125, 0.11623881530761719, 0.11964415740966797, 0.11600895690917969, 0.1157754898071289, 0.11559241485595703, 0.11547318267822265, 0.11555840301513672, 0.1156193618774414, 0.1159110107421875, 0.11647193908691406, 0.11600281524658203, 0.11582182312011718, 0.11624479675292969, 0.11561619567871094, 0.11538329315185547, 
0.1190983657836914, 0.11521842956542969, 0.11550105285644531, 0.11493312072753906, 0.11514534759521484, 0.11546419525146484, 0.11618035125732422, 0.11477046203613281, 0.11458342742919922, 0.11455276489257812, 0.11488655853271484, 0.11474777221679687, 0.1149111328125, 0.11619868469238281, 0.1154629135131836, 0.11552703857421875, 0.11597395324707031, 0.11639078521728516, 0.11929097747802735, 0.12006486511230469, 0.11651225280761719, 0.11616108703613282, 0.11594547271728516, 0.11477401733398437, 0.11515443420410157, 0.11530496215820313, 0.11524505615234375, 0.1146081314086914, 0.1152146224975586, 0.11593494415283204, 0.11546141052246094, 0.11540348815917968, 0.1153986587524414, 0.11592499542236329, 0.11523430633544922, 0.11510012817382813, 0.11743030548095704, 0.11578163146972656, 0.11629519653320312, 0.11486300659179688, 0.11534726715087891, 0.11598799896240235, 0.11627798461914063, 0.11551542663574219, 0.11631206512451171, 0.1156648941040039, 0.11658035278320313, 0.11745442962646484, 0.11464054107666016, 0.11527449798583984, 0.11492147064208984, 0.11475302124023437, 0.11493836975097656, 0.11556790161132813, 0.11580486297607422, 0.11555433654785156, 0.11554716491699218, 0.11587068939208985, 0.11560755157470703, 0.11505049896240234, 0.11480025482177734, 0.11490243530273438, 0.11586653137207031, 0.11570553588867187, 0.11593891143798828, 0.11583977508544922, 0.11565650939941406, 0.11546588897705078, 0.11580620574951171, 0.11529475402832032, 0.11477398681640626, 0.11639318084716797, 0.11510150146484376, 0.11568172454833985, 0.1150469741821289, 0.11505609893798828, 0.11469878387451173, 0.1152696304321289, 0.11472077178955079, 0.11540070343017578, 0.11531059265136719, 0.11496166229248046, 0.11542400360107422, 0.11492710113525391, 0.11532486724853516, 0.11784614562988281, 0.11624428558349609, 0.11549734497070313, 0.11582486724853516, 0.11602537536621094, 0.11586339569091797, 0.11976502227783203, 0.11631001281738282, 0.11598451232910156, 0.11549081420898437, 0.11571772766113281, 0.11545846557617187, 0.1172152328491211, 0.11567922973632813, 0.11570313262939454, 0.11511465454101563, 0.1161523208618164, 0.11456671905517578, 0.11443654632568359, 0.11782345581054687, 0.11531849670410156, 0.1149603500366211, 0.1152569580078125, 0.11484355163574218, 0.11507379150390624, 0.11519391632080078, 0.11592908477783204, 0.11622809600830078, 0.1167237091064453, 0.11624447631835938, 0.1160263671875, 0.11568156433105468, 0.11551817321777344, 0.11504774475097657, 0.11537667083740234, 0.1172031021118164, 0.11671142578125, 0.11492556762695312, 0.11512387084960937, 0.11513203430175781, 0.11528422546386718, 0.1152248306274414, 0.1157798080444336, 0.11562300872802735, 0.11537059020996093, 0.11661138916015625, 0.1156888656616211, 0.11584976196289062, 0.11620912170410157, 0.11556486511230468, 0.11563037109375, 0.11561574554443359, 0.11579718780517578, 0.11608553314208984, 0.11619261169433594, 0.11529196929931641, 0.11514969635009766, 0.11481292724609375, 0.11512131500244141, 0.11493666839599609, 0.11474931335449219, 0.11454038238525391, 0.11586326599121094, 0.11523129272460937, 0.114789794921875, 0.11492617797851562, 0.11482099151611327, 0.11505059051513672, 0.11475772857666015, 0.11585939025878907, 0.11445452880859375, 0.11580003356933594, 0.11611539459228516, 0.11541891479492188, 0.11693849945068359, 0.11555213165283203, 0.11575759887695312, 0.11596611022949219, 0.11527161407470703, 0.115281982421875, 0.11398477172851562, 0.113991455078125, 0.11429484558105468, 0.11448822021484376, 0.11439449310302735, 0.11692505645751954, 
0.11445577239990234, 0.11445327758789063, 0.11527155303955078, 0.11632621002197266, 0.11954402923583984, 0.11546562957763672, 0.11484844970703124, 0.1145528335571289, 0.1148375015258789, 0.1152732162475586, 0.11818982696533203, 0.1166867218017578, 0.11629206085205078, 0.1159559326171875, 0.11615574645996093, 0.11649520111083984, 0.11457091522216797, 0.11501590728759765, 0.11500147247314453, 0.1203773422241211, 0.11702297973632812, 0.11712281799316407, 0.11530159759521484, 0.11517008209228516, 0.11482281494140625, 0.11503392028808594, 0.11487081909179687, 0.1164222412109375, 0.1154677734375, 0.11506781005859375, 0.11548876953125, 0.1152791976928711, 0.11550348663330078, 0.11497459411621094, 0.11826959991455079, 0.11698198699951172, 0.11628125, 0.11780364990234375, 0.11544534301757813, 0.11539907073974609, 0.11517542266845703, 0.11500543975830078, 0.11467571258544922, 0.11506630706787109, 0.11584159851074219, 0.11535155487060547, 0.1148960952758789, 0.11508406066894532, 0.1151119384765625, 0.11504790496826171, 0.11530499267578125, 0.11487026977539062, 0.11573248291015625, 0.11535769653320313, 0.11602031707763671, 0.11568624114990235, 0.11543062591552734]",tokens/s,8.629843267414795,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, 
S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", 
line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14028.345344,7835.942912,0.0,7440.695296,7427.899392,s,1,31.845142578125,31.845142578125,0.0,31.845142578125,31.845142578125,31.845142578125,31.845142578125,[31.845142578125],,kWh,0.0007130096238708574,7.86430868783795e-05,0.0002699088270380068,0.0010615615377872437,,MB,1197.400064,8416.854016,0.0,8000.63488,7875.673088,s,10,0.9571953277587891,0.09571953277587891,0.0002780228861242757,0.09565481567382812,0.09612270812988281,0.09613992919921875,0.0961537060546875,"[0.09561958312988281, 0.0955525131225586, 0.09569004821777344, 0.0959486083984375, 0.09580300903320313, 0.09615715026855469, 0.09520767974853515, 0.09550665283203125, 0.09611888122558594, 0.09559120178222656]",tokens/s,2674.4802505399543,kWh,2.9104864475250007e-06,3.2097065990848037e-07,1.9323502807522923e-06,5.163807388185773e-06,tokens/kWh,49575822.79031167,MB,1215.070208,8437.825536,0.0,8021.6064,7976.51712,s,10,46.77368115234375,4.6773681152343745,0.006220704046563695,4.6768447265625,4.6834939453125,4.68686806640625,4.68956736328125,"[4.6902421875, 4.68072509765625, 4.673880859375, 4.669634765625, 4.6815068359375, 4.67028515625, 4.682744140625, 4.6764287109375, 4.67097265625, 
4.6772607421875]",tokens/s,13.469113066984505,kWh,0.00013614018957080752,1.5016638126850834e-05,8.281410640524727e-05,0.00023397093410290563,tokens/kWh,269264.2154101552,,s,630,46.77019203186032,0.07423840005057197,0.0007574064742200948,0.07412756729125977,0.07484287872314453,0.0753373062133789,0.07690783737182619,"[0.07384646606445312, 0.07495516967773437, 0.07387904357910156, 0.07421788787841797, 0.0753267822265625, 0.07419551849365234, 0.07395136260986328, 0.07432806396484375, 0.07442617797851563, 0.07405133056640625, 0.07421737670898437, 0.07420572662353515, 0.07417855834960937, 0.07492198181152344, 0.07469612884521484, 0.07372576141357422, 0.07386934661865234, 0.07431375885009765, 0.07379161834716796, 0.07350943756103516, 0.07369932556152343, 0.07396966552734376, 0.07435369873046875, 0.07426137542724609, 0.07407369232177734, 0.07368141174316406, 0.07334706878662109, 0.07310131072998047, 0.07318732452392578, 0.07305548858642578, 0.0737118377685547, 0.074144287109375, 0.0743180160522461, 0.07401862335205078, 0.07528575897216797, 0.07415676879882813, 0.07420317077636719, 0.07468614196777344, 0.07795334625244141, 0.07461478424072265, 0.0746618881225586, 0.07605862426757813, 0.07501414489746094, 0.07449350738525391, 0.0748415985107422, 0.07443052673339844, 0.07450224304199218, 0.07426742553710937, 0.07977708435058593, 0.07535481262207032, 0.07832371520996094, 0.0746393585205078, 0.07452035522460937, 0.07430950164794922, 0.07418675231933594, 0.07441238403320312, 0.07403110504150391, 0.07403520202636718, 0.07472128295898438, 0.07396521759033203, 0.07382806396484375, 0.07371456146240235, 0.07344614410400391, 0.07390223693847656, 0.07424188995361328, 0.07408422088623047, 0.07394111633300782, 0.07417414093017578, 0.07400441741943359, 0.07421609497070313, 0.0743584976196289, 0.07400563049316407, 0.07415283203125, 0.07396886444091796, 0.07385167694091797, 0.07394416046142578, 0.07433424377441407, 0.0744283218383789, 0.07406230163574219, 0.07406246185302734, 0.0743196792602539, 0.07425759887695313, 0.07445916748046875, 0.07585878753662109, 0.0743724136352539, 0.07499369812011719, 0.07436908721923828, 0.0746615982055664, 0.07434278106689453, 0.07388211059570313, 0.07459225463867188, 0.07414988708496094, 0.07400653076171874, 0.07392870330810547, 0.07387059020996094, 0.07559273529052735, 0.07500895690917969, 0.07461315155029297, 0.07420105743408204, 0.07396803283691407, 0.07390643310546875, 0.0739530258178711, 0.07517183685302735, 0.07425027465820312, 0.07425987243652343, 0.07375929260253906, 0.0736885757446289, 0.07380633544921875, 0.07389794921875, 0.07411100769042969, 0.07467417907714843, 0.07414963531494141, 0.07412556457519531, 0.07416831970214843, 0.07440589141845703, 0.07447756958007813, 0.07455948638916016, 0.07436083221435547, 0.0744120330810547, 0.07423590087890625, 0.07411436462402343, 0.07422045135498047, 0.074297119140625, 0.07485440063476563, 0.07536831665039062, 0.073984130859375, 0.07405165100097656, 0.07450902557373047, 0.07384512329101563, 0.07444483184814453, 0.07397465515136718, 0.07417948913574218, 0.07388159942626953, 0.07373836517333984, 0.07352678680419922, 0.074316162109375, 0.07442022705078125, 0.07405875396728516, 0.07348287963867188, 0.07333039855957031, 0.07363967895507813, 0.07349750518798828, 0.07415558624267578, 0.07420342254638672, 0.07430569458007813, 0.07400380706787109, 0.07385897827148437, 0.07492793273925781, 0.07421228790283203, 0.07466393280029297, 0.0741346206665039, 0.07414390563964844, 0.07412995147705079, 0.07436003112792969, 0.07422806549072265, 
0.07419971466064453, 0.07436310577392578, 0.07409347534179687, 0.07435148620605468, 0.07432806396484375, 0.07449190521240234, 0.07434464263916016, 0.07446304321289063, 0.07488864135742188, 0.07543251037597656, 0.07389756774902344, 0.07483433532714844, 0.07320543670654298, 0.07362332916259766, 0.07421129608154296, 0.07558771514892579, 0.07434003448486329, 0.07363452911376953, 0.07515103912353516, 0.07368739318847656, 0.07405487823486329, 0.07471389007568359, 0.07409574127197266, 0.07391337585449219, 0.07386640167236327, 0.07356690979003906, 0.07466950225830078, 0.07503622436523437, 0.07395382690429687, 0.07409712219238282, 0.07379558563232422, 0.07401273345947265, 0.07412745666503906, 0.07425791931152344, 0.0749494400024414, 0.07404051208496094, 0.0738885726928711, 0.07410688018798828, 0.07389798736572266, 0.07429555511474609, 0.07409168243408203, 0.07408290863037109, 0.07374835205078124, 0.07351513671875, 0.07363510131835938, 0.07330889892578125, 0.07366233825683594, 0.074268798828125, 0.07411302185058594, 0.07342243194580078, 0.07373833465576173, 0.07357266998291015, 0.07340985870361329, 0.073614013671875, 0.07383577728271484, 0.07367935943603515, 0.07333283233642578, 0.07369868469238282, 0.07413820648193359, 0.07350905609130859, 0.0737525405883789, 0.07409257507324218, 0.0739202880859375, 0.07380604553222657, 0.07379132843017579, 0.07394486236572266, 0.07385740661621094, 0.07388694763183594, 0.07384758758544922, 0.0740474853515625, 0.07422557067871094, 0.0744090576171875, 0.07675801849365234, 0.07511357116699219, 0.07444351959228515, 0.07445065307617188, 0.07449967956542969, 0.07409954833984375, 0.07430729675292969, 0.07420336151123047, 0.07407939147949219, 0.07427369689941406, 0.07417826843261718, 0.0734169921875, 0.0734900131225586, 0.074336669921875, 0.07435279846191406, 0.07457164764404296, 0.07446080017089844, 0.07429145812988282, 0.07424348449707031, 0.07730643463134766, 0.07422156524658204, 0.07430976104736328, 0.07433216094970703, 0.07430944061279297, 0.07408150482177735, 0.07401055908203125, 0.07384457397460938, 0.0740167007446289, 0.07429676818847657, 0.07397618865966797, 0.07412374114990235, 0.07407202911376953, 0.07416015625, 0.07488841247558593, 0.07444764709472657, 0.07451551818847656, 0.07437612915039063, 0.07496908569335937, 0.0741949462890625, 0.0748636474609375, 0.074531005859375, 0.07481375885009765, 0.07451491546630859, 0.07425433349609376, 0.07408025360107422, 0.07356396484375, 0.07369337463378907, 0.07386726379394531, 0.07411673736572266, 0.07372838592529297, 0.0737423324584961, 0.07369910430908203, 0.07351660919189452, 0.0735647964477539, 0.07421900939941406, 0.07406003570556641, 0.07398838043212891, 0.07361459350585937, 0.07353148651123047, 0.07358432006835937, 0.07407100677490235, 0.07415596771240235, 0.07391596984863281, 0.07394972991943359, 0.07410070037841797, 0.07381775665283204, 0.07405299377441406, 0.0738987808227539, 0.07460275268554688, 0.07430342102050781, 0.07408640289306641, 0.07546470642089843, 0.07443456268310547, 0.07435465240478516, 0.07429244995117187, 0.07429203033447265, 0.07606476593017578, 0.07413123321533203, 0.07696201324462891, 0.07534591674804687, 0.0743034896850586, 0.07473942565917968, 0.07410307312011719, 0.07410406494140626, 0.0736447982788086, 0.07371981048583984, 0.07393075561523438, 0.07888243103027344, 0.07417804718017579, 0.07379404449462891, 0.07380931091308594, 0.07399689483642578, 0.07405391693115235, 0.07408406066894531, 0.07451443481445312, 0.07414112091064454, 0.07381459045410156, 0.07344918060302734, 0.07369929504394532, 
0.073695068359375, 0.07385545349121093, 0.07426457977294922, 0.074176513671875, 0.07410665893554688, 0.07359715270996094, 0.0739368667602539, 0.07409693145751953, 0.07436815643310547, 0.07452528381347656, 0.07426850891113282, 0.07427907562255859, 0.07439961242675781, 0.07439279937744141, 0.07439862060546874, 0.07402301025390624, 0.07428521728515625, 0.07443430328369141, 0.07459996795654297, 0.07455382537841797, 0.07403110504150391, 0.07419513702392579, 0.07434591674804687, 0.07404102325439453, 0.07366726684570313, 0.0737996826171875, 0.07383782196044922, 0.073755615234375, 0.07411033630371094, 0.074, 0.0736848602294922, 0.07330477142333984, 0.0737314224243164, 0.07595507049560547, 0.07426457977294922, 0.07496498870849609, 0.07403110504150391, 0.07401634979248047, 0.07375276947021485, 0.07349612426757812, 0.07384646606445312, 0.07395996856689453, 0.07407830047607422, 0.07382675170898438, 0.07396342468261718, 0.07412041473388672, 0.07451718139648437, 0.07432316589355469, 0.07457369232177734, 0.07511459350585938, 0.07445929718017578, 0.07433462524414063, 0.07575843048095703, 0.07457917022705078, 0.07433296203613281, 0.07442022705078125, 0.07351471710205078, 0.07323062133789063, 0.0732357406616211, 0.07336540985107422, 0.07448834991455078, 0.0761178207397461, 0.07524508666992187, 0.07415609741210938, 0.07446617889404297, 0.07411650848388672, 0.0741013412475586, 0.07453052520751953, 0.07383200073242188, 0.07435132598876953, 0.07457746887207031, 0.07401251220703126, 0.0736960678100586, 0.07347586822509766, 0.07463321685791016, 0.07463868713378906, 0.07433225250244141, 0.074271484375, 0.0746780776977539, 0.07488716888427735, 0.07391027069091796, 0.07427474975585938, 0.07426054382324218, 0.07403929901123046, 0.0742762222290039, 0.07445974731445312, 0.07532546997070312, 0.07500908660888672, 0.0747734375, 0.07458611297607422, 0.07420873260498047, 0.07446720123291016, 0.07431644439697266, 0.07389321899414063, 0.0740337905883789, 0.0739840316772461, 0.07379894256591797, 0.07381439971923828, 0.0738977279663086, 0.07387811279296876, 0.07387503814697266, 0.07442086029052734, 0.07394461059570312, 0.07379987335205078, 0.07445526123046875, 0.07421731567382812, 0.07400431823730469, 0.07378755187988281, 0.07414169311523437, 0.07447142028808594, 0.07544217681884766, 0.07570022583007813, 0.07466172790527344, 0.07435689544677734, 0.07486585235595702, 0.07456531524658203, 0.07419731140136719, 0.07665821075439454, 0.07592352294921875, 0.07556531524658203, 0.07458956909179687, 0.07424079895019531, 0.074176513671875, 0.07400857543945312, 0.07423766326904296, 0.07421389007568359, 0.07439542388916015, 0.07499571228027344, 0.07430758666992188, 0.07428822326660156, 0.07371663665771484, 0.07371161651611328, 0.07355596923828125, 0.07347200012207031, 0.07429238128662109, 0.07396438598632812, 0.0740126724243164, 0.07369522857666015, 0.07407001495361328, 0.0739546890258789, 0.07369987487792969, 0.07391165161132812, 0.07412767791748047, 0.07377555084228515, 0.07399209594726562, 0.07392041778564454, 0.07370873260498047, 0.07392092895507812, 0.0738842544555664, 0.07443267059326172, 0.07368482971191406, 0.07390729522705078, 0.07389654541015625, 0.07428860473632813, 0.07466070556640625, 0.07428688049316406, 0.07412582397460937, 0.07415369415283203, 0.07412931060791016, 0.074174560546875, 0.0742681884765625, 0.07448828887939453, 0.07443389129638672, 0.07413622283935548, 0.074176513671875, 0.07647859191894531, 0.07513279724121094, 0.0744981460571289, 0.07412726593017578, 0.07385088348388671, 0.07409037017822266, 
0.07391203308105469, 0.07350678253173829, 0.0737959976196289, 0.0740884780883789, 0.07426585388183594, 0.07394509124755859, 0.07341862487792969, 0.07359305572509765, 0.07458233642578126, 0.07430115509033203, 0.07398783874511719, 0.0736982421875, 0.07437721252441407, 0.07381196594238282, 0.07391836547851563, 0.07401904296875, 0.0742053451538086, 0.0741250228881836, 0.0742113265991211, 0.07399971008300782, 0.07404611206054687, 0.07420905303955078, 0.07471250915527344, 0.07436908721923828, 0.07426841735839844, 0.07427283477783203, 0.0740832290649414, 0.07385292816162109, 0.07452982330322265, 0.07423792266845704, 0.07411974334716796, 0.07387506866455078, 0.07401529693603516, 0.07383475494384766, 0.07385702514648437, 0.07382179260253906, 0.07393321228027344, 0.0741396484375, 0.07350032043457032, 0.07400240325927734, 0.0736396484375, 0.07394480133056641, 0.07377804565429688, 0.07406390380859375, 0.07570845031738281, 0.07424409484863281, 0.074176513671875, 0.07378329467773438, 0.0741949462890625, 0.07412918090820313, 0.0738736343383789, 0.07375772857666016, 0.0740703353881836, 0.0738617935180664, 0.07413276672363281, 0.07383446502685546, 0.07423667144775391, 0.0739835205078125, 0.07361151885986328, 0.07415580749511719, 0.07434223937988281, 0.07411158752441406, 0.074720703125, 0.07431206512451172, 0.07460269165039063, 0.07497843170166016, 0.07424224090576172, 0.07511248016357422, 0.07416054534912109, 0.07434381103515625, 0.07677519989013672, 0.0758121566772461, 0.07377581024169921, 0.07370925140380859, 0.07489977264404298, 0.07361516571044922, 0.0735888671875, 0.07380588531494141, 0.07448726654052734, 0.07392105865478515, 0.0838287353515625, 0.07399628448486328, 0.07375433349609375, 0.0735931167602539, 0.07356825256347656, 0.07335836791992187, 0.07353568267822265, 0.07370121765136718, 0.0738971176147461, 0.07378614044189453, 0.07381718444824219, 0.07355545806884765, 0.07642562866210938, 0.0739797134399414, 0.07396371459960938, 0.07396514892578125, 0.07389430236816406, 0.07426399993896485, 0.07393849945068359, 0.07424649810791016, 0.07404541015625, 0.07461942291259765, 0.07416438293457031, 0.07417855834960937, 0.0739205093383789, 0.07439949035644532, 0.07393888092041015, 0.07385043334960938, 0.07393551635742188, 0.07389603424072265, 0.07369075012207031, 0.07351641845703125, 0.07333740997314453, 0.07362928009033202, 0.0740626220703125, 0.0742011489868164, 0.07380786895751953, 0.07372211456298829, 0.07353078460693359, 0.0735215072631836, 0.07340013122558593, 0.07386918640136719, 0.07446150207519531, 0.07427382659912109, 0.07365897369384766, 0.07379154968261718, 0.07398563385009765, 0.07568793487548828, 0.07433296203613281, 0.07414777374267578, 0.07462092590332031, 0.07434361267089844, 0.0748572769165039]",tokens/s,13.470117881295792,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2016.866304,1252.982784,0.0,857.735168,829.14304,s,1,9.9037060546875,9.9037060546875,0.0,9.9037060546875,9.9037060546875,9.9037060546875,9.9037060546875,[9.9037060546875],,kWh,7.237408021669579e-05,7.9762110518436e-06,2.5385575864012022e-05,0.00010573586713255141,,MB,2069.7088,1542.38976,0.0,1126.170624,1096.740864,s,10,0.8928128356933593,0.08928128356933593,0.0006608523235376482,0.08932579040527344,0.08998932495117187,0.09003573532104492,0.09007286361694336,"[0.09008214569091796, 0.08997901153564453, 0.08905856323242188, 0.08850822448730469, 0.08941951751708985, 0.0879260482788086, 0.08981951904296875, 0.08923206329345704, 0.08985135650634765, 0.08893638610839844]",tokens/s,2867.342289060956,kWh,2.6036028127213455e-06,2.8713099847076336e-07,1.1604655301417716e-06,4.05119934133388e-06,tokens/kWh,63191163.512509495,MB,2073.796608,1565.458432,0.0,1149.239296,1096.743424,s,10,54.44341552734375,5.444341552734375,0.01229974840854743,5.44352294921875,5.4570804199218745,5.463798999023438,5.469173862304688,"[5.45218359375, 5.44805810546875, 5.4460361328125, 5.43594287109375, 5.441009765625, 5.4293740234375, 5.4284873046875, 5.45558740234375, 5.470517578125, 5.43621875]",tokens/s,11.57164725794229,kWh,0.0001584023618814468,1.7472330678598248e-05,6.178527927086001e-05,0.00023765997183090504,tokens/kWh,265084.6060220207,,s,630,54.4407574539184,0.08641390072050549,0.0010975190457818203,0.08617566680908204,0.08732050094604492,0.08799230728149414,0.09161460517883302,"[0.08679596710205079, 0.08654541015625, 0.08620751953125, 0.08669680023193359, 0.08647078704833984, 0.08680652618408204, 0.0862371826171875, 0.08638873291015625, 0.0860255355834961, 0.08643449401855469, 0.08658729553222656, 0.08620015716552734, 0.08715084838867188, 0.08659932708740234, 0.0880643539428711, 0.08645760345458985, 0.08768911743164062, 0.08693750762939453, 0.09107100677490235, 0.08609423828125, 0.0857456283569336, 0.08613890838623046, 0.08606924438476563, 0.08660582733154297, 0.08645782470703126, 0.08905347442626953, 0.08705443572998046, 0.08610012817382813, 0.08673187255859376, 0.08642176055908203, 0.08589584350585938, 0.0858662109375, 0.0866116485595703, 0.08666297912597656, 0.08682575988769531, 0.08644198608398437, 0.08630671691894531, 0.08627005004882812, 0.08645222473144532, 0.08651366424560547, 0.08695152282714844, 0.08606963348388671, 0.08620604705810547, 0.08622329711914062, 0.08617814636230468, 0.0862327651977539, 0.08665814208984375, 0.08599027252197265, 0.08622812652587891, 0.086395263671875, 0.08592227172851563, 0.0862198715209961, 0.08585004425048828, 0.08627001953125, 0.08603536224365234, 0.08692530822753906, 0.08606924438476563, 0.08604579162597656, 0.08583245086669922, 0.08568195343017578, 0.0865713882446289, 0.08634329223632813, 0.08732096099853516, 0.08675353240966797, 0.09192031860351563, 0.08682291412353516, 0.08732044982910156, 0.08725904083251954, 0.08827846527099609, 0.08769580841064453, 0.08686003112792968, 0.08635196685791016, 0.0859156494140625, 0.08629862213134766, 0.08617715454101563, 0.08597392272949218, 0.08585596466064453, 0.08655020904541015, 0.08590684509277344, 0.08622537231445312, 0.08674678039550782, 0.08654723358154297, 0.08596685028076172, 0.08646656036376953, 0.08603871917724609, 0.08628524780273437, 0.08624166107177735, 0.08637900543212891, 0.08599545288085937, 0.08662022399902344, 0.08658943939208984, 0.08637030029296874, 0.086614013671875, 0.08634681701660156, 0.0866517105102539, 0.08610623931884766, 0.08650927734375, 
0.08605519866943359, 0.08607743835449219, 0.08637644958496093, 0.08669593811035156, 0.08665702056884765, 0.08654557037353515, 0.08617382049560547, 0.08605465698242187, 0.0862873306274414, 0.08575590515136719, 0.08577228546142578, 0.08594009399414063, 0.08621478271484374, 0.08597299194335938, 0.08600950622558594, 0.0859668197631836, 0.08616588592529296, 0.08646249389648437, 0.08661808013916016, 0.08611225891113282, 0.08684496307373046, 0.08586470031738282, 0.08639250946044921, 0.08736370849609375, 0.08699126434326172, 0.08570674896240234, 0.08596479797363281, 0.0861143035888672, 0.08599868774414063, 0.08610002899169922, 0.0872872314453125, 0.08660163116455079, 0.08594908905029297, 0.086263427734375, 0.08770992279052735, 0.08619570922851562, 0.0853326416015625, 0.08598528289794923, 0.0858005142211914, 0.08627763366699219, 0.08565795135498047, 0.0917523193359375, 0.08838832092285157, 0.08667759704589843, 0.0860322265625, 0.08593004608154296, 0.08587232208251953, 0.0854686050415039, 0.08579366302490235, 0.08672870635986328, 0.08608258819580078, 0.08658147430419921, 0.08562723541259766, 0.08587715148925781, 0.08625698852539063, 0.08682768249511719, 0.08654029083251953, 0.08752537536621094, 0.08610963439941406, 0.08595852661132812, 0.0870284194946289, 0.08597840118408204, 0.0861662368774414, 0.0863121566772461, 0.08641613006591797, 0.08866387176513672, 0.0866746597290039, 0.08625385284423828, 0.08570687866210938, 0.08588540649414063, 0.08588825225830078, 0.08593417358398438, 0.08636278533935547, 0.08616915130615234, 0.08599814605712891, 0.08615666961669922, 0.08587532806396485, 0.08582089233398438, 0.0856171875, 0.08596685028076172, 0.08672051239013671, 0.08571686553955078, 0.0912774429321289, 0.08633753967285156, 0.08607510375976563, 0.08569884490966796, 0.08650752258300781, 0.08612035369873047, 0.08622089385986328, 0.08604057312011719, 0.08674527740478516, 0.08625132751464844, 0.08603014373779297, 0.08566989135742188, 0.08658678436279296, 0.08600166320800781, 0.08615817260742188, 0.08589884948730468, 0.0858862075805664, 0.08563177490234375, 0.08673702239990234, 0.08578457641601563, 0.08688025665283203, 0.08633782196044922, 0.08591862487792969, 0.08574816131591798, 0.08685011291503907, 0.08543344116210938, 0.08629273223876953, 0.08611190032958985, 0.08629891204833984, 0.0867476806640625, 0.08615046691894532, 0.08571539306640626, 0.08575142669677735, 0.0858641586303711, 0.08571116638183594, 0.0864692153930664, 0.08663005065917968, 0.08695228576660156, 0.08617779541015624, 0.08558796691894531, 0.08640128326416016, 0.08617916870117187, 0.08598979187011718, 0.08605900573730468, 0.08560617828369141, 0.08638076782226563, 0.08598118591308594, 0.08808448028564453, 0.08635945892333985, 0.08617772674560546, 0.0863238754272461, 0.08567295837402343, 0.08665542602539063, 0.08606329345703125, 0.08630470275878906, 0.08581999969482422, 0.08605270385742188, 0.0865418243408203, 0.08594419097900391, 0.08622348785400391, 0.08575596618652344, 0.08689862060546875, 0.08697551727294922, 0.08759327697753906, 0.08602588653564452, 0.08618495941162109, 0.08627001953125, 0.09024505615234375, 0.08612454223632812, 0.08580912017822266, 0.08622287750244141, 0.08646604919433594, 0.08626432037353515, 0.08644812774658203, 0.09576038360595703, 0.08624486541748047, 0.08614262390136719, 0.08574886322021484, 0.08567180633544921, 0.0853440933227539, 0.08564736175537109, 0.08615923309326172, 0.08672268676757812, 0.08583984375, 0.08976902770996094, 0.08639382171630859, 0.0864677734375, 0.08633350372314454, 0.08720793914794922, 
0.08571571350097656, 0.08518150329589844, 0.08569747161865235, 0.08629043579101563, 0.08636006164550782, 0.08566169738769532, 0.08693283081054687, 0.08557839965820313, 0.08594550323486329, 0.08578953552246094, 0.0859279327392578, 0.08573747253417968, 0.08579424285888672, 0.08608739471435548, 0.08590016174316406, 0.08627811431884766, 0.08609308624267578, 0.08593843078613281, 0.08617417907714844, 0.08633673858642578, 0.0866170883178711, 0.0864150390625, 0.0863018569946289, 0.08572819519042969, 0.08642073822021484, 0.0939702377319336, 0.08617056274414063, 0.08629023742675782, 0.08636835479736328, 0.0859768295288086, 0.08561312103271485, 0.08569014739990234, 0.08539663696289063, 0.08604348754882812, 0.08603443145751953, 0.08573725128173829, 0.0855042266845703, 0.08551321411132813, 0.0860231704711914, 0.08632867431640626, 0.08598137664794922, 0.08809654235839844, 0.08577008056640625, 0.0853831024169922, 0.08551248168945312, 0.08553129577636719, 0.08500994873046876, 0.08557839965820313, 0.08562655639648438, 0.086417724609375, 0.08617372894287109, 0.086087646484375, 0.08545587158203125, 0.08617171478271485, 0.08556845092773438, 0.08546918487548828, 0.08610163116455079, 0.08562726593017578, 0.08606310272216797, 0.08556134033203125, 0.08632319641113281, 0.08560435485839844, 0.08568243408203124, 0.085587646484375, 0.08583139038085938, 0.08645053100585938, 0.0854302749633789, 0.0862259521484375, 0.08625251007080079, 0.08554112243652344, 0.08527232360839844, 0.08581484985351563, 0.087019775390625, 0.08626914978027343, 0.08606339263916016, 0.0861943359375, 0.08633193969726563, 0.08640431976318359, 0.08559081268310546, 0.08655052947998047, 0.08572255706787109, 0.08773894500732422, 0.08662608337402344, 0.08591766357421875, 0.08575926208496094, 0.08561090850830078, 0.08559468841552734, 0.08555519866943359, 0.08797798156738282, 0.08607273864746094, 0.08564595031738281, 0.08584188842773438, 0.08820496368408202, 0.08650582122802734, 0.08548889923095702, 0.08568511962890625, 0.08609779357910156, 0.08647792053222657, 0.08603292846679687, 0.08678403472900391, 0.0959266586303711, 0.08604783630371093, 0.08563302612304688, 0.08536268615722656, 0.08523661041259765, 0.08549375915527344, 0.08538873291015625, 0.0858120346069336, 0.08655232238769531, 0.08594226837158203, 0.08625151824951172, 0.08618502044677734, 0.08576121520996094, 0.08573078155517579, 0.08565094757080079, 0.08554566192626953, 0.08578876495361328, 0.08619213104248047, 0.086329345703125, 0.0859156494140625, 0.08606044769287109, 0.08628489685058593, 0.08581324768066406, 0.08613811492919922, 0.08622156524658203, 0.08562102508544922, 0.08542384338378907, 0.0858419189453125, 0.08561254119873046, 0.08626790618896485, 0.08607350158691406, 0.08538505554199219, 0.08575360107421875, 0.08571286773681641, 0.08600748443603516, 0.08565593719482421, 0.08560176086425782, 0.08568089294433594, 0.08579071807861328, 0.08912467193603515, 0.09313465881347656, 0.08685606384277343, 0.08684748840332031, 0.08619213104248047, 0.08651776123046875, 0.0869552001953125, 0.086067138671875, 0.08590946960449218, 0.08597801971435547, 0.08600707244873047, 0.086001953125, 0.08557202911376953, 0.08599561309814453, 0.08573487854003906, 0.08631110382080077, 0.08620467376708985, 0.08650096130371093, 0.08628585815429687, 0.08568937683105468, 0.0855054702758789, 0.0855712661743164, 0.08556617736816406, 0.08583535766601562, 0.08586246490478516, 0.08617161560058594, 0.08655500793457031, 0.08630009460449219, 0.08587117004394532, 0.08527593231201172, 0.08700387573242188, 0.08605900573730468, 
0.08637814331054687, 0.08609417724609375, 0.08650838470458984, 0.08648851013183594, 0.08636678314208984, 0.08635391998291016, 0.08636383819580078, 0.08689081573486328, 0.08611020660400391, 0.08598694610595703, 0.08574755096435546, 0.08613660430908203, 0.08593679809570312, 0.08554710388183594, 0.08608972930908203, 0.08589446258544922, 0.08561529541015625, 0.085728515625, 0.0867701416015625, 0.08713587188720703, 0.08572297668457031, 0.08610419464111328, 0.08559276580810547, 0.08572652435302734, 0.0857545623779297, 0.08654755401611328, 0.08612342071533204, 0.08583372497558593, 0.086165283203125, 0.08624969482421875, 0.08599346923828124, 0.08696627044677735, 0.08600985717773438, 0.08580300903320312, 0.08574156951904296, 0.0877998046875, 0.08697606658935547, 0.08577069091796875, 0.08615849304199219, 0.08596771240234374, 0.08578185272216797, 0.08818720245361328, 0.08780425262451172, 0.08689356994628906, 0.085578369140625, 0.08592620849609375, 0.086412353515625, 0.0867422103881836, 0.08731629180908203, 0.08738323211669922, 0.08672319793701172, 0.08697270202636719, 0.08702738952636718, 0.08763005065917968, 0.08692259216308594, 0.08689427185058594, 0.08788886260986328, 0.08772927856445313, 0.08705667114257812, 0.08851315307617187, 0.0876871337890625, 0.08808448028564453, 0.0876605453491211, 0.08812879943847657, 0.0876981430053711, 0.08767369842529296, 0.08849161529541015, 0.08796591949462891, 0.0869048309326172, 0.08677311706542969, 0.08670066833496094, 0.08716697692871093, 0.08810272216796874, 0.08761974334716797, 0.08720591735839844, 0.08680818939208984, 0.08760972595214844, 0.08707891082763672, 0.08827494049072265, 0.08695990753173828, 0.08742275238037109, 0.09033773040771484, 0.09274742126464844, 0.08713565063476562, 0.08677267456054688, 0.08712806701660156, 0.08759225463867187, 0.08672892761230469, 0.08662207794189453, 0.08772463989257813, 0.08682701110839844, 0.08598086547851562, 0.08572902679443359, 0.08630738830566406, 0.08762931060791015, 0.0860257568359375, 0.0859268798828125, 0.08627523040771484, 0.0865525131225586, 0.08740137481689453, 0.0865322265625, 0.086355712890625, 0.08660185241699218, 0.08639078521728516, 0.08584358215332032, 0.08584604644775391, 0.08589110565185547, 0.08541830444335938, 0.08604054260253906, 0.08644713592529298, 0.08615132904052734, 0.08546390533447265, 0.08583782196044921, 0.0866890869140625, 0.08591609954833984, 0.08554521942138672, 0.08543436431884766, 0.08578195190429687, 0.08609849548339844, 0.08618598175048828, 0.0865955810546875, 0.08620835113525391, 0.08553692626953124, 0.08556947326660157, 0.08576790618896485, 0.09107516479492188, 0.08641510772705079, 0.08641516876220703, 0.08559808349609375, 0.0856170883178711, 0.08581581115722656, 0.08601404571533203, 0.08714387512207031, 0.08601814270019531, 0.0858642578125, 0.08583638763427734, 0.08540070343017578, 0.08570764923095703, 0.08589228820800782, 0.08556752014160156, 0.08573212432861328, 0.08628018951416015, 0.08569356536865234, 0.08576908874511718, 0.08849203491210937, 0.0870645751953125, 0.0859557113647461, 0.08631795501708985, 0.08837308502197265, 0.0865547866821289, 0.08562601470947266, 0.08543830108642578, 0.08571392059326172, 0.08668156433105469, 0.0866295394897461, 0.08749686431884765, 0.08779440307617188, 0.08654656219482422, 0.08577216339111328, 0.0872264633178711, 0.08595855712890625, 0.08567561340332032, 0.08565760040283203, 0.08687593841552735, 0.08601181030273437, 0.08618601226806641, 0.08698745727539063, 0.08647065734863281, 0.08580857849121094, 0.08628896331787109, 0.0864579849243164, 
0.086181884765625, 0.08595439910888672, 0.08627458953857423, 0.0880040283203125, 0.08604524993896484, 0.08608358764648437, 0.08564940643310547, 0.08570880126953125, 0.08599961853027344, 0.0860057601928711, 0.08620236968994141, 0.08756371307373047, 0.0869238052368164, 0.08600169372558594, 0.08610774230957031, 0.08796163177490235, 0.0865153579711914, 0.08561942291259765, 0.08564736175537109, 0.08548761749267578]",tokens/s,11.572212244351402,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. 
Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,6418.624512,3721.330688,0.0,3326.083072,3249.416192,s,1,17.51308203125,17.51308203125,0.0,17.51308203125,17.51308203125,17.51308203125,17.51308203125,[17.51308203125],,kWh,0.00030355860972501125,3.347633576307504e-05,0.0001152642588779923,0.0004522992043660786,,MB,1872.0768,4006.54336,0.0,3590.324224,3521.678336,s,10,0.7537450408935547,0.07537450408935548,0.0015239186641714225,0.07506945419311523,0.07570234146118164,0.07776300926208496,0.07941154350280762,"[0.0750257568359375, 0.07477347564697266, 0.07524441528320312, 0.07415190124511718, 0.07494012451171875, 0.07513983917236328, 0.07520329284667969, 0.07511315155029297, 0.07432940673828126, 0.07982367706298828]",tokens/s,3396.3739210345634,kWh,2.179116079975385e-06,2.4031599320989613e-07,1.440817902238829e-06,3.860249975424111e-06,tokens/kWh,66316948.80637212,MB,1872.0768,4117.692416,0.0,3701.47328,3608.866816,s,10,46.84605126953126,4.684605126953125,0.007524969231583742,4.68286474609375,4.69220595703125,4.6969560058593745,4.700756044921874,"[4.691150390625, 4.6794951171875, 4.68296044921875, 4.67753759765625, 4.689615234375, 4.7017060546875, 4.68276904296875, 4.6844453125, 4.68272998046875, 4.67364208984375]",tokens/s,13.448305309133985,kWh,0.00013488809747252246,1.4878509955376521e-05,6.468262388536061e-05,0.00021444923131325958,tokens/kWh,293775.8257010113,,s,630,46.8426582336426,0.07435342576768662,0.0008832220590586653,0.07422393417358399,0.07506347274780273,0.07541375885009766,0.07775482261657715,"[0.074025634765625, 0.07351910400390625, 0.07401996612548828, 0.07403593444824219, 0.07385874938964844, 0.07375103759765625, 0.0742747802734375, 0.07384678649902343, 0.07427065277099609, 0.0741827163696289, 0.07476950073242188, 0.074380126953125, 0.07473296356201171, 0.07454163360595703, 0.07522022247314453, 0.07421014404296875, 0.07441379547119141, 0.07455158233642578, 
0.07421049499511718, 0.07453984069824218, 0.07430963134765625, 0.0750665283203125, 0.07402508544921875, 0.07376950073242187, 0.07423126220703125, 0.07428169250488281, 0.07420928192138672, 0.07418595123291015, 0.07347625732421875, 0.0739815673828125, 0.0765818862915039, 0.07498748779296875, 0.07441206359863281, 0.07443865966796875, 0.07429529571533203, 0.07381145477294922, 0.07432653045654297, 0.0746455078125, 0.07427468872070313, 0.0772179183959961, 0.07436697387695312, 0.07419625854492187, 0.07437385559082031, 0.07434563446044921, 0.07463314819335938, 0.07422643280029297, 0.0747234878540039, 0.07499088287353516, 0.07479564666748047, 0.07472752380371094, 0.07459772491455079, 0.0750241928100586, 0.07492281341552734, 0.07458518218994141, 0.07457888031005859, 0.07452877044677735, 0.07496089935302734, 0.07479296112060548, 0.07433126068115234, 0.07418675231933594, 0.07471603393554688, 0.07414733123779296, 0.07420329284667969, 0.0741456298828125, 0.07455554962158203, 0.07408025360107422, 0.07369872283935547, 0.07378594970703126, 0.07388979339599609, 0.07396966552734376, 0.07413091278076171, 0.073830078125, 0.07413843536376953, 0.0745607681274414, 0.07404579162597656, 0.07455158233642578, 0.07447100830078125, 0.07401734161376954, 0.07440316772460938, 0.0745719985961914, 0.07415443420410156, 0.07387465667724609, 0.07409072113037109, 0.07444742584228516, 0.07606681823730468, 0.07806156921386719, 0.07460969543457031, 0.07473251342773438, 0.07454499053955078, 0.07431798553466797, 0.07454105377197266, 0.07460201263427735, 0.07524809265136718, 0.07431782531738282, 0.07431961822509765, 0.07375440216064454, 0.07394745635986329, 0.07437248229980468, 0.0740994873046875, 0.07385292816162109, 0.07413747406005859, 0.07356377410888672, 0.07372032165527344, 0.07609139251708984, 0.07507977294921875, 0.07382825469970702, 0.07361945343017579, 0.07405158233642578, 0.0739422378540039, 0.07380867004394531, 0.074176513671875, 0.0739854736328125, 0.07374700927734375, 0.07337718200683593, 0.07352499389648437, 0.07376982116699218, 0.07403833770751952, 0.07419789123535156, 0.07434860992431641, 0.07387340545654297, 0.07397154998779297, 0.07364406585693359, 0.07425241851806641, 0.07430143737792969, 0.07506313323974609, 0.07422978973388672, 0.07497920227050782, 0.07464927673339844, 0.07444841766357421, 0.0741580810546875, 0.0753448028564453, 0.07468198394775391, 0.07364415740966797, 0.07355526733398438, 0.07313823699951172, 0.07379443359375, 0.07435250854492187, 0.07427903747558594, 0.07423744201660157, 0.07497516632080078, 0.07385759735107422, 0.07343309020996094, 0.07383245086669922, 0.07407170867919922, 0.0748109130859375, 0.07343392181396484, 0.07360262298583985, 0.07376914978027344, 0.07381996917724609, 0.0743603515625, 0.07436784362792968, 0.0742740478515625, 0.07442262268066406, 0.0754277114868164, 0.07364796447753906, 0.07343497467041016, 0.07399862670898437, 0.07400102233886718, 0.07498751831054687, 0.07438336181640624, 0.07447468566894531, 0.07424076843261719, 0.07508589172363281, 0.07641053009033204, 0.07484860992431641, 0.07430758666992188, 0.07481139373779297, 0.07426252746582031, 0.07437516784667969, 0.07437721252441407, 0.07717683410644531, 0.07761673736572265, 0.07488143920898438, 0.0741560287475586, 0.07421952056884766, 0.07348633575439453, 0.07382630157470703, 0.07418470764160157, 0.07419062042236328, 0.07412329864501953, 0.07374649810791016, 0.07381619262695313, 0.0741396484375, 0.074347900390625, 0.0742938232421875, 0.07445510101318359, 0.07384678649902343, 0.07352524566650391, 0.07327948760986328, 
0.07411164855957031, 0.07422566223144532, 0.07411436462402343, 0.07441887664794922, 0.07405773162841797, 0.07377510070800782, 0.07455232238769531, 0.07428546905517579, 0.0743788833618164, 0.07450857543945312, 0.07406003570556641, 0.07441846466064453, 0.07388585662841797, 0.0742850570678711, 0.07408640289306641, 0.07469670104980469, 0.07440998077392579, 0.07449600219726563, 0.07441600036621093, 0.0736277084350586, 0.07403321838378907, 0.07385497283935546, 0.07370547485351563, 0.07410892486572265, 0.07386726379394531, 0.07366041564941406, 0.0736727066040039, 0.07387276458740234, 0.07769718170166015, 0.07413807678222656, 0.0738564453125, 0.07369785308837891, 0.07339584350585937, 0.07382624053955078, 0.0740561294555664, 0.0737996826171875, 0.07351868438720703, 0.07371612548828126, 0.07366614532470703, 0.07396393585205079, 0.07400038146972657, 0.07378112030029296, 0.07414176177978515, 0.07431378936767578, 0.07393417358398438, 0.07422415924072266, 0.07428905487060547, 0.07530028533935547, 0.07737014770507812, 0.07453001403808594, 0.07447795104980469, 0.07440201568603516, 0.07450233459472656, 0.074515869140625, 0.07434095764160156, 0.07476751708984375, 0.07412435150146485, 0.07437904357910156, 0.07391961669921875, 0.0742317123413086, 0.07403823852539063, 0.07443154907226562, 0.07427731323242187, 0.07362710571289062, 0.07352988433837891, 0.07362355041503907, 0.07385292816162109, 0.07430143737792969, 0.07427072143554687, 0.07411302185058594, 0.07356617736816407, 0.07405955505371094, 0.073963134765625, 0.07426227569580078, 0.07393315124511719, 0.07435836791992187, 0.0739276123046875, 0.07341983795166016, 0.07369004821777343, 0.07415094757080078, 0.07452105712890625, 0.07432038116455078, 0.07539670562744141, 0.07436089324951171, 0.07550348663330078, 0.07428144073486329, 0.07452588653564453, 0.07505184173583984, 0.07516476440429687, 0.0755080337524414, 0.07444108581542969, 0.0743503646850586, 0.07477260589599609, 0.07414201354980468, 0.07418182373046875, 0.07467501068115234, 0.07437107086181641, 0.07484361267089844, 0.07391900634765625, 0.07411686706542969, 0.07403266906738282, 0.07479145812988282, 0.07396985626220703, 0.07402425384521484, 0.07337779235839843, 0.07327584075927734, 0.07602543640136719, 0.07611158752441406, 0.0743966064453125, 0.07422566223144532, 0.07329792022705078, 0.07310294342041015, 0.0726944351196289, 0.07388480377197265, 0.07431433868408203, 0.08445545959472656, 0.07465513610839844, 0.07382077026367187, 0.07430095672607422, 0.07443897247314453, 0.07449616241455079, 0.0740997772216797, 0.07508390045166016, 0.07478265380859375, 0.07441907501220703, 0.0740843505859375, 0.07663616180419922, 0.07524518585205078, 0.07516544342041015, 0.07428358459472656, 0.07402706909179688, 0.0741560287475586, 0.07390396881103516, 0.07434051513671874, 0.07387068939208985, 0.0744741439819336, 0.07390364837646485, 0.0741421127319336, 0.07394879913330078, 0.07434230041503906, 0.0744486083984375, 0.07435126495361329, 0.07377458953857421, 0.07366912078857422, 0.07399008178710938, 0.07397193908691406, 0.07436697387695312, 0.07396047973632812, 0.07536911773681641, 0.07472764587402343, 0.07423503875732422, 0.0741976318359375, 0.07462944030761719, 0.07417036437988281, 0.07487010955810547, 0.07473538970947266, 0.07437811279296876, 0.0741212158203125, 0.07459996795654297, 0.07464189147949218, 0.07628089904785157, 0.07577056121826171, 0.07545616149902344, 0.07445769500732421, 0.07463267517089844, 0.07473725128173828, 0.07534467315673828, 0.0750675506591797, 0.0744796142578125, 0.07371981048583984, 
0.07430947113037109, 0.0743466567993164, 0.07386930847167969, 0.07406992340087891, 0.07507363128662109, 0.07456905364990235, 0.07446717071533203, 0.07448454284667969, 0.07506739044189453, 0.07868211364746094, 0.07986988830566406, 0.0744940185546875, 0.07438130950927735, 0.07376076507568359, 0.07387728118896485, 0.07419699096679687, 0.07435286712646484, 0.07396147155761719, 0.0739205093383789, 0.07391539001464843, 0.07386316680908203, 0.07396940612792968, 0.07443247985839843, 0.07535577392578124, 0.07456221008300781, 0.07435244750976562, 0.07430889892578126, 0.07395011138916016, 0.07424585723876953, 0.07426889801025391, 0.07433168029785156, 0.07481382751464843, 0.07440809631347656, 0.07391619110107422, 0.07379990386962891, 0.0746470718383789, 0.07390022277832031, 0.07395561981201172, 0.073270751953125, 0.07308108520507813, 0.07384012603759765, 0.07353542327880859, 0.07370838165283203, 0.07367453002929687, 0.07383676910400391, 0.07442966461181641, 0.07407820892333984, 0.07367145538330078, 0.07352476501464844, 0.07472176361083985, 0.07466556549072266, 0.07527056121826171, 0.07399404907226563, 0.07378758239746094, 0.07350067138671874, 0.07375257873535156, 0.07415193939208985, 0.07414086151123046, 0.07393103790283204, 0.0755492172241211, 0.07393827056884765, 0.07430620574951172, 0.07503052520751953, 0.07466146850585938, 0.07433023834228515, 0.07410095977783203, 0.07471520233154297, 0.07444627380371094, 0.07638050842285156, 0.07524784088134766, 0.07445696258544922, 0.07414329528808594, 0.0747844467163086, 0.07528256225585937, 0.07398681640625, 0.07436003112792969, 0.07428377532958984, 0.07701302337646485, 0.07495033264160156, 0.07425651550292969, 0.07438098907470703, 0.07425804901123047, 0.0740495376586914, 0.07437926483154297, 0.07372799682617187, 0.0758497314453125, 0.07370457458496094, 0.07374732971191406, 0.07475609588623047, 0.07407939147949219, 0.07407814025878906, 0.07380655670166016, 0.07452272033691407, 0.07422370910644531, 0.07369728088378906, 0.07393689727783204, 0.07437926483154297, 0.07474176025390625, 0.07514514923095703, 0.07471858978271484, 0.07402470397949219, 0.07489836883544922, 0.07426048278808593, 0.07416146850585938, 0.07399040222167969, 0.07416790771484374, 0.07419785308837891, 0.07384678649902343, 0.07399833679199219, 0.07408201599121093, 0.0741495361328125, 0.07475878143310546, 0.0747540512084961, 0.07426982116699218, 0.07411183929443359, 0.07500393676757812, 0.07428415679931641, 0.07423065948486328, 0.0739532470703125, 0.07333875274658203, 0.07338336181640626, 0.07363817596435547, 0.07400902557373047, 0.07777836608886719, 0.07384524536132812, 0.07413766479492187, 0.0740986557006836, 0.0736993637084961, 0.07393484497070313, 0.07433971405029297, 0.07434697723388672, 0.07419715118408203, 0.07399542236328124, 0.07419084930419922, 0.0742158432006836, 0.07465414428710937, 0.07443199920654296, 0.07460457611083984, 0.07455996704101563, 0.07473766326904296, 0.0742848663330078, 0.07441824340820312, 0.07514476776123047, 0.07721836853027343, 0.0742336654663086, 0.07326787567138672, 0.07348429107666016, 0.07388678741455078, 0.07407046508789063, 0.07412992095947266, 0.07500800323486329, 0.0753477783203125, 0.07447321319580077, 0.07364153289794922, 0.07411801910400391, 0.07405101013183593, 0.0742130584716797, 0.0738372802734375, 0.07335478210449219, 0.07351062774658203, 0.07367362976074218, 0.07425433349609376, 0.07436601257324219, 0.07418707275390625, 0.07415408325195312, 0.07515097808837891, 0.07374531555175781, 0.07368425750732421, 0.07490207672119141, 0.07489113616943359, 
0.0741911392211914, 0.07443641662597657, 0.0753420181274414, 0.07461273956298828, 0.0743353271484375, 0.07475087738037109, 0.07431168365478516, 0.07519369506835938, 0.07417513275146484, 0.07439974212646484, 0.07419904327392578, 0.07532479858398437, 0.07404608154296875, 0.07438745880126953, 0.0735373764038086, 0.07392886352539063, 0.07339328002929688, 0.07403183746337891, 0.07455760192871094, 0.07422108459472657, 0.07404182434082031, 0.07475199890136719, 0.07399833679199219, 0.07426662445068359, 0.07390322875976563, 0.07462137603759765, 0.07398854064941406, 0.07439564514160156, 0.07366041564941406, 0.07447862243652344, 0.07402185821533203, 0.07734473419189453, 0.07484623718261718, 0.07423385620117187, 0.07449766540527344, 0.074074462890625, 0.07517801666259766, 0.07525785827636719, 0.08058236694335938, 0.0787212142944336, 0.07431648254394531, 0.07423129272460938, 0.074276611328125, 0.07448371124267578, 0.0745068130493164, 0.0744587173461914, 0.07435939025878906, 0.07421878051757813, 0.07393746948242187, 0.07370972442626954, 0.07456095886230468, 0.07417817687988282, 0.07367266845703126, 0.07388668823242188, 0.07349043273925782, 0.07318732452392578, 0.07367884826660157, 0.07348799896240234, 0.07392294311523437, 0.07411711883544922, 0.07379702758789063, 0.07372032165527344, 0.07362569427490234, 0.07306985473632813, 0.0738741455078125, 0.07403916931152343, 0.07384835052490234, 0.07354838562011719, 0.07332454681396484, 0.07334083557128906, 0.07368224334716797, 0.07391107177734375, 0.07415798187255859, 0.07417046356201172, 0.0745164794921875, 0.07452262115478515, 0.07437337493896484, 0.074487548828125, 0.07420281219482422, 0.07395542144775391, 0.07523721313476563, 0.0742076187133789, 0.07411481475830078, 0.0741849594116211, 0.07440179443359375, 0.07623407745361328, 0.07419766235351563, 0.07392460632324219, 0.0740079345703125, 0.07380441284179687, 0.07499958038330078, 0.07404361724853516, 0.07352114868164063, 0.07349657440185547, 0.07306390380859375, 0.07362000274658204, 0.07332860565185546, 0.07363791656494141, 0.07379299163818359, 0.07372239685058593, 0.07357440185546875]",tokens/s,13.449279433666547,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,26452.04992,13903.003648,0.0,13507.756032,13505.835008,s,1,53.68265234375,53.68265234375,0.0,53.68265234375,53.68265234375,53.68265234375,53.68265234375,[53.68265234375],,kWh,0.0013594839654375126,0.00014995422307555558,0.0005131501327420018,0.00202258832125507,,MB,1217.015808,14796.3904,0.0,14380.171264,14175.648768,s,10,1.681370559692383,0.16813705596923828,0.0018570011091590974,0.16832992553710938,0.17007421112060547,0.17025881576538085,0.17040649948120118,"[0.16331564331054688, 0.16753407287597658, 0.1692528076171875, 0.1676363525390625, 0.16837705993652344, 0.16782252502441405, 0.16828279113769531, 0.17044342041015625, 0.17003318786621094, 0.16867269897460938]",tokens/s,1522.5673991034837,kWh,4.8628614256249915e-06,5.362798158514407e-07,3.2332248087999875e-06,8.632366050276419e-06,tokens/kWh,29655832.307042003,MB,1234.681856,14838.33344,0.0,14422.114304,14358.052352,s,10,59.112801757812505,5.9112801757812505,0.0234726679418505,5.9043562011718755,5.937619677734375,5.951708764648438,5.9629800341796875,"[5.93448876953125, 5.9657978515625, 5.8935556640625, 5.89496630859375, 5.8959716796875, 5.92111669921875, 5.8934638671875, 5.88408740234375, 5.91274072265625, 5.91661279296875]",tokens/s,10.657589917343708,kWh,0.00017206384317645744,1.8979360233535417e-05,0.0001142792025343999,0.0003053224059443928,tokens/kWh,206339.26227960468,,s,630,59.108717163085984,0.0938233605763269,0.0010554806569597194,0.0936074562072754,0.09468990707397461,0.095438130569458,0.09734189987182618,"[0.09622608184814453, 0.09662601470947266, 0.09388877105712891, 0.0938532485961914, 0.09452835083007813, 0.09446809387207031, 0.09410355377197266, 0.09430630493164062, 0.09533235168457031, 0.0963583984375, 0.09428118133544922, 0.09403852844238281, 0.0951644515991211, 0.09409471893310548, 0.09427168273925782, 0.0939012451171875, 0.09453772735595703, 0.09508364868164063, 0.09391798400878906, 0.0938025894165039, 0.09370829010009765, 0.09389164733886719, 0.09408608245849609, 0.09418508911132813, 0.09389699554443359, 0.09418761444091797, 0.09410530853271484, 0.09353209686279297, 0.09394620513916016, 0.09456006622314453, 0.09358560180664062, 0.09338050842285156, 0.09423881530761719, 0.09356185913085938, 0.09443571472167969, 0.09355068969726563, 0.09467715454101562, 0.09384384155273437, 0.09425920104980469, 0.0937676773071289, 0.09338470458984376, 0.09336627197265625, 0.09415376281738282, 0.09418851470947266, 0.09380249786376953, 0.09322700500488282, 0.09329571533203125, 0.09344035339355469, 0.0931681900024414, 0.09323929595947265, 0.09394790649414063, 0.09526067352294922, 0.09426124572753906, 0.09429401397705078, 0.0939375991821289, 0.0943839340209961, 0.09428607940673828, 0.09388751983642578, 0.09377616119384766, 0.09434591674804688, 0.09480397033691407, 0.09591584014892578, 0.0935323486328125, 0.09446806335449219, 0.09538969421386718, 
0.09454329681396484, 0.09702047729492187, 0.09471743774414063, 0.10125945281982422, 0.09612105560302735, 0.09524214172363281, 0.09418482971191407, 0.0943373794555664, 0.0939012451171875, 0.09433235168457031, 0.09405091094970704, 0.0940052490234375, 0.09468883514404297, 0.09436409759521484, 0.09436723327636719, 0.09422016143798828, 0.09492697906494141, 0.09362188720703125, 0.09420652770996094, 0.09420217895507813, 0.09412326049804688, 0.09613523101806641, 0.0948410873413086, 0.0952713623046875, 0.09452537536621093, 0.0944865951538086, 0.09591193389892579, 0.09517056274414062, 0.09380863952636719, 0.09389814758300781, 0.09453628540039062, 0.09429548645019531, 0.09388809967041016, 0.09499337768554687, 0.09472605133056641, 0.09475827026367188, 0.09423741149902344, 0.09431017303466797, 0.09361392211914063, 0.09327638244628907, 0.09740064239501953, 0.09423088073730469, 0.09444761657714844, 0.0937185287475586, 0.0945469741821289, 0.09503337860107422, 0.09372767639160157, 0.09458483123779297, 0.09432784271240234, 0.09713558197021484, 0.09371647644042969, 0.09447158050537109, 0.09411196899414062, 0.09391142272949218, 0.09399235534667968, 0.09491107177734374, 0.09442713928222657, 0.0941813735961914, 0.09518045043945313, 0.09424726104736328, 0.09406259155273437, 0.09574921417236328, 0.09394464111328125, 0.09359919738769532, 0.09364339447021484, 0.09352336120605469, 0.0935955810546875, 0.09332393646240235, 0.09327378845214844, 0.09311468505859374, 0.09313279724121094, 0.09347650909423828, 0.09324527740478515, 0.09299404907226562, 0.0932105941772461, 0.09322908782958984, 0.09303244781494141, 0.09295667266845703, 0.09333103942871093, 0.09272918701171876, 0.09286643218994141, 0.09355948638916016, 0.09334374237060547, 0.09394790649414063, 0.09349056243896485, 0.09424140930175781, 0.09336585235595703, 0.0931024932861328, 0.0934583969116211, 0.09469955444335937, 0.09370355224609375, 0.09355942535400391, 0.09390694427490234, 0.09309184265136719, 0.09307071685791016, 0.09316802978515625, 0.09333782196044922, 0.09330806732177735, 0.09318895721435547, 0.09378121948242188, 0.09455286407470703, 0.09424076843261718, 0.0925880355834961, 0.09244041442871094, 0.09306537628173828, 0.0929443817138672, 0.09340518188476563, 0.09390489959716797, 0.09637068939208984, 0.09279078674316406, 0.0934823989868164, 0.09287535858154297, 0.09338470458984376, 0.09367756652832031, 0.09448448181152344, 0.09362022399902344, 0.09348915100097656, 0.09719808197021484, 0.09362432098388672, 0.09307667541503906, 0.09355347442626953, 0.09353536224365235, 0.0930226593017578, 0.09259846496582032, 0.0941588134765625, 0.09305276489257812, 0.09323945617675782, 0.09314412689208984, 0.09280745697021485, 0.09324610900878906, 0.09361817932128906, 0.09282559967041015, 0.09302377319335937, 0.09347734069824219, 0.09351372528076173, 0.09379014587402344, 0.09414252471923829, 0.09309120178222656, 0.09334770965576172, 0.094155517578125, 0.09390223693847656, 0.09406729888916016, 0.09404621124267579, 0.09752780914306641, 0.09374310302734375, 0.09367961883544922, 0.09403392028808594, 0.09351535797119141, 0.09330934143066406, 0.09300518035888672, 0.09322560119628906, 0.09313645172119141, 0.09581613159179687, 0.09304678344726562, 0.09311641693115234, 0.09321670532226563, 0.09294582366943359, 0.0932768325805664, 0.09650745391845703, 0.09281375885009766, 0.09281449890136718, 0.0932168960571289, 0.09322364807128906, 0.09285427093505859, 0.09273958587646484, 0.09305702209472656, 0.09297020721435546, 0.09327490997314453, 0.09287474822998047, 0.09342499542236328, 
0.09320246124267578, 0.09381314849853516, 0.09316070556640625, 0.09390563201904296, 0.09365325164794921, 0.09316761779785156, 0.09343180847167969, 0.09367113494873047, 0.09317814636230469, 0.09344393920898438, 0.09406185913085938, 0.09366822052001954, 0.09350144195556641, 0.09436774444580077, 0.0934277114868164, 0.09443488311767578, 0.09344249725341797, 0.09380448150634765, 0.0937470703125, 0.0932371826171875, 0.09352825927734375, 0.09317298889160157, 0.09455193328857422, 0.09369900512695313, 0.09321782684326171, 0.09302870178222657, 0.09416153717041016, 0.0934645767211914, 0.09360492706298829, 0.0937008285522461, 0.09387849426269532, 0.09365299224853516, 0.09378157043457032, 0.09310662078857422, 0.0984247055053711, 0.09353836822509766, 0.0941937255859375, 0.09368316650390625, 0.09338448333740235, 0.09308601379394531, 0.09434925079345703, 0.09620326232910156, 0.09329459381103515, 0.09312620544433593, 0.09317011260986328, 0.0928786849975586, 0.09287875366210938, 0.09349097442626954, 0.09354287719726563, 0.09325894165039063, 0.09322579193115234, 0.09342361450195312, 0.09364457702636719, 0.09310844421386719, 0.09357926177978515, 0.09289113616943359, 0.09329049682617188, 0.09315500640869141, 0.09387245178222656, 0.09396633911132812, 0.0937000961303711, 0.09359910583496094, 0.09326860809326172, 0.09341334533691406, 0.09347894287109375, 0.09352601623535156, 0.0932042236328125, 0.0934238739013672, 0.09344409942626954, 0.09305907440185547, 0.09343590545654297, 0.0930561294555664, 0.09295756530761719, 0.092837890625, 0.09274534606933593, 0.09276201629638672, 0.09354227447509765, 0.09321433258056641, 0.09464643096923828, 0.09427027130126953, 0.09388646697998047, 0.09328598022460938, 0.093108642578125, 0.09358131408691406, 0.0935997085571289, 0.09409334564208985, 0.1065902099609375, 0.09362022399902344, 0.09439571380615235, 0.09365363311767579, 0.09374214172363281, 0.09366591644287109, 0.09395180511474609, 0.09372319793701171, 0.0935251235961914, 0.09298623657226562, 0.09413017272949219, 0.0936099853515625, 0.09432876586914063, 0.09343392181396484, 0.0937492446899414, 0.09355059051513671, 0.09401036834716797, 0.09404307556152344, 0.09396640014648437, 0.09385081481933594, 0.09379923248291015, 0.09415225219726563, 0.09382342529296875, 0.09396428680419922, 0.09418675231933593, 0.09334976196289063, 0.09350233459472657, 0.09395334625244141, 0.09409811401367188, 0.09376972961425781, 0.0936607666015625, 0.09334150695800782, 0.093787841796875, 0.09420278167724609, 0.09369583892822266, 0.09407504272460937, 0.09373004913330078, 0.09341795349121093, 0.09308134460449219, 0.09301990509033203, 0.09315408325195312, 0.09585868835449218, 0.09433087921142579, 0.09358745574951172, 0.09399501037597656, 0.09418752288818359, 0.09326182556152343, 0.09325542449951171, 0.09326412963867188, 0.09304473876953125, 0.09326182556152343, 0.09368370819091797, 0.09382297515869141, 0.0954777603149414, 0.09410765075683594, 0.09428355407714843, 0.09340898895263672, 0.10031568145751953, 0.09615360260009766, 0.0939163818359375, 0.09366812896728516, 0.09371238708496094, 0.09355264282226562, 0.09368339538574219, 0.09345260620117188, 0.09343590545654297, 0.09329459381103515, 0.09333548736572266, 0.09302960205078124, 0.09302716827392578, 0.09338841247558594, 0.09343801879882813, 0.09284754943847656, 0.09302278137207032, 0.09263340759277344, 0.09346662139892578, 0.09331423950195313, 0.09246514892578125, 0.09325833892822266, 0.09312483215332032, 0.09282355499267578, 0.0929106216430664, 0.0924167709350586, 0.09297062683105468, 
0.09358601379394531, 0.09347071838378906, 0.09359564971923828, 0.09378345489501953, 0.09405856323242187, 0.09323948669433593, 0.0940670394897461, 0.09381385803222657, 0.09351465606689453, 0.09293004608154297, 0.09312854766845703, 0.09679468536376953, 0.09356905364990234, 0.09338681793212891, 0.0930709457397461, 0.09325775909423828, 0.09304307556152344, 0.09322700500488282, 0.09270454406738281, 0.09313862609863281, 0.09241654205322265, 0.09578233337402343, 0.09305760192871093, 0.09354576110839843, 0.09302671813964844, 0.09307552337646484, 0.09325145721435547, 0.09295500946044923, 0.0926740493774414, 0.09278463745117188, 0.09339647674560547, 0.0933473892211914, 0.09316242980957032, 0.09517056274414062, 0.09341929626464844, 0.09391702270507812, 0.09418470764160156, 0.09379097747802734, 0.09281126403808594, 0.09296281433105469, 0.09340694427490234, 0.09325596618652343, 0.0932489242553711, 0.0926009292602539, 0.09340518188476563, 0.09307734680175782, 0.09310428619384765, 0.09279488372802734, 0.09340239715576172, 0.09322569274902344, 0.09324748992919922, 0.09271910095214844, 0.09284947204589844, 0.09306947326660156, 0.09292649841308594, 0.09327942657470703, 0.09303327941894532, 0.09321842956542968, 0.09316105651855469, 0.09359849548339844, 0.09496166229248047, 0.09413836669921875, 0.09341133117675782, 0.09341133117675782, 0.09447334289550781, 0.09333631896972656, 0.09318617248535156, 0.09292390441894531, 0.09348710632324218, 0.09282755279541016, 0.0930137939453125, 0.09280915069580079, 0.09568895721435547, 0.0922108154296875, 0.09289078521728515, 0.0925192642211914, 0.09291522979736327, 0.0929755859375, 0.09308979034423828, 0.09552025604248048, 0.09345619201660156, 0.09287248229980469, 0.09314972686767578, 0.0931036148071289, 0.0930487060546875, 0.09340415954589844, 0.09345555114746094, 0.09333638763427735, 0.09393494415283203, 0.09314166259765624, 0.09349894714355468, 0.09361369323730469, 0.0933978271484375, 0.09444454193115234, 0.09372128295898438, 0.09331270599365235, 0.09475545501708985, 0.09415679931640625, 0.09369599914550782, 0.09425315093994141, 0.09310899353027344, 0.09285222625732421, 0.09264947509765625, 0.0930298843383789, 0.09324390411376954, 0.09373081970214844, 0.093189697265625, 0.09316121673583984, 0.09304338836669922, 0.09276150512695312, 0.09289174652099609, 0.09329177856445313, 0.09289952087402344, 0.09311283111572266, 0.09534233856201171, 0.09405471801757813, 0.09388851165771485, 0.09357926177978515, 0.09392127990722657, 0.0934169921875, 0.09340156555175781, 0.09382093048095703, 0.09365039825439453, 0.09317977905273438, 0.09302082824707031, 0.09626969909667969, 0.09349724578857421, 0.09315715026855469, 0.09313581085205078, 0.09640656280517577, 0.09479267120361329, 0.09401344299316407, 0.09448006439208985, 0.09425132751464843, 0.09396009826660157, 0.09398486328125, 0.09392947387695312, 0.09427942657470703, 0.09496601867675782, 0.09395574188232422, 0.09418582153320312, 0.09425299072265625, 0.09465782165527344, 0.0949271011352539, 0.09395362854003907, 0.09395449829101563, 0.09336268615722657, 0.0952279052734375, 0.09402982330322265, 0.09391420745849609, 0.09334877014160156, 0.09487503814697265, 0.09386418914794922, 0.09360406494140625, 0.09378828430175781, 0.09497510528564453, 0.09344403076171875, 0.09293714904785157, 0.09415679931640625, 0.09450476837158203, 0.09353759765625, 0.09324384307861328, 0.09402665710449219, 0.094455810546875, 0.09439231872558594, 0.09319219207763672, 0.09333299255371094, 0.09387059020996094, 0.0944736328125, 0.09355721282958984, 0.0933315200805664, 
0.0942490234375, 0.0940475845336914, 0.09339151763916016, 0.09380863952636719, 0.09801312255859375, 0.09385171508789063, 0.09328844451904297, 0.094455810546875, 0.09448348999023437, 0.09398985290527344, 0.09355574035644532, 0.09694041442871094, 0.09403453063964844, 0.0937000961303711, 0.0930508804321289, 0.09442463684082031, 0.09329641723632813, 0.09425552368164063, 0.09358316802978515, 0.0941789779663086, 0.09346867370605469, 0.09319709014892578, 0.09413120269775391, 0.09377689361572265, 0.09331001281738281, 0.09403692626953125, 0.09321180725097657, 0.09389469146728516, 0.09385862731933593, 0.09461321258544922, 0.09370626831054688, 0.09395635223388672, 0.09309798431396485, 0.09452947235107421, 0.09375341033935547, 0.09371555328369141, 0.09332339477539063, 0.09502595520019531, 0.09410150146484375, 0.09406003570556641, 0.0942003173828125, 0.09452668762207031, 0.09318070220947265, 0.09284812927246094, 0.09406668853759766, 0.09377129364013671, 0.09295625305175781, 0.09378224182128907, 0.09535350036621094, 0.09318985748291016, 0.09323548889160156, 0.09321881866455078, 0.09288838195800782, 0.09301382446289062]",tokens/s,10.658326389689304,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,846.467072,565.116928,0.0,169.869312,150.669312,s,1,8.1319296875,8.1319296875,0.0,8.1319296875,8.1319296875,8.1319296875,8.1319296875,[8.1319296875],,kWh,2.2475241483327103e-05,2.4720796588808212e-06,8.348062234014852e-06,3.3295383376222775e-05,,MB,1151.246336,625.934336,0.0,209.7152,193.680384,s,11,0.1753775987625122,0.01594341806931929,0.0001830208789097381,0.01591209602355957,0.016218271255493164,0.016245919227600096,0.016268037605285644,"[0.016158304214477538, 0.01592643165588379, 0.015769344329833984, 0.015905728340148927, 0.015714240074157716, 0.015849472045898438, 0.01591209602355957, 0.015912799835205077, 0.01627356719970703, 0.016218271255493164, 0.015737343788146972]",tokens/s,16056.782735480887,kWh,4.7600317202966865e-07,5.2494784996865563e-08,1.9738234025121626e-07,7.258802972777505e-07,tokens/kWh,352675228.9049172,MB,1162.346496,628.031488,0.0,211.812352,193.682944,s,11,10.631011718749999,0.9664556107954545,0.004707955255336871,0.9647833862304688,0.9734203491210938,0.9755199279785156,0.9771995910644531,"[0.9612041015625, 0.963017578125, 0.9734203491210938, 0.9776195068359375, 0.9675733642578125, 0.9644152221679687, 0.9658329467773438, 0.9645062255859375, 0.962111328125, 0.9647833862304688, 0.9665277099609375]",tokens/s,65.18664623215027,kWh,2.7935242829482866e-05,3.08080831378856e-06,1.0652626339749065e-05,4.16686774830205e-05,tokens/kWh,1511927.0350174126,,s,693,10.624617526054392,0.015331338421434894,0.0002571975798281347,0.01528831958770752,0.015467743682861327,0.01559829750061035,0.016082920684814452,"[0.015103391647338867, 0.015395520210266113, 0.015293439865112305, 0.015240351676940917, 0.015201984405517578, 0.015198271751403808, 0.015108096122741698, 
0.015095168113708497, 0.015278719902038574, 0.015388031959533692, 0.015181856155395508, 0.015217151641845703, 0.015165504455566406, 0.015199295997619629, 0.015212544441223145, 0.015178879737854005, 0.015255392074584961, 0.015431039810180664, 0.015291040420532226, 0.015262911796569825, 0.015321887969970704, 0.015157407760620117, 0.015150912284851074, 0.015194144248962402, 0.015167776107788087, 0.015210335731506347, 0.015195232391357422, 0.015143487930297852, 0.015628512382507325, 0.015151167869567871, 0.015534015655517578, 0.015282336235046387, 0.01513372802734375, 0.015101759910583497, 0.01520844841003418, 0.015121567726135253, 0.015132512092590332, 0.015108096122741698, 0.015118335723876953, 0.015128576278686523, 0.0151364164352417, 0.015270591735839844, 0.015163040161132813, 0.015118335723876953, 0.015151103973388673, 0.01518182373046875, 0.015159104347229004, 0.015199647903442384, 0.015176095962524415, 0.015278464317321777, 0.015263680458068848, 0.015758975982666016, 0.015300160408020019, 0.01531388759613037, 0.015300512313842773, 0.01562828826904297, 0.015278079986572265, 0.015233247756958009, 0.015353568077087402, 0.015361791610717773, 0.015433279991149903, 0.015382911682128906, 0.015280799865722656, 0.015210495948791505, 0.015159296035766602, 0.015250720024108887, 0.015338208198547363, 0.015213600158691406, 0.015199423789978028, 0.015144800186157227, 0.015251423835754395, 0.015199392318725587, 0.015217472076416015, 0.015230463981628419, 0.015234848022460938, 0.01510268783569336, 0.015616000175476074, 0.015124480247497558, 0.015689536094665526, 0.015327327728271485, 0.015276127815246583, 0.015232768058776856, 0.015247615814208984, 0.015183775901794434, 0.015189248085021973, 0.01519702434539795, 0.015193696022033692, 0.015118751525878906, 0.015143936157226562, 0.015176704406738281, 0.015152128219604492, 0.015148032188415527, 0.015408672332763673, 0.01533795166015625, 0.015219840049743652, 0.01514367961883545, 0.015323264122009277, 0.015318143844604492, 0.01525011157989502, 0.015300671577453613, 0.015187328338623046, 0.01519699192047119, 0.015296319961547851, 0.015244447708129882, 0.015260640144348145, 0.015245247840881347, 0.015386688232421874, 0.015284223556518555, 0.015187552452087402, 0.015194527626037598, 0.015306495666503906, 0.015263104438781738, 0.015407999992370606, 0.015248703956604003, 0.015440768241882324, 0.015291423797607421, 0.015257439613342284, 0.01530470371246338, 0.01524227237701416, 0.015463520050048828, 0.01570915222167969, 0.015294303894042969, 0.015346943855285644, 0.015250304222106933, 0.01532096004486084, 0.015720191955566405, 0.015302687644958497, 0.015285696029663086, 0.015344287872314453, 0.015351807594299317, 0.01532271957397461, 0.015348383903503418, 0.015326784133911133, 0.015396832466125488, 0.015339743614196778, 0.015394816398620606, 0.015266880035400391, 0.01608185577392578, 0.01529856014251709, 0.015191424369812011, 0.015264384269714356, 0.015263744354248047, 0.015278207778930663, 0.015333215713500976, 0.015315103530883788, 0.015312095642089844, 0.015286944389343261, 0.01548902416229248, 0.015381888389587402, 0.015315584182739258, 0.01534943962097168, 0.015308352470397949, 0.015481151580810548, 0.015294912338256837, 0.01534329605102539, 0.015288736343383789, 0.015376288414001465, 0.015435520172119141, 0.01525715160369873, 0.015421664237976074, 0.015407487869262696, 0.01533683204650879, 0.015309727668762207, 0.015243071556091309, 0.015234975814819337, 0.015222880363464355, 0.01521664047241211, 0.0167869758605957, 0.018315744400024415, 
0.015548383712768555, 0.015445759773254394, 0.015501855850219726, 0.015332223892211914, 0.015397215843200683, 0.015251999855041503, 0.015357952117919921, 0.015422752380371094, 0.015357855796813966, 0.015769696235656737, 0.015634336471557618, 0.015490943908691407, 0.01540940761566162, 0.015311103820800781, 0.015823007583618164, 0.015400927543640136, 0.015328991889953613, 0.01531760025024414, 0.015333375930786132, 0.015277440071105958, 0.015350720405578614, 0.015233983993530273, 0.015158304214477539, 0.015282719612121583, 0.015655360221862792, 0.015238719940185547, 0.015257087707519532, 0.015783072471618653, 0.015260895729064942, 0.015220352172851562, 0.015248319625854493, 0.015294303894042969, 0.015829312324523927, 0.01815100860595703, 0.015614208221435546, 0.015381952285766602, 0.015341600418090821, 0.015431743621826172, 0.01586390399932861, 0.015642175674438475, 0.015995360374450682, 0.015628640174865722, 0.01547878360748291, 0.015581184387207032, 0.01526748752593994, 0.015352160453796386, 0.015394559860229492, 0.015476448059082031, 0.015362591743469238, 0.015264927864074707, 0.015239744186401368, 0.01563881587982178, 0.015378432273864746, 0.015230976104736327, 0.015322527885437011, 0.015204992294311524, 0.015257375717163086, 0.015228832244873047, 0.015299072265625, 0.015182847976684571, 0.015180704116821288, 0.015283743858337402, 0.015227231979370117, 0.015263903617858886, 0.015275744438171387, 0.015446271896362305, 0.0152674560546875, 0.015251711845397949, 0.015187968254089355, 0.015362048149108886, 0.015348832130432128, 0.015588319778442383, 0.015335359573364259, 0.015544320106506348, 0.015507007598876953, 0.015415712356567383, 0.015417216300964355, 0.015796607971191406, 0.01859971237182617, 0.01674569511413574, 0.015301440238952637, 0.015427552223205566, 0.015243264198303222, 0.015580127716064453, 0.015332159996032714, 0.015313247680664062, 0.015255328178405761, 0.015260831832885742, 0.01531766414642334, 0.015280320167541503, 0.015333375930786132, 0.015237088203430177, 0.015310879707336425, 0.015180800437927246, 0.015407999992370606, 0.015273311614990234, 0.015305536270141602, 0.01526576042175293, 0.015343615531921387, 0.015300288200378417, 0.015355648040771485, 0.015340096473693847, 0.01528217601776123, 0.015339584350585937, 0.015261119842529296, 0.015319711685180664, 0.015398752212524415, 0.015265791893005372, 0.015316927909851074, 0.01545248031616211, 0.015277695655822755, 0.015554847717285157, 0.01544332790374756, 0.015434240341186524, 0.015554176330566407, 0.015606111526489257, 0.015411231994628906, 0.015370207786560059, 0.015286304473876954, 0.015549759864807128, 0.015317279815673828, 0.01567372798919678, 0.015378432273864746, 0.015247360229492187, 0.015224255561828614, 0.015235039710998535, 0.015317279815673828, 0.015323455810546876, 0.015288288116455079, 0.015364031791687011, 0.015288415908813477, 0.015292415618896485, 0.015290047645568848, 0.015298879623413086, 0.015195391654968261, 0.01533414363861084, 0.015253503799438477, 0.015435775756835938, 0.015261695861816407, 0.015497311592102051, 0.015327136039733886, 0.01568563175201416, 0.015333279609680176, 0.015284128189086914, 0.015366239547729492, 0.015335488319396972, 0.015226431846618652, 0.015237407684326172, 0.015400896072387696, 0.01519983959197998, 0.015142911911010743, 0.015172351837158203, 0.01520400047302246, 0.015171584129333495, 0.015278079986572265, 0.015257087707519532, 0.01529203224182129, 0.015263872146606445, 0.015264512062072754, 0.015204352378845215, 0.015197952270507813, 0.015397024154663087, 
0.015283807754516602, 0.015241632461547852, 0.015394751548767089, 0.015304415702819825, 0.015274656295776368, 0.01538428783416748, 0.015363743782043457, 0.015274432182312012, 0.0152740478515625, 0.015232928276062012, 0.015234880447387696, 0.015214783668518066, 0.015331487655639648, 0.015239007949829101, 0.015257599830627442, 0.015312383651733399, 0.0152542724609375, 0.015280991554260255, 0.015383456230163574, 0.015386015892028808, 0.015243871688842774, 0.015363295555114747, 0.015309503555297851, 0.015196255683898926, 0.01520035171508789, 0.015279871940612793, 0.015298720359802245, 0.015263936042785645, 0.015259200096130372, 0.01528384017944336, 0.01547283172607422, 0.015372672080993653, 0.015384639739990234, 0.015370240211486816, 0.01529036808013916, 0.01535110378265381, 0.015372960090637208, 0.015358016014099122, 0.015288064002990723, 0.01537660789489746, 0.015369376182556153, 0.015344703674316406, 0.015338335990905762, 0.015348320007324218, 0.015348064422607421, 0.01558035182952881, 0.01533420753479004, 0.015210847854614257, 0.015219679832458496, 0.015196864128112792, 0.015253631591796876, 0.015239040374755859, 0.015245311737060547, 0.015398624420166016, 0.015249631881713866, 0.015317055702209472, 0.015284416198730468, 0.015363871574401855, 0.015245344161987305, 0.015302656173706054, 0.015245599746704102, 0.015228639602661133, 0.01539891242980957, 0.015339103698730469, 0.015293919563293457, 0.015275168418884277, 0.015256671905517579, 0.015303359985351563, 0.015242752075195312, 0.015241567611694335, 0.015327391624450683, 0.015255552291870117, 0.015230976104736327, 0.015238143920898438, 0.015287551879882812, 0.015302528381347656, 0.015255423545837402, 0.015284223556518555, 0.015224639892578124, 0.015240960121154785, 0.015183712005615234, 0.015262304306030273, 0.015212544441223145, 0.01530031967163086, 0.015255840301513671, 0.015366144180297851, 0.015277600288391114, 0.015345600128173828, 0.015254048347473145, 0.015187968254089355, 0.015209535598754882, 0.015186783790588378, 0.015177439689636231, 0.015260191917419433, 0.015172608375549316, 0.015323904037475586, 0.015345696449279786, 0.015351200103759765, 0.015315615653991699, 0.015380319595336913, 0.015831199645996094, 0.01673040008544922, 0.015359711647033692, 0.01537382411956787, 0.015303232192993165, 0.015253472328186034, 0.015409119606018066, 0.015229215621948242, 0.01593929576873779, 0.015460351943969726, 0.015253536224365235, 0.01525545597076416, 0.015236960411071778, 0.015244383811950684, 0.015219807624816895, 0.015212320327758789, 0.015150272369384765, 0.015291232109069824, 0.015265248298645019, 0.01527660846710205, 0.01526912021636963, 0.015225567817687989, 0.015237119674682617, 0.015195679664611817, 0.015468799591064453, 0.015266016006469726, 0.015211872100830078, 0.015092384338378907, 0.01521459197998047, 0.015332991600036622, 0.01522316837310791, 0.015312352180480958, 0.015294560432434082, 0.015253503799438477, 0.015223360061645508, 0.015834848403930665, 0.015322943687438965, 0.015374688148498536, 0.015232159614562988, 0.01528115177154541, 0.015291584014892579, 0.015348352432250977, 0.015335455894470214, 0.015388671875, 0.015341535568237304, 0.01536841583251953, 0.015373663902282715, 0.01532156753540039, 0.015309951782226562, 0.015288288116455079, 0.015291296005249023, 0.015382528305053711, 0.015347647666931152, 0.015312992095947265, 0.015317055702209472, 0.015347583770751953, 0.015275424003601074, 0.015288031578063965, 0.015299712181091309, 0.015322912216186523, 0.015327263832092285, 0.01526576042175293, 0.015294464111328124, 
0.015304736137390137, 0.01526576042175293, 0.015275615692138672, 0.015288736343383789, 0.015249407768249512, 0.015243264198303222, 0.015290271759033204, 0.015405152320861816, 0.015390239715576171, 0.01548755168914795, 0.01535654354095459, 0.01533743953704834, 0.015315072059631347, 0.015275584220886231, 0.015212896347045899, 0.01519820785522461, 0.015307007789611816, 0.015187711715698242, 0.015283488273620605, 0.015210240364074708, 0.015203295707702636, 0.015216768264770508, 0.015205280303955078, 0.015277055740356446, 0.015249664306640626, 0.015298208236694337, 0.015188032150268555, 0.015257599830627442, 0.01520639991760254, 0.015299967765808106, 0.015254143714904786, 0.015212800025939941, 0.01526144027709961, 0.015341567993164062, 0.015297696113586426, 0.015276896476745605, 0.015259743690490723, 0.015194016456604004, 0.015218688011169433, 0.01516147232055664, 0.015233920097351075, 0.015219136238098145, 0.015370816230773925, 0.01543887996673584, 0.0154552001953125, 0.015388671875, 0.015357952117919921, 0.015309151649475097, 0.01552451229095459, 0.015200703620910645, 0.01516220760345459, 0.015133472442626953, 0.015229887962341308, 0.015287360191345216, 0.015164383888244629, 0.015305888175964356, 0.015323871612548829, 0.015285408020019531, 0.015377568244934083, 0.015217632293701171, 0.015239999771118165, 0.015161343574523926, 0.015142687797546387, 0.015206111907958984, 0.015211008071899413, 0.01523311996459961, 0.015154208183288574, 0.015189984321594237, 0.015245408058166503, 0.015223615646362305, 0.015219903945922852, 0.01552188777923584, 0.015241855621337891, 0.015517696380615235, 0.015749183654785157, 0.01549715232849121, 0.0153121919631958, 0.015284000396728515, 0.015211135864257812, 0.015243328094482422, 0.0151976318359375, 0.015285087585449218, 0.015290111541748046, 0.015265983581542969, 0.015322463989257813, 0.01535638427734375, 0.015359647750854492, 0.015432224273681641, 0.015427359580993652, 0.015235296249389648, 0.015407103538513184, 0.015484928131103515, 0.01528831958770752, 0.015246560096740722, 0.015372096061706543, 0.015245951652526856, 0.015329664230346679, 0.015170623779296875, 0.015221664428710938, 0.015239232063293457, 0.015269824028015137, 0.015265791893005372, 0.015238719940185547, 0.015241663932800293, 0.015267935752868653, 0.015184991836547852, 0.01529535961151123, 0.015338848114013672, 0.015387231826782227, 0.015308032035827637, 0.015554911613464355, 0.015314720153808593, 0.01528876781463623, 0.015241408348083496, 0.015170559883117676, 0.015227904319763183, 0.015229984283447266, 0.015166432380676269, 0.015221887588500977, 0.015211487770080566, 0.015343520164489746, 0.01528217601776123, 0.015474687576293946, 0.01535763168334961, 0.015224479675292968, 0.015212800025939941, 0.015182239532470703, 0.015497440338134766, 0.015400383949279785, 0.015298944473266601, 0.015215840339660645, 0.015211168289184571, 0.01523862361907959, 0.015265983581542969, 0.0152478084564209, 0.015339103698730469, 0.015108096122741698, 0.015294719696044921, 0.015593088150024415, 0.015335552215576172, 0.015292415618896485, 0.015281344413757324, 0.015204480171203613, 0.015273951530456542, 0.01530339241027832, 0.015259231567382812, 0.015214143753051758, 0.015196288108825683, 0.015237536430358887, 0.015300928115844726, 0.015300831794738769, 0.015206175804138184, 0.015239168167114257, 0.015196160316467285, 0.015265439987182617, 0.015298303604125976, 0.015200863838195801, 0.015226335525512695, 0.01529695987701416, 0.015374719619750976, 0.015394304275512695, 0.01536451244354248, 0.015304448127746582, 
0.01609516716003418, 0.01540828800201416, 0.01537337589263916, 0.015363903999328613, 0.015361472129821778, 0.015288991928100586, 0.015210399627685547, 0.01530070400238037, 0.01517363166809082, 0.01527830410003662, 0.015282976150512695, 0.015293439865112305, 0.01520639991760254, 0.01520639991760254, 0.01528217601776123, 0.015295488357543945, 0.015311871528625488, 0.015335424423217774, 0.015429632186889648, 0.015471839904785156, 0.015339391708374023, 0.015389599800109864, 0.015531007766723632, 0.015393759727478027, 0.015267295837402344, 0.015368351936340332, 0.01537712001800537, 0.015372063636779785, 0.015378335952758788, 0.01529036808013916, 0.01548697566986084, 0.015454208374023438, 0.015383808135986329, 0.015480640411376954, 0.015514143943786621, 0.015411616325378418]",tokens/s,65.2258773834051,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = 
cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,9917.075456,6193.872896,0.0,5798.62528,5404.427264,s,1,21.5315625,21.5315625,0.0,21.5315625,21.5315625,21.5315625,21.5315625,[21.5315625],,kWh,0.0004143135149416745,4.569452913317382e-05,0.0001556220689419574,0.0006156301130168057,,MB,5705.5232,6493.765632,0.0,6077.546496,5755.124736,s,10,1.47804443359375,0.14780444335937498,0.001349333772418473,0.14716375732421877,0.15014812927246093,0.15027644958496092,0.15037910583496092,"[0.14720985412597656, 0.14711766052246095, 0.14669081115722657, 0.14700889587402344, 0.15040476989746093, 0.15011961364746093, 0.14687986755371094, 0.14865615844726562, 0.14744854736328125, 0.14650825500488282]",tokens/s,1732.0182951303836,kWh,4.3325030819245035e-06,4.777992596634437e-07,2.6351655068235386e-06,7.4454678484114864e-06,tokens/kWh,34383332.94993926,MB,5709.623296,6495.862784,0.0,6079.643648,5755.127296,s,10,89.78707128906251,8.978707128906251,0.01121965840479262,8.97895068359375,8.993672851562499,8.9947978515625,8.9956978515625,"[8.971361328125, 8.9959228515625, 8.9616923828125, 8.972267578125, 8.963349609375, 8.975248046875, 8.9826533203125, 8.9934228515625, 8.9868359375, 8.9843173828125]",tokens/s,7.016600396417474,kWh,0.00026102251405099374,2.8792178177252373e-05,0.00011523787323597876,0.00040505256546422485,tokens/kWh,155535.36842260612,,s,630,89.78394540405283,0.14251419905405197,0.0011855416921898443,0.1422651824951172,0.1436530029296875,0.1447293182373047,0.14692891906738284,"[0.14134521484375, 0.14241334533691405, 0.14179385375976564, 0.14496809387207032, 0.142129150390625, 0.1414185028076172, 0.14166835021972657, 0.14118655395507812, 0.14231398010253907, 0.14118655395507812, 0.14086604309082032, 0.1428683776855469, 0.14226220703125, 0.14250408935546874, 0.14135200500488282, 0.14323155212402344, 0.14167266845703125, 0.14166207885742188, 0.14345449829101561, 0.1424199676513672, 0.14226226806640624, 0.14183180236816406, 0.1427439422607422, 0.14236262512207032, 0.143067138671875, 0.1419325408935547, 0.1421859130859375, 0.1416730194091797, 0.14135891723632812, 0.14240077209472657, 0.14203298950195312, 0.14160368347167968, 0.14267964172363282, 0.14249002075195313, 
0.14245887756347655, 0.14223490905761718, 0.14221151733398438, 0.14314271545410157, 0.14704832458496095, 0.1432493438720703, 0.14182371520996093, 0.14184364318847656, 0.14243113708496094, 0.143300537109375, 0.14393437194824218, 0.14280908203125, 0.14276991271972655, 0.1419983367919922, 0.14220611572265626, 0.144903076171875, 0.14223149108886718, 0.14202470397949218, 0.14189523315429686, 0.14284841918945312, 0.14200218200683593, 0.14193238830566407, 0.14225961303710938, 0.14237965393066407, 0.1421600341796875, 0.14207708740234376, 0.14354515075683594, 0.14217219543457033, 0.1418050231933594, 0.1427392578125, 0.14313221740722656, 0.14239590454101564, 0.14365855407714845, 0.14242247009277345, 0.14196531677246094, 0.1421332550048828, 0.14274124145507813, 0.14196762084960937, 0.1436708221435547, 0.14298570251464843, 0.1423155212402344, 0.14266983032226563, 0.14352102661132812, 0.14223231506347656, 0.14221693420410156, 0.14225392150878907, 0.14312867736816406, 0.14316989135742186, 0.15363072204589845, 0.14231884765625, 0.14323583984375, 0.14264643859863282, 0.14320025634765626, 0.144763427734375, 0.14331111145019532, 0.14322285461425782, 0.14287443542480469, 0.14313285827636718, 0.14240765380859374, 0.14236265563964845, 0.1430241241455078, 0.14288076782226564, 0.1428225555419922, 0.14268208312988281, 0.14255174255371095, 0.14218873596191406, 0.14238674926757813, 0.14303890991210938, 0.14195916748046875, 0.14212506103515626, 0.14212506103515626, 0.1416663055419922, 0.14186700439453126, 0.144787109375, 0.142420166015625, 0.14173977661132814, 0.14163980102539062, 0.14178536987304688, 0.14258586120605468, 0.14341325378417968, 0.14264524841308593, 0.14243161010742186, 0.1411405487060547, 0.14188064575195314, 0.14663658142089844, 0.14173898315429687, 0.1419939880371094, 0.14185061645507813, 0.14236854553222655, 0.1415554504394531, 0.1417732849121094, 0.14147975158691406, 0.14230323791503907, 0.14223974609375, 0.1411558074951172, 0.14223004150390625, 0.14207327270507814, 0.14216560363769531, 0.14205027770996093, 0.14237490844726564, 0.142781982421875, 0.1424054718017578, 0.1421334686279297, 0.1420413818359375, 0.14189785766601562, 0.1416841278076172, 0.14102383422851564, 0.1437071990966797, 0.14348793029785156, 0.142556640625, 0.14186732482910155, 0.14165373229980469, 0.141338623046875, 0.1417150421142578, 0.14203776550292968, 0.14275935363769532, 0.14355506896972656, 0.1422025604248047, 0.14196534729003907, 0.14192076110839844, 0.1418419189453125, 0.14187776184082032, 0.1422143096923828, 0.1418411865234375, 0.14212921142578125, 0.1414776611328125, 0.141650146484375, 0.14189292907714843, 0.142072509765625, 0.14228866577148438, 0.1423784942626953, 0.1422587890625, 0.142084228515625, 0.1422274627685547, 0.1416273956298828, 0.14178028869628906, 0.14337094116210938, 0.14312200927734375, 0.14254531860351563, 0.14219378662109375, 0.1413207092285156, 0.1417322235107422, 0.14160018920898437, 0.145574462890625, 0.14149171447753905, 0.1413695068359375, 0.14125244140625, 0.141366943359375, 0.14245269775390626, 0.1428942108154297, 0.14099542236328125, 0.14244137573242188, 0.14128128051757813, 0.14891183471679686, 0.1424878387451172, 0.14263066101074218, 0.14265171813964844, 0.14244645690917968, 0.14208428955078126, 0.1419489288330078, 0.14205337524414063, 0.14207180786132811, 0.14155282592773438, 0.14164012145996094, 0.14220892333984375, 0.1420042266845703, 0.1416053466796875, 0.14143693542480468, 0.14138291931152344, 0.14154733276367187, 0.1414606475830078, 0.14250985717773437, 0.14267596435546875, 
0.14213909912109374, 0.14168707275390624, 0.1415509490966797, 0.14199197387695311, 0.14380633544921875, 0.1418798370361328, 0.1418918151855469, 0.14246633911132814, 0.14128201293945314, 0.14181283569335937, 0.14135081481933592, 0.14136627197265625, 0.1426507568359375, 0.14224038696289062, 0.1422704620361328, 0.14194688415527343, 0.14238021850585938, 0.14143161010742186, 0.1414817352294922, 0.14322099304199218, 0.14174412536621095, 0.14233798217773438, 0.142291015625, 0.14206259155273437, 0.14246194458007813, 0.1421946258544922, 0.14306918334960939, 0.14274771118164062, 0.14322006225585937, 0.14243612670898437, 0.1448536376953125, 0.1428625946044922, 0.14649754333496093, 0.1442586212158203, 0.14246524047851564, 0.14287484741210937, 0.14259814453125, 0.1425919952392578, 0.14226809692382814, 0.14502085876464843, 0.14365528869628907, 0.14260018920898437, 0.1421721649169922, 0.14199562072753907, 0.1439129638671875, 0.14212300109863282, 0.14336614990234375, 0.1419502410888672, 0.1436021728515625, 0.1432674560546875, 0.14241670227050782, 0.14204617309570314, 0.14170195007324218, 0.14311013793945312, 0.14215081787109374, 0.14433775329589843, 0.14168006896972657, 0.14392787170410157, 0.141653564453125, 0.141666748046875, 0.1435299835205078, 0.14171955871582032, 0.14267391967773438, 0.14171136474609375, 0.14183424377441406, 0.14147378540039063, 0.14132211303710937, 0.14205349731445313, 0.14193814086914064, 0.14225836181640625, 0.14151100158691407, 0.1417072296142578, 0.141343994140625, 0.1444639434814453, 0.144005859375, 0.14193804931640625, 0.14243084716796875, 0.14164991760253906, 0.14239334106445312, 0.1416417236328125, 0.14236058044433594, 0.14315110778808593, 0.14299545288085938, 0.14764031982421874, 0.14202879333496093, 0.14308665466308593, 0.14206396484375, 0.14126345825195313, 0.14206108093261718, 0.14197398376464843, 0.14237008666992187, 0.14147625732421876, 0.14181581115722655, 0.14120787048339845, 0.14112553405761719, 0.14191421508789062, 0.14099623107910156, 0.14472434997558595, 0.14134681701660157, 0.14157414245605468, 0.14102528381347657, 0.14148786926269533, 0.14175975036621094, 0.1421068115234375, 0.14250425720214843, 0.14130227661132813, 0.1414430694580078, 0.14163148498535155, 0.1438756103515625, 0.14266438293457032, 0.14169929504394532, 0.1417431640625, 0.14346949768066405, 0.1440498809814453, 0.1420572509765625, 0.14242367553710938, 0.14241007995605467, 0.14312620544433594, 0.14517744445800781, 0.14146070861816407, 0.14273411560058594, 0.14152088928222656, 0.142077880859375, 0.14199404907226562, 0.14252032470703124, 0.14284384155273439, 0.1423289337158203, 0.1427404479980469, 0.14152499389648437, 0.14282281494140625, 0.14268048095703126, 0.14308348083496095, 0.1432855987548828, 0.1421762237548828, 0.14215606689453125, 0.14236735534667969, 0.14269850158691405, 0.14261453247070313, 0.14257151794433592, 0.14317945861816406, 0.141998046875, 0.14250172424316407, 0.14495989990234376, 0.14258367919921874, 0.14276016235351563, 0.1430546875, 0.14315740966796875, 0.14191194152832032, 0.14145138549804687, 0.1420185546875, 0.14193049621582032, 0.142376953125, 0.14195712280273437, 0.1430768280029297, 0.1422380828857422, 0.14247100830078124, 0.14123992919921874, 0.14183084106445312, 0.14171749877929687, 0.14185430908203125, 0.1422683563232422, 0.14164373779296874, 0.14119168090820314, 0.14080613708496093, 0.14169491577148438, 0.1418363494873047, 0.14173590087890625, 0.14792643737792968, 0.1417628173828125, 0.14161456298828126, 0.14129244995117188, 0.14224461364746094, 0.14185894775390626, 
0.1425569305419922, 0.14178317260742188, 0.1418997802734375, 0.14170317077636718, 0.14523187255859374, 0.14171955871582032, 0.14127923583984375, 0.14205746459960938, 0.1426321258544922, 0.1449451446533203, 0.14163848876953125, 0.1418014373779297, 0.1417523193359375, 0.1434066925048828, 0.14441696166992188, 0.14159062194824218, 0.14126092529296874, 0.14123353576660155, 0.1417222442626953, 0.1420221405029297, 0.14159635925292968, 0.14280111694335937, 0.14160751342773437, 0.1418318328857422, 0.14127488708496094, 0.14254893493652343, 0.14394026184082032, 0.14210588073730468, 0.14262109375, 0.14141676330566405, 0.14134413146972657, 0.1411273956298828, 0.1422181396484375, 0.1421721649169922, 0.14231756591796876, 0.14349241638183594, 0.14252703857421875, 0.14211903381347657, 0.14507615661621093, 0.14330201721191407, 0.1433504638671875, 0.1430160675048828, 0.14413792419433594, 0.1425531768798828, 0.1422514953613281, 0.14230348205566407, 0.14301011657714843, 0.1456681671142578, 0.14359539794921874, 0.14436979675292969, 0.14271078491210937, 0.1429679412841797, 0.14217100524902343, 0.14280455017089844, 0.14206108093261718, 0.1425396728515625, 0.14342710876464843, 0.14219644165039064, 0.142629638671875, 0.14272306823730468, 0.14365866088867188, 0.14273638916015624, 0.14347775268554688, 0.1431183319091797, 0.14240972900390625, 0.14194793701171876, 0.14212556457519532, 0.1420333709716797, 0.14244224548339843, 0.1428372802734375, 0.1423585205078125, 0.14193539428710938, 0.14226988220214845, 0.1452999725341797, 0.14248098754882813, 0.14214793395996095, 0.143931396484375, 0.14421612548828125, 0.14473338317871093, 0.14275616455078124, 0.14187161254882813, 0.14225340270996092, 0.14242422485351564, 0.14493717956542967, 0.14243458557128907, 0.14249481201171876, 0.14249871826171875, 0.14243983459472656, 0.14213711547851562, 0.1430187225341797, 0.14419363403320312, 0.1422142791748047, 0.1430414123535156, 0.14248089599609376, 0.14250137329101562, 0.14358377075195314, 0.14272149658203126, 0.14343782043457032, 0.14219059753417967, 0.14213046264648438, 0.1421543426513672, 0.1423538818359375, 0.14239811706542968, 0.142202880859375, 0.14313641357421875, 0.14263536071777344, 0.14257122802734376, 0.14627049255371094, 0.14284092712402344, 0.1420914306640625, 0.14254464721679688, 0.14304013061523438, 0.14215000915527343, 0.14156390380859374, 0.141844482421875, 0.14197760009765625, 0.14252236938476562, 0.14577186584472657, 0.14148880004882813, 0.14272848510742187, 0.14234083557128907, 0.14184640502929688, 0.1421510009765625, 0.14221597290039062, 0.14287411499023436, 0.143077880859375, 0.14142416381835937, 0.14149014282226563, 0.14245120239257814, 0.14240339660644533, 0.14234828186035156, 0.14223744201660157, 0.14307801818847657, 0.14231446838378906, 0.14207472229003906, 0.1434582061767578, 0.14251596069335937, 0.14241993713378906, 0.14239578247070311, 0.1429811248779297, 0.14165402221679688, 0.1421917724609375, 0.14426109313964844, 0.14200694274902342, 0.14221539306640624, 0.1429667205810547, 0.144872802734375, 0.14549066162109375, 0.14206565856933595, 0.1434207305908203, 0.14279750061035157, 0.14275788879394533, 0.14442425537109374, 0.14344053649902344, 0.14227459716796875, 0.14186087036132813, 0.14254197692871093, 0.14316566467285155, 0.14255783081054688, 0.1426117706298828, 0.14366586303710938, 0.14260223388671875, 0.14171676635742186, 0.14248419189453124, 0.14324327087402344, 0.14216397094726563, 0.1426241912841797, 0.14310588073730468, 0.14214620971679687, 0.14230514526367188, 0.1423771514892578, 0.1427671661376953, 
0.14179014587402344, 0.1420977325439453, 0.14319276428222658, 0.1428868865966797, 0.1450449523925781, 0.1423797149658203, 0.14217613220214845, 0.14191346740722657, 0.14156454467773438, 0.14365286254882811, 0.14179122924804688, 0.1415557098388672, 0.14189567565917968, 0.14240562438964843, 0.14185218811035155, 0.14373001098632812, 0.1425408935546875, 0.1422689971923828, 0.14194464111328126, 0.1416992645263672, 0.14249903869628905, 0.14230335998535157, 0.1432930908203125, 0.1416110076904297, 0.14160076904296875, 0.14162944030761718, 0.14145436096191405, 0.14149935913085937, 0.14232984924316405, 0.14313827514648436, 0.14136358642578126, 0.14129168701171874, 0.14291763305664062, 0.14163740539550781, 0.14185084533691406, 0.14154054260253907, 0.1426164855957031, 0.14229530334472656, 0.1419204406738281, 0.142018310546875, 0.14759738159179686, 0.1419639434814453, 0.14228274536132812, 0.14365426635742187, 0.1430946807861328, 0.14268821716308594, 0.14197673034667968, 0.1414617919921875, 0.14543228149414061, 0.15279983520507812, 0.14431996154785157, 0.14248194885253906, 0.1415017547607422, 0.141935302734375, 0.14375730895996094, 0.14200218200683593, 0.14151632690429689, 0.14208000183105468, 0.14178761291503905, 0.1415302734375, 0.1416343994140625, 0.14259365844726563, 0.14243247985839844, 0.14178466796875, 0.14330857849121093, 0.14184938049316406, 0.14194073486328124, 0.14270054626464843, 0.14197555541992188, 0.14163699340820313, 0.14190861511230468, 0.1428152618408203, 0.142034912109375, 0.1454571533203125, 0.14314227294921875, 0.14184512329101562, 0.1420384979248047, 0.144046630859375]",tokens/s,7.01684468381095,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1885.319168,1057.947648,0.0,662.700032,622.833664,s,1,9.176845703125,9.176845703125,0.0,9.176845703125,9.176845703125,9.176845703125,9.176845703125,[9.176845703125],,kWh,5.983104099579274e-05,6.592579648255514e-06,2.243474016999647e-05,8.885836081404472e-05,,MB,1932.9024,1181.679616,0.0,765.46048,735.57504,s,10,0.5673114814758301,0.05673114814758301,0.0006719552344458886,0.056575199127197266,0.05756223526000977,0.057724654388427735,0.05785458969116211,"[0.05568246459960938, 0.05743948745727539, 0.05628220748901367, 0.0578870735168457, 0.056626686096191405, 0.0559666862487793, 0.056914398193359375, 0.056462623596191405, 0.05652371215820313, 0.05752614212036133]",tokens/s,4512.512232857158,kWh,1.6440276262641052e-06,1.8130722293314727e-07,7.553095930112511e-07,2.5806444422085035e-06,tokens/kWh,99200027.64151283,MB,1937.088512,1194.262528,0.0,778.043392,751.3984,s,10,35.474713867187496,3.54747138671875,0.007734135746039402,3.5455054931640624,3.55517255859375,3.5604150146484375,3.5646089794921876,"[3.541433837890625, 3.544659912109375, 3.554007568359375, 3.53791064453125, 3.565657470703125, 3.54635107421875, 3.541282470703125, 3.5521962890625, 3.54877880859375, 
3.542435791015625]",tokens/s,17.75912844170172,kWh,0.00010247511134373794,1.130311947542708e-05,4.0495640074390033e-05,0.00015427387089355504,tokens/kWh,408364.68051980337,,s,630,35.46810222244262,0.05629857495625813,0.0007064865954460904,0.0562174072265625,0.05679550514221192,0.05721389503479004,0.05880051448822022,"[0.05526563262939453, 0.05572528076171875, 0.05762736129760742, 0.056774879455566404, 0.056012641906738284, 0.05588940811157227, 0.05678540802001953, 0.056677440643310546, 0.05599878311157227, 0.056285152435302736, 0.05600102233886719, 0.05904579162597656, 0.05614412689208984, 0.057524223327636716, 0.057877727508544925, 0.0562861442565918, 0.05600649642944336, 0.055981632232666015, 0.05604191970825195, 0.05592268753051758, 0.05595340728759766, 0.05602479934692383, 0.055433662414550784, 0.055285633087158205, 0.05556835174560547, 0.056395774841308595, 0.05536547088623047, 0.05553308868408203, 0.055761505126953125, 0.05616249465942383, 0.05604131317138672, 0.05590156936645508, 0.05593363189697265, 0.05576512145996094, 0.055772319793701175, 0.055417022705078124, 0.05543993759155273, 0.05603263854980469, 0.05643686294555664, 0.05625692749023437, 0.055820289611816405, 0.055640064239501956, 0.05687686538696289, 0.05662464141845703, 0.055684799194335936, 0.0555445442199707, 0.055936897277832034, 0.056560222625732424, 0.05610886383056641, 0.056264129638671875, 0.055823104858398434, 0.05585036849975586, 0.05596614456176758, 0.05609062576293945, 0.05607628631591797, 0.05652684783935547, 0.05629536056518555, 0.05670691299438477, 0.05638780975341797, 0.05668044662475586, 0.05739708709716797, 0.056750431060791015, 0.05677587127685547, 0.05590249633789063, 0.05670064163208008, 0.05655094528198242, 0.0565968017578125, 0.05629465484619141, 0.0572545280456543, 0.056054080963134766, 0.056063201904296874, 0.05610134506225586, 0.056037216186523436, 0.05608607864379883, 0.060066688537597654, 0.05804032135009766, 0.056575679779052736, 0.056258846282958984, 0.055932960510253905, 0.056559616088867185, 0.05605971145629883, 0.0559617919921875, 0.05638265609741211, 0.056288063049316404, 0.05572544097900391, 0.055569023132324216, 0.055569984436035155, 0.05528639984130859, 0.05545964813232422, 0.05568921661376953, 0.05630752182006836, 0.05616966247558594, 0.05626544189453125, 0.05573007965087891, 0.056004543304443356, 0.05686489486694336, 0.05598998260498047, 0.05576870346069336, 0.05581727981567383, 0.05618678283691406, 0.05610691070556641, 0.05641020965576172, 0.05595091247558594, 0.05601443099975586, 0.056099681854248046, 0.056094688415527345, 0.056428577423095705, 0.05631292724609375, 0.05634121704101563, 0.05629990386962891, 0.056749855041503906, 0.056369182586669925, 0.056411903381347654, 0.057159934997558594, 0.056155807495117185, 0.05706159973144531, 0.056419681549072266, 0.05627328109741211, 0.056244640350341796, 0.05615734481811523, 0.055769214630126955, 0.05592160034179688, 0.05559888076782227, 0.05556803131103515, 0.05586956787109375, 0.056053600311279296, 0.05600531387329102, 0.05574431991577149, 0.055769088745117185, 0.05882166290283203, 0.056545761108398436, 0.056116767883300785, 0.05613881683349609, 0.05610432052612305, 0.05621334457397461, 0.056148670196533204, 0.05633638381958008, 0.05575299072265625, 0.055780223846435543, 0.055712608337402346, 0.05591839981079102, 0.05614400100708008, 0.056256065368652346, 0.05762911987304688, 0.057290912628173825, 0.05648169708251953, 0.05640758514404297, 0.05631206512451172, 0.056793312072753906, 0.05623756790161133, 0.05622809600830078, 
0.056219806671142576, 0.05657379150390625, 0.05657596969604492, 0.05746051025390625, 0.05660675048828125, 0.05679766464233398, 0.05663948822021484, 0.056446975708007815, 0.05671446228027344, 0.056539936065673826, 0.05677081680297852, 0.05677155303955078, 0.056580894470214846, 0.056825599670410155, 0.057229217529296876, 0.05709865570068359, 0.05769209671020508, 0.05600604629516601, 0.05650697708129883, 0.05630361557006836, 0.0571146240234375, 0.05543945693969726, 0.05572099304199219, 0.0559788818359375, 0.05596521759033203, 0.055900768280029295, 0.056446369171142576, 0.05781142425537109, 0.05669907379150391, 0.05601670455932617, 0.0556844482421875, 0.05567299270629883, 0.055502689361572266, 0.056119968414306644, 0.05571964645385742, 0.05602102279663086, 0.05617625427246094, 0.05611119842529297, 0.0563485107421875, 0.055932960510253905, 0.055539840698242186, 0.05535878372192383, 0.05522211074829102, 0.0558616943359375, 0.056254814147949216, 0.0560722541809082, 0.05567273712158203, 0.055814239501953126, 0.05584076690673828, 0.055357440948486325, 0.05652070236206055, 0.05643264007568359, 0.05650636672973633, 0.05874873733520508, 0.056953025817871095, 0.05687705612182617, 0.056209247589111326, 0.05631129455566406, 0.05652492904663086, 0.05611775970458984, 0.05657193756103516, 0.05633865737915039, 0.05629884719848633, 0.05625062561035156, 0.056323486328125, 0.05598287963867187, 0.056257919311523436, 0.055758560180664066, 0.05594345474243164, 0.05583536148071289, 0.05569910430908203, 0.056172576904296875, 0.055862560272216796, 0.056470462799072266, 0.056027137756347656, 0.05603033447265625, 0.056021888732910155, 0.05616857528686523, 0.05575811386108399, 0.05590095901489258, 0.056156158447265625, 0.05593683242797851, 0.055777278900146485, 0.05621500778198242, 0.05631817626953125, 0.05606361770629883, 0.05656643295288086, 0.05600259017944336, 0.05608857727050781, 0.056051712036132816, 0.05591603088378906, 0.05632275390625, 0.05560710525512695, 0.05568022537231445, 0.05567567825317383, 0.05541616058349609, 0.055892478942871096, 0.05602278518676758, 0.05629510498046875, 0.05843628692626953, 0.056631488800048826, 0.05613116836547852, 0.05656326293945312, 0.05694550323486328, 0.05689984130859375, 0.0567825927734375, 0.05654732894897461, 0.05663875198364258, 0.0566952018737793, 0.056377281188964845, 0.056420703887939454, 0.05641836929321289, 0.056534591674804686, 0.056103199005126954, 0.05613087844848633, 0.05640047836303711, 0.05628863906860351, 0.05611536026000977, 0.056594688415527346, 0.05580563354492187, 0.055970558166503905, 0.05670297622680664, 0.05624745559692383, 0.06635810852050782, 0.05611110305786133, 0.05620956802368164, 0.05604502487182617, 0.0560316162109375, 0.056061183929443356, 0.057243457794189455, 0.05599942398071289, 0.056051712036132816, 0.05630265426635742, 0.055796417236328125, 0.055549312591552734, 0.05530713653564453, 0.05586329650878906, 0.05531238555908203, 0.05679017639160156, 0.05628195190429688, 0.056387008666992186, 0.05617523193359375, 0.05634799957275391, 0.05650908660888672, 0.056772415161132815, 0.05601705551147461, 0.05602659225463867, 0.056449535369873044, 0.056545280456542966, 0.05661491012573242, 0.056635265350341794, 0.05638361740112305, 0.056506591796875, 0.056403743743896485, 0.06072662353515625, 0.05719516754150391, 0.05693993759155273, 0.056662113189697265, 0.0569126091003418, 0.0564890251159668, 0.056703071594238284, 0.056652000427246094, 0.05623235321044922, 0.05613302230834961, 0.055217952728271485, 0.05583647918701172, 0.05610902404785156, 
0.05637129592895508, 0.056043487548828125, 0.05579814529418945, 0.05647564697265625, 0.05532380676269531, 0.055479137420654294, 0.05581414413452149, 0.055979839324951174, 0.05617273712158203, 0.05623311996459961, 0.055892223358154296, 0.05568368148803711, 0.05520793533325195, 0.05528163146972656, 0.05536550521850586, 0.05828623962402344, 0.05612518310546875, 0.056282848358154294, 0.056351264953613284, 0.05620121765136719, 0.055737632751464844, 0.05607916641235351, 0.05599555206298828, 0.05741027069091797, 0.05667638397216797, 0.05648998260498047, 0.05659830474853516, 0.05627734375, 0.05633001708984375, 0.056069889068603516, 0.056420574188232424, 0.056242496490478515, 0.05664303970336914, 0.05635712051391602, 0.05870191955566406, 0.05695398330688477, 0.056539104461669924, 0.05663199996948242, 0.05641836929321289, 0.056659423828125, 0.05634076690673828, 0.056596446990966796, 0.057444801330566404, 0.056598464965820314, 0.056465377807617186, 0.05698569488525391, 0.0580824966430664, 0.05647411346435547, 0.056043617248535155, 0.05600185775756836, 0.05595568084716797, 0.05595808029174805, 0.05622822570800781, 0.05611289596557617, 0.055943168640136716, 0.055965023040771486, 0.05553014373779297, 0.05598751831054687, 0.05618153762817383, 0.05605292892456055, 0.05582166290283203, 0.05621033477783203, 0.056033184051513675, 0.056182880401611325, 0.055760894775390625, 0.05580361557006836, 0.05596092987060547, 0.05585359954833984, 0.05610947036743164, 0.05590995025634766, 0.056062400817871096, 0.05590774536132812, 0.05589052963256836, 0.055919776916503905, 0.055962753295898435, 0.055854816436767575, 0.056104991912841795, 0.056885215759277345, 0.056569854736328126, 0.05637907028198242, 0.056677921295166016, 0.05610063934326172, 0.05605478286743164, 0.0560225601196289, 0.056062049865722656, 0.05617907333374023, 0.056233985900878906, 0.05634038543701172, 0.05624358367919922, 0.056226558685302734, 0.05628067016601562, 0.0567704963684082, 0.05606787109375, 0.05600937652587891, 0.05596979141235352, 0.05669887924194336, 0.056301376342773435, 0.05619036865234375, 0.056510814666748045, 0.05575929641723633, 0.05628124618530273, 0.05599948883056641, 0.055974494934082034, 0.05615846252441406, 0.056309471130371096, 0.05625263977050781, 0.05711635208129883, 0.05728704071044922, 0.05608441543579101, 0.05692627334594726, 0.0555068473815918, 0.055550048828125, 0.05599814224243164, 0.05681388854980469, 0.05623993682861328, 0.05635910415649414, 0.056240127563476565, 0.055860897064208985, 0.05718460845947266, 0.05605574417114258, 0.05609392166137695, 0.056162208557128904, 0.05623855972290039, 0.05595286560058594, 0.056056224822998046, 0.05596377563476562, 0.05658012771606445, 0.056065601348876955, 0.05616419219970703, 0.05628780746459961, 0.056869056701660155, 0.05651846313476563, 0.05607628631591797, 0.05643199920654297, 0.05639641571044922, 0.05657190322875977, 0.056448478698730466, 0.05640758514404297, 0.05651968002319336, 0.05646764755249024, 0.05654230499267578, 0.05617532730102539, 0.056354686737060546, 0.056616127014160154, 0.05606063842773437, 0.05664521789550781, 0.05620374298095703, 0.05618700790405273, 0.0569727668762207, 0.05640975952148437, 0.05633331298828125, 0.05646121597290039, 0.05624812698364258, 0.05620915222167969, 0.05618719863891602, 0.056223743438720705, 0.056403358459472655, 0.0563721923828125, 0.05627897644042969, 0.05648774337768555, 0.05679091262817383, 0.05679526519775391, 0.059418014526367184, 0.0568276481628418, 0.056462047576904296, 0.056289409637451174, 0.05650022506713867, 
0.05613363265991211, 0.055811393737792966, 0.0556201286315918, 0.05585321426391601, 0.05601887893676758, 0.05610707092285156, 0.0561868782043457, 0.056281089782714844, 0.05633638381958008, 0.05613158416748047, 0.05608652877807617, 0.05633219146728516, 0.0563590087890625, 0.056240127563476565, 0.056306720733642575, 0.056228832244873045, 0.056338432312011716, 0.056586238861083986, 0.05632624053955078, 0.055942337036132814, 0.05624803161621094, 0.056395809173583986, 0.05559091186523438, 0.055695358276367186, 0.05589347076416016, 0.05659292984008789, 0.05610671997070313, 0.055427425384521486, 0.055535457611083985, 0.055967487335205075, 0.05588620758056641, 0.057097599029541014, 0.056156768798828124, 0.056452766418457034, 0.056392318725585935, 0.056245952606201174, 0.05647577667236328, 0.05605337524414063, 0.05619535827636719, 0.055944480895996095, 0.055998497009277344, 0.05608428955078125, 0.05846499252319336, 0.05639759826660156, 0.05644905471801758, 0.05599795150756836, 0.056750431060791015, 0.05620172882080078, 0.05630316925048828, 0.05596819305419922, 0.05629747009277344, 0.05829033660888672, 0.056446815490722654, 0.05698675155639649, 0.05739334487915039, 0.05594591903686524, 0.056207359313964846, 0.05629494476318359, 0.05615811157226563, 0.05632883071899414, 0.05637273788452148, 0.056301151275634766, 0.057432926177978516, 0.056346622467041016, 0.05689260864257813, 0.056420833587646484, 0.05638588714599609, 0.05633433532714844, 0.0562852783203125, 0.056354721069335936, 0.05612972640991211, 0.05621126556396484, 0.055799137115478514, 0.055543838500976564, 0.05584751892089844, 0.0558263053894043, 0.056475807189941406, 0.05626675033569336, 0.056282176971435546, 0.056140735626220704, 0.056831390380859374, 0.05637139129638672, 0.05521587371826172, 0.05617484664916992, 0.056220897674560545, 0.05605257415771484, 0.056154048919677735, 0.05607424163818359, 0.05607769775390625, 0.055604991912841795, 0.055433406829833984, 0.05541328048706055, 0.05587907028198242, 0.05595827102661133, 0.056016895294189455, 0.056371070861816405, 0.05610079956054687, 0.0562355842590332, 0.05640569686889649, 0.05683091354370117, 0.05616131210327149, 0.05647062301635742, 0.056317054748535156, 0.056267295837402344, 0.056412384033203124, 0.05632220840454102, 0.0563218879699707, 0.05624627304077148, 0.05636310577392578, 0.056473182678222655, 0.0562465934753418, 0.056578144073486325, 0.05634652709960938, 0.056784896850585936, 0.05904703903198242, 0.056922016143798826, 0.0565689582824707, 0.05637923049926758, 0.05630361557006836, 0.05633446502685547, 0.05602249526977539, 0.05600710296630859, 0.05605782318115234, 0.05599132919311523, 0.05635171127319336, 0.05593241500854492, 0.05601897430419922, 0.05593132781982422, 0.056070369720458986, 0.056212894439697264, 0.056209823608398435, 0.05615756988525391, 0.05631203079223633, 0.05631340789794922, 0.05655420684814453, 0.056068225860595705, 0.05563699340820313, 0.05573324966430664, 0.056118305206298826, 0.05621427154541016, 0.05629359817504883, 0.0562213134765625, 0.05631606292724609, 0.05589436721801758, 0.05606387329101563]",tokens/s,17.762438938764653,,, 
8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3952.275456,2152.660992,0.0,1757.413376,1736.37632,s,1,12.4734677734375,12.4734677734375,0.0,12.4734677734375,12.4734677734375,12.4734677734375,12.4734677734375,[12.4734677734375],,kWh,0.00015622058873749058,1.722508014731104e-05,5.7967268595998656e-05,0.00023141293748080028,,MB,3963.71968,2387.542016,0.0,1971.32288,1913.084928,s,10,0.5615455055236817,0.056154550552368176,0.00035814502564154307,0.05609428787231445,0.05638534507751465,0.056742896842956546,0.05702893825531006,"[0.05630588912963867, 0.05590630340576172, 0.05578108978271484, 0.05614937591552734, 0.05582883071899414, 0.05597932815551758, 0.05603919982910156, 0.05710044860839844, 0.05618767929077149, 0.05626736068725586]",tokens/s,4558.846923033629,kWh,1.655127770386044e-06,1.8252889147709846e-07,9.916957086101636e-07,2.829352370473306e-06,tokens/kWh,90480069.81087874,MB,3967.901696,2408.513536,0.0,1992.2944,1972.635136,s,10,35.18063403320313,3.518063403320313,0.027803330613491103,3.5024200439453126,3.5556041748046874,3.558329553222656,3.560509855957031,"[3.502048828125, 3.4907255859375, 3.502791259765625, 3.5011953125, 3.495353759765625, 3.4844482421875, 3.561054931640625, 3.542568359375, 3.54544921875, 3.55499853515625]",tokens/s,17.907579476976235,kWh,0.00010228365814502975,1.1281983364420254e-05,4.5486508140589655e-05,0.00015905214965003961,tokens/kWh,396096.50129607227,,s,630,35.17682631301875,0.05583623224288699,0.0008420710525841882,0.05569855880737305,0.0566146297454834,0.056956986618041994,0.05953084297180176,"[0.055654399871826174, 0.05565011215209961, 0.05557267379760742, 0.056282398223876956, 0.055591392517089847, 0.055548160552978516, 0.055798912048339845, 0.05553241729736328, 0.05513216018676758, 0.05503171157836914, 0.05506057739257812, 0.055207454681396484, 0.05526166534423828, 0.05516287994384766, 0.054975841522216795, 0.05464115142822266, 0.054693599700927735, 0.054419776916503904, 0.05431046295166016, 0.054561088562011716, 0.055048126220703125, 0.055236927032470705, 0.05522393417358398, 0.055328353881835934, 0.05484576034545898, 0.05524435043334961, 0.05848566436767578, 0.055215679168701175, 0.05506067276000977, 0.05489075088500977, 0.0554284782409668, 0.05545843124389648, 0.057030654907226565, 0.055228416442871096, 0.055363582611083983, 0.05525094223022461, 0.05545779037475586, 0.05539945602416992, 0.05553571319580078, 0.05573900985717774, 0.055650367736816406, 0.05585308837890625, 0.05621161651611328, 0.05565030288696289, 0.05560092926025391, 0.05609699249267578, 0.05633433532714844, 0.05592473602294922, 0.05589334487915039, 0.058401344299316406, 0.05714332962036133, 0.05591865539550781, 0.056172542572021485, 0.05565999984741211, 0.05510915374755859, 0.05526416015625, 0.05515683364868164, 0.05579980850219726, 0.05551923370361328, 0.05542707061767578, 0.05578137588500977, 0.05706047821044922, 0.05552012634277344, 
0.0550299186706543, 0.055273246765136716, 0.055562976837158204, 0.05536972808837891, 0.05515468978881836, 0.05492531204223633, 0.05502361679077149, 0.055011329650878904, 0.05526505661010742, 0.05464495849609375, 0.054771713256835934, 0.05499699020385742, 0.055193599700927735, 0.0554598388671875, 0.054935550689697264, 0.05450137710571289, 0.05462812805175781, 0.05498886489868164, 0.05524051284790039, 0.054796638488769533, 0.055949310302734374, 0.05554972839355469, 0.055446849822998044, 0.055591678619384764, 0.05555372619628906, 0.055556575775146486, 0.05566463851928711, 0.05578137588500977, 0.056010753631591796, 0.055844863891601565, 0.05633612823486328, 0.05584716796875, 0.05570502471923828, 0.055648574829101564, 0.05572224044799805, 0.05550284957885742, 0.05582169723510742, 0.05562227249145508, 0.0558941764831543, 0.05570339202880859, 0.055443073272705076, 0.05531795120239258, 0.05522528076171875, 0.0551649284362793, 0.05580595016479492, 0.055294975280761716, 0.05569766235351563, 0.05559577560424805, 0.055537086486816406, 0.05562835311889648, 0.0554247055053711, 0.055146816253662106, 0.055112926483154294, 0.05497296142578125, 0.05524300765991211, 0.05510508728027344, 0.05525942230224609, 0.05536374282836914, 0.05568102264404297, 0.05581414413452149, 0.056649280548095704, 0.05538860702514648, 0.05503776168823242, 0.05483520126342773, 0.05523606491088867, 0.05542147064208985, 0.0556151351928711, 0.0554516487121582, 0.05524310302734375, 0.055175167083740234, 0.05527523040771484, 0.05538435363769531, 0.055283489227294924, 0.055586719512939455, 0.05589228820800781, 0.055992321014404295, 0.05584281539916992, 0.055728126525878906, 0.055756160736083984, 0.055960193634033206, 0.05571993637084961, 0.05558272171020508, 0.05624550247192383, 0.055749374389648436, 0.05978726577758789, 0.05578956985473633, 0.055723262786865235, 0.05573471832275391, 0.05554003143310547, 0.05570060729980469, 0.0555098876953125, 0.05566873550415039, 0.055670783996582034, 0.05583977508544922, 0.05530108642578125, 0.054919166564941405, 0.05566828918457031, 0.055363296508789066, 0.05521062469482422, 0.05530019378662109, 0.055488510131835936, 0.05562515258789062, 0.05562015914916992, 0.05565004730224609, 0.055562496185302734, 0.05591244888305664, 0.055282974243164064, 0.055050975799560545, 0.05513216018676758, 0.055347198486328124, 0.05578540802001953, 0.055511104583740235, 0.055488510131835936, 0.05522227096557617, 0.055259136199951174, 0.05548646545410156, 0.05533695983886719, 0.055537662506103515, 0.05535302352905273, 0.05565267181396484, 0.05560688018798828, 0.05537833786010742, 0.055433216094970705, 0.05559091186523438, 0.05550425720214844, 0.05582912063598633, 0.05564575958251953, 0.055817150115966795, 0.056124576568603514, 0.0557371826171875, 0.0558359375, 0.05565513610839844, 0.055482368469238284, 0.055671966552734375, 0.05554671859741211, 0.05561139297485351, 0.05575215911865234, 0.05559555053710938, 0.055209983825683595, 0.0546058235168457, 0.05660988616943359, 0.05564697647094727, 0.05546368026733398, 0.05511788940429688, 0.05544585418701172, 0.05551923370361328, 0.05672256088256836, 0.054823230743408204, 0.05454086303710937, 0.05419993591308594, 0.0546429443359375, 0.05451993560791016, 0.05444607925415039, 0.05466719818115234, 0.05517059326171875, 0.055547870635986325, 0.055382591247558594, 0.05514854431152344, 0.05500457763671875, 0.054633056640625, 0.05506256103515625, 0.05514031982421875, 0.055021568298339846, 0.05802540969848633, 0.05592121505737305, 0.05555814361572266, 0.055218177795410155, 
0.0551580810546875, 0.055351486206054686, 0.05535353469848633, 0.055269695281982424, 0.055605247497558595, 0.055672191619873045, 0.0558045425415039, 0.05566668701171875, 0.06031961441040039, 0.05594486236572266, 0.05576051330566406, 0.05626732635498047, 0.05572774505615234, 0.05567724609375, 0.05598553466796875, 0.05605830383300781, 0.05585974502563477, 0.055713024139404294, 0.055865280151367186, 0.055966209411621094, 0.05528998565673828, 0.05505862426757813, 0.05507727813720703, 0.05559305572509766, 0.05525955200195312, 0.05501948928833008, 0.05494172668457031, 0.0547696647644043, 0.05500310516357422, 0.054986785888671875, 0.05541388702392578, 0.05550374221801758, 0.05572198486328125, 0.055549663543701173, 0.05556028747558594, 0.05523247909545898, 0.0553856315612793, 0.054890815734863284, 0.0549728012084961, 0.055131935119628904, 0.05537964630126953, 0.05544588851928711, 0.05516099166870117, 0.05495993423461914, 0.05547417449951172, 0.05453023910522461, 0.05490435028076172, 0.055171390533447266, 0.055459999084472654, 0.05560115051269531, 0.05585903930664062, 0.05580160140991211, 0.055765407562255856, 0.05579743957519531, 0.05611142349243164, 0.055810047149658204, 0.056003841400146484, 0.05598857498168945, 0.05564985656738281, 0.055919456481933597, 0.05584076690673828, 0.055760894775390625, 0.055461952209472656, 0.0555478401184082, 0.05596281433105469, 0.05565932846069336, 0.05580287933349609, 0.05636198425292969, 0.056233985900878906, 0.0567086067199707, 0.056784767150878906, 0.05561315155029297, 0.05510211181640625, 0.05498700714111328, 0.05487411117553711, 0.055252735137939456, 0.0553515510559082, 0.05582438278198242, 0.05519974517822265, 0.05521775817871094, 0.055230880737304686, 0.05535251235961914, 0.05514652633666992, 0.05527222442626953, 0.055564289093017576, 0.05542092895507812, 0.055395809173583985, 0.05495616149902344, 0.05483152008056641, 0.05517926406860352, 0.05493100738525391, 0.05462879943847656, 0.05525094223022461, 0.055389633178710936, 0.05568272018432617, 0.05544847869873047, 0.055152641296386716, 0.05524684906005859, 0.055218177795410155, 0.055582687377929686, 0.05565193557739258, 0.055699905395507815, 0.055889598846435545, 0.05598988723754883, 0.05561619186401367, 0.05553526306152344, 0.0552916145324707, 0.055597217559814456, 0.05550307083129883, 0.05573247909545898, 0.055721023559570315, 0.05551712036132812, 0.055806976318359375, 0.05541875076293945, 0.055390335083007815, 0.054848670959472656, 0.054664031982421875, 0.055538753509521484, 0.05523756790161133, 0.05525299072265625, 0.05538816070556641, 0.055654399871826174, 0.05549260711669922, 0.0556193618774414, 0.055666656494140626, 0.05556572723388672, 0.0548934097290039, 0.055041118621826174, 0.05534735870361328, 0.05539507293701172, 0.055578624725341794, 0.054839073181152345, 0.05462579345703125, 0.054526142120361325, 0.05466502380371094, 0.05469257736206055, 0.05530214309692383, 0.054601470947265626, 0.05506073760986328, 0.055368961334228514, 0.05543756866455078, 0.055233024597167966, 0.05520793533325195, 0.05541222381591797, 0.05502518463134766, 0.05524563217163086, 0.05539372634887695, 0.05554585647583008, 0.055519775390625, 0.05577916717529297, 0.055676959991455076, 0.06326457595825195, 0.05648384094238281, 0.05652099227905273, 0.05657193756103516, 0.05656371307373047, 0.058060798645019535, 0.056311969757080076, 0.05638742446899414, 0.05612086486816406, 0.056740318298339844, 0.05644083023071289, 0.05571152114868164, 0.05599580764770508, 0.056046302795410154, 0.056465503692626956, 0.055678497314453124, 
0.055935455322265626, 0.05565577697753906, 0.05504886245727539, 0.05526323318481445, 0.055070720672607425, 0.05557020950317383, 0.05952534484863281, 0.05653504180908203, 0.05569331359863281, 0.05575600051879883, 0.05598230361938476, 0.05613216018676758, 0.05588351821899414, 0.05572224044799805, 0.0554700813293457, 0.0585992317199707, 0.056664257049560546, 0.05787231826782226, 0.05953308868408203, 0.056237857818603514, 0.05581177520751953, 0.056441471099853514, 0.05595686340332031, 0.05608736038208008, 0.05710438537597656, 0.056025089263916014, 0.05652070236206055, 0.05622988891601562, 0.05689263916015625, 0.05649692916870117, 0.05664972686767578, 0.05688729476928711, 0.05652889633178711, 0.0568072624206543, 0.05639913558959961, 0.05657632064819336, 0.056231582641601566, 0.05645609664916992, 0.056594432830810545, 0.05646281433105469, 0.056944385528564456, 0.05633718490600586, 0.05719039916992188, 0.05654463958740234, 0.05532057571411133, 0.05605558395385742, 0.05568739318847656, 0.05574348831176758, 0.055989246368408206, 0.05622579193115235, 0.0562606086730957, 0.05608963012695312, 0.05582697677612305, 0.055504638671875, 0.05551718521118164, 0.05585171127319336, 0.05588787078857422, 0.05592652893066406, 0.05617279815673828, 0.056196895599365235, 0.05654553604125977, 0.056624191284179684, 0.05623849487304688, 0.05662908935546875, 0.056088993072509766, 0.056393184661865235, 0.056129344940185545, 0.05646640014648437, 0.056281089782714844, 0.05633782577514648, 0.05597859191894531, 0.0563056640625, 0.056266624450683596, 0.05584703826904297, 0.05563347244262695, 0.05576544189453125, 0.056293216705322266, 0.056536479949951174, 0.0601075210571289, 0.05669862365722656, 0.05687526321411133, 0.05653839874267578, 0.056113887786865234, 0.056455169677734375, 0.0563240966796875, 0.05663129425048828, 0.056607967376708986, 0.056405982971191405, 0.05627686309814453, 0.05638371276855469, 0.05618345642089844, 0.056102977752685544, 0.056000511169433595, 0.0560120964050293, 0.056260639190673825, 0.05622371292114258, 0.05626950454711914, 0.05634857559204102, 0.05636713409423828, 0.05598419189453125, 0.055670783996582034, 0.05597798538208008, 0.056186206817626955, 0.05643945693969726, 0.05610291290283203, 0.05611276626586914, 0.05596601486206055, 0.05539916610717773, 0.05550694274902344, 0.055531169891357424, 0.05584835052490234, 0.05597894287109375, 0.05617657470703125, 0.05574662399291992, 0.0558837776184082, 0.05601052856445313, 0.05577084732055664, 0.055610942840576175, 0.05561644744873047, 0.05590835189819336, 0.056313377380371094, 0.055944961547851564, 0.0563350715637207, 0.05594521713256836, 0.056635391235351565, 0.05622556686401367, 0.05633865737915039, 0.05610905456542969, 0.056231937408447265, 0.05628860855102539, 0.05713782501220703, 0.056281089782714844, 0.05615820693969727, 0.0567275505065918, 0.05616025543212891, 0.05632819366455078, 0.05635887908935547, 0.05663052749633789, 0.05598432159423828, 0.05550723266601563, 0.055624000549316405, 0.05577920150756836, 0.055779457092285156, 0.05546803283691406, 0.055905536651611326, 0.0553724479675293, 0.05547222518920898, 0.05698559951782227, 0.056403297424316406, 0.056021663665771486, 0.060393470764160156, 0.05673984146118164, 0.056174591064453126, 0.0559554557800293, 0.058210304260253906, 0.055976097106933596, 0.05624111938476563, 0.05648239898681641, 0.05628137588500977, 0.05581414413452149, 0.06095177459716797, 0.055773983001708986, 0.05541414260864258, 0.05569395065307617, 0.05661286544799805, 0.05783875274658203, 0.05603359985351562, 
0.056207199096679684, 0.05665206527709961, 0.05618115234375, 0.055341056823730465, 0.05628492736816406, 0.056915969848632814, 0.05708211135864258, 0.05704207992553711, 0.0567152328491211, 0.05661356735229492, 0.05643487930297852, 0.05676163101196289, 0.05722390365600586, 0.057220958709716795, 0.058370208740234374, 0.05700751876831055, 0.05674454498291016, 0.05688115310668945, 0.057032703399658206, 0.056049663543701174, 0.05589126586914062, 0.05621830368041992, 0.05565030288696289, 0.05596160125732422, 0.05610886383056641, 0.05871756744384766, 0.05638156890869141, 0.05605446243286133, 0.05569945526123047, 0.05610662460327148, 0.05554201507568359, 0.05578329467773437, 0.05622761535644531, 0.05625904083251953, 0.05641625595092774, 0.056229183197021484, 0.0566995849609375, 0.05643468856811523, 0.05585504150390625, 0.05567855834960937, 0.056036865234375, 0.05683820724487305, 0.05672643280029297, 0.05629884719848633, 0.056368927001953124, 0.05569331359863281, 0.05580019378662109, 0.055844417572021486, 0.056323009490966795, 0.056370975494384766, 0.05627312088012695, 0.056013824462890625, 0.05616089630126953, 0.0569672966003418, 0.05652096176147461, 0.05667136001586914, 0.05639388656616211, 0.05651529693603516, 0.056346401214599606, 0.05636732864379883, 0.05663948822021484, 0.056378654479980465, 0.05622246551513672, 0.056051681518554684, 0.05654723358154297, 0.05649593734741211]",tokens/s,17.909517885268677,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,23791.407104,13034.78272,0.0,12639.535104,12621.66016,s,1,49.60141796875,49.60141796875,0.0,49.60141796875,49.60141796875,49.60141796875,49.60141796875,[49.60141796875],,kWh,0.00122082873514582,0.00013465903216312272,0.0004628125924720089,0.0018183003597809517,,MB,1213.681664,13802.340352,0.0,13386.121216,13251.628032,s,10,1.4718750915527345,0.14718750915527345,0.0017248891119877854,0.14775521850585938,0.14820942535400392,0.14853420181274413,0.14879402297973632,"[0.1423621063232422, 0.14804925537109376, 0.14636166381835938, 0.14885897827148437, 0.14783363342285155, 0.1471332550048828, 0.1481372528076172, 0.1476768035888672, 0.14751116943359374, 0.14795097351074218]",tokens/s,1739.2780234492336,kWh,4.296785209620409e-06,4.738581889929287e-07,2.8663544172645927e-06,7.63699781587793e-06,tokens/kWh,33521025.692551002,MB,1231.618048,13909.295104,0.0,13493.075968,13391.3472,s,10,53.44225341796874,5.344225341796876,0.012637173707958678,5.3420732421874995,5.360134765625,5.3652412109375,5.3693263671875,"[5.37034765625, 5.3293544921875, 5.3309423828125, 5.34403759765625, 5.35387060546875, 5.341775390625, 5.359, 5.33952490234375, 5.331029296875, 
5.34237109375]",tokens/s,11.788425070193178,kWh,0.00015585074465787838,1.7190843563150976e-05,0.00010353645292713635,0.0002765780411481657,tokens/kWh,227783.81009015194,,s,630,53.4385687026978,0.08482312492491706,0.00091845975627109,0.08465423965454101,0.08543396759033203,0.08608785858154297,0.08827593055725098,"[0.08452223968505859, 0.08503782653808593, 0.08520909118652344, 0.08513945770263671, 0.08509849548339844, 0.08469078063964844, 0.08559017944335938, 0.08568831634521484, 0.08500819396972656, 0.08503929901123047, 0.08496867370605468, 0.08500508880615235, 0.08615465545654297, 0.08596351623535156, 0.08606499481201171, 0.09082470703125, 0.08538054656982422, 0.08464031982421875, 0.08538515472412109, 0.08495311737060547, 0.08470710754394531, 0.08473961639404297, 0.09007587432861328, 0.08823808288574218, 0.08498486328125, 0.08502582550048828, 0.08446355438232422, 0.08479948425292969, 0.08433869171142579, 0.08479888153076172, 0.0847479019165039, 0.08486319732666016, 0.08534083557128906, 0.08483439636230469, 0.08468838500976562, 0.08498982238769531, 0.08499878692626953, 0.08555110168457031, 0.08462950134277344, 0.0873941421508789, 0.08462911987304687, 0.08471142578125, 0.08455017852783203, 0.08523776245117187, 0.08702361297607422, 0.08509645080566407, 0.08510214233398437, 0.08505177307128907, 0.08490220642089844, 0.0850408935546875, 0.08473177337646484, 0.08486502075195312, 0.08484467315673828, 0.0852146224975586, 0.08509276580810547, 0.08478707122802734, 0.08404000091552734, 0.08415580749511718, 0.08454192352294922, 0.08443631744384765, 0.08483100891113281, 0.08455372619628906, 0.08404502105712891, 0.08523951721191406, 0.0846830062866211, 0.08436531066894531, 0.08455712127685547, 0.08417555236816407, 0.0845823974609375, 0.08456185913085937, 0.08452851104736328, 0.08408953857421875, 0.08438579559326172, 0.08431977844238281, 0.08453533172607422, 0.08447840118408204, 0.08457843017578125, 0.0848067855834961, 0.08489446258544922, 0.08469200134277344, 0.08469811248779296, 0.08438985443115235, 0.08463311767578124, 0.08435955047607421, 0.08463593292236328, 0.08436531066894531, 0.08427299499511719, 0.08411033630371094, 0.0847733154296875, 0.08409891510009766, 0.0842425308227539, 0.08402595520019532, 0.0842977294921875, 0.08471379089355469, 0.08501324462890625, 0.08460991668701172, 0.08422201538085937, 0.08453228759765626, 0.0841246109008789, 0.0845164794921875, 0.08438317108154297, 0.08399750518798828, 0.08441664123535156, 0.0841226577758789, 0.08471151733398438, 0.08460108947753907, 0.08533238220214844, 0.08427040100097656, 0.08457718658447265, 0.08473804473876953, 0.08482611083984375, 0.0847667236328125, 0.08466841888427734, 0.08451423645019532, 0.08492835235595703, 0.08474211120605468, 0.0849354248046875, 0.08458755493164062, 0.08576703643798828, 0.08446985626220703, 0.08534786987304688, 0.08466831970214844, 0.08568665313720703, 0.08482425689697265, 0.08457830047607422, 0.08443814086914063, 0.08515408325195313, 0.08458601379394531, 0.08482614135742188, 0.08432848358154296, 0.08623046112060546, 0.08456192016601563, 0.0840159683227539, 0.08456819152832032, 0.08442249298095703, 0.08395791625976562, 0.08545075225830077, 0.0861299819946289, 0.08612934112548828, 0.08453347015380859, 0.08536383819580078, 0.0847833251953125, 0.0845767059326172, 0.08493875122070313, 0.08452710723876954, 0.0851596450805664, 0.08442908477783204, 0.08453286743164062, 0.08478777313232422, 0.08425583648681641, 0.08460931396484375, 0.0848237762451172, 0.08531161499023437, 0.08411196899414063, 0.08551628875732421, 
0.08373875427246094, 0.08409279632568359, 0.0841195526123047, 0.08463507080078125, 0.0842696304321289, 0.08404972839355469, 0.08388630676269532, 0.08464176177978516, 0.08382601928710938, 0.08466860961914062, 0.08435350036621093, 0.08402124786376954, 0.0841891860961914, 0.08431206512451171, 0.0847930908203125, 0.0848285140991211, 0.08419522857666016, 0.08408380889892578, 0.08442562866210937, 0.08490306854248048, 0.08459964752197266, 0.0849336929321289, 0.08447071838378906, 0.0852782745361328, 0.0864886703491211, 0.08453379058837891, 0.08429190063476563, 0.08499814605712891, 0.08359935760498047, 0.0849612808227539, 0.08353177642822265, 0.08396208190917968, 0.08428316497802735, 0.08400077056884765, 0.08558796691894531, 0.0855444793701172, 0.08430230712890625, 0.0850987548828125, 0.08496125030517578, 0.08587964630126953, 0.08421695709228516, 0.08454678344726563, 0.08427782440185547, 0.08470889282226562, 0.08428928375244141, 0.08470809936523438, 0.08445951843261719, 0.0850389404296875, 0.08471279907226563, 0.08534819030761719, 0.08485545349121094, 0.08551618957519531, 0.08516022491455078, 0.08493468475341796, 0.08513340759277344, 0.0853237762451172, 0.08477442932128906, 0.08462957000732421, 0.08430156707763672, 0.08484918212890626, 0.08472998046875, 0.08456569671630859, 0.08434262084960938, 0.08417123413085938, 0.08432588958740235, 0.0843658218383789, 0.08520694732666016, 0.08525628662109375, 0.08454099273681641, 0.08482246398925782, 0.08539552307128906, 0.08422755432128906, 0.084368896484375, 0.08445362854003906, 0.0843575668334961, 0.08403926086425781, 0.08431072235107422, 0.0845660171508789, 0.08452690887451172, 0.085057373046875, 0.08448397064208985, 0.08607587432861329, 0.08559724426269531, 0.0850145263671875, 0.0847955551147461, 0.08478390502929688, 0.08511727905273438, 0.08478275299072266, 0.08486911773681641, 0.08483334350585937, 0.08453772735595703, 0.08634563446044922, 0.08436908721923828, 0.08415740966796875, 0.08565663909912109, 0.08501350402832031, 0.08446355438232422, 0.08960614776611328, 0.08500208282470703, 0.08436137390136719, 0.08413565063476562, 0.0845865249633789, 0.08411100769042969, 0.0837760009765625, 0.08476067352294922, 0.08415846252441406, 0.08432157135009766, 0.084736572265625, 0.08436319732666016, 0.08444882965087891, 0.08541251373291016, 0.08478336334228516, 0.08489266967773437, 0.08500444793701172, 0.09206159973144531, 0.08471382141113282, 0.08499440002441407, 0.08481177520751954, 0.08466022491455077, 0.0845794906616211, 0.0846704330444336, 0.08476557159423828, 0.08478105926513672, 0.08449209594726563, 0.0840352325439453, 0.08474832153320312, 0.0844672622680664, 0.08420035552978515, 0.08473193359375, 0.08498995208740234, 0.08435664367675781, 0.08415280151367187, 0.08440585327148438, 0.08438758087158203, 0.08448265838623047, 0.08434073638916016, 0.08392915344238282, 0.0848603515625, 0.08727110290527344, 0.08570716857910156, 0.08525462341308594, 0.08489545440673828, 0.08431632232666016, 0.0846173095703125, 0.08462499237060547, 0.08461151885986327, 0.08528419494628907, 0.08530601501464843, 0.08593817901611328, 0.08543231964111328, 0.08508790588378906, 0.0855470733642578, 0.08523168182373046, 0.08544879913330078, 0.08547074890136719, 0.0851236801147461, 0.08512054443359375, 0.08496537780761719, 0.08468732452392579, 0.08449228668212891, 0.08429065704345703, 0.08470345306396485, 0.08431043243408202, 0.08457762908935547, 0.08423865509033203, 0.0843679962158203, 0.0843468780517578, 0.08476262664794922, 0.08436729431152344, 0.08451705932617187, 0.08439180755615235, 
0.08472492980957032, 0.08441734313964844, 0.08471756744384766, 0.08462540435791016, 0.08465382385253906, 0.0848611831665039, 0.08439997100830078, 0.08456617736816406, 0.08517427062988281, 0.084748291015625, 0.08503091430664063, 0.084947998046875, 0.08512786865234374, 0.0846454086303711, 0.08486573028564454, 0.08514508819580079, 0.08465465545654297, 0.08454045104980469, 0.08417903900146484, 0.08415116882324218, 0.08434483337402343, 0.08432425689697266, 0.08442684936523437, 0.08514559936523437, 0.08495426940917969, 0.0854835205078125, 0.08441942596435546, 0.08438294219970703, 0.08421660614013672, 0.0845638427734375, 0.08443507385253907, 0.0852459487915039, 0.08421552276611328, 0.08457654571533203, 0.08692092895507812, 0.08501277160644531, 0.08465574645996093, 0.0844307861328125, 0.08467443084716797, 0.08537554931640624, 0.08760684967041016, 0.0850948486328125, 0.08616960144042969, 0.08490140533447266, 0.08496790313720703, 0.08470848083496094, 0.08459046173095704, 0.08516022491455078, 0.085036865234375, 0.0852673568725586, 0.08450048065185548, 0.08454057312011719, 0.0844516830444336, 0.08513724517822266, 0.08445536041259766, 0.08498812866210938, 0.08450457763671874, 0.08462335968017579, 0.08682291412353516, 0.08411036682128906, 0.08430486297607422, 0.08473526763916016, 0.08452783966064453, 0.08416255950927734, 0.08458383941650391, 0.08456204986572266, 0.08466009521484374, 0.08472637176513671, 0.08430592346191407, 0.08522496032714844, 0.0848465576171875, 0.08513938903808593, 0.08497315216064454, 0.08535958099365235, 0.08509375762939453, 0.08505001831054687, 0.08486243438720703, 0.08521923065185547, 0.08518022155761719, 0.08507475280761718, 0.08475033569335938, 0.08737910461425781, 0.08528304290771484, 0.08454412841796875, 0.08489369964599609, 0.08528076934814453, 0.08436479949951171, 0.0840791015625, 0.08409513854980469, 0.08431104278564452, 0.0844250259399414, 0.08453107452392578, 0.08431068420410157, 0.08488662719726563, 0.08443993377685546, 0.08471965026855469, 0.08469821166992188, 0.08480655670166015, 0.08564530944824218, 0.08490393829345703, 0.08448716735839844, 0.08445849609375, 0.08468889617919922, 0.08467046356201172, 0.08464956665039063, 0.08451522827148437, 0.08785919952392578, 0.09575730895996094, 0.08576236724853516, 0.08500492858886718, 0.08487942504882813, 0.0846929931640625, 0.08548556518554687, 0.08474755096435548, 0.08492227172851563, 0.08444345855712891, 0.0844411849975586, 0.08381478118896485, 0.08436685180664062, 0.08399504089355468, 0.08408438110351563, 0.08421625518798828, 0.0837872314453125, 0.08375894165039062, 0.08468275451660157, 0.0842574691772461, 0.08371814727783203, 0.08382038116455078, 0.08391081237792969, 0.08598643493652344, 0.0845771484375, 0.08440422058105469, 0.08389968109130859, 0.08453517150878906, 0.08425888061523437, 0.08451971435546875, 0.08470124816894531, 0.08433657836914063, 0.08466432189941406, 0.0862003173828125, 0.08478672027587891, 0.08445308685302734, 0.08480844879150391, 0.0845159683227539, 0.08567804718017578, 0.08452950286865234, 0.08483427429199218, 0.08473836517333984, 0.0853752670288086, 0.0862208023071289, 0.0841338882446289, 0.08438301086425781, 0.08409126281738281, 0.08410899353027344, 0.0846322250366211, 0.08435692596435547, 0.08517606353759766, 0.08556793975830078, 0.08453529357910156, 0.08495414733886719, 0.08472297668457031, 0.08438751983642578, 0.08433602905273438, 0.08476703643798827, 0.08453548431396485, 0.08457430267333985, 0.0847132797241211, 0.0875331802368164, 0.08487967681884766, 0.08482147216796875, 0.08497183990478516, 
0.08501091003417968, 0.08546006774902344, 0.08496425628662109, 0.08786739349365234, 0.08609766387939453, 0.08510079956054688, 0.08511666870117188, 0.08513772583007813, 0.08453123474121094, 0.08424652862548829, 0.08432844543457031, 0.08433641815185547, 0.08393341064453125, 0.08407997131347657, 0.0851851806640625, 0.08426815795898437, 0.08445996856689453, 0.08453778839111328, 0.08435842895507813, 0.0842732162475586, 0.08393590545654298, 0.08402665710449218, 0.08449440002441407, 0.0843229751586914, 0.08525583648681641, 0.08744175720214843, 0.08511484527587891, 0.08447138977050782, 0.08458489227294921, 0.08469692993164063, 0.08432793426513673, 0.0844183349609375, 0.08443379211425782, 0.0842239990234375, 0.08507990264892579, 0.08483206176757813, 0.08468720245361328, 0.08452217864990234, 0.08538809967041015, 0.08479948425292969, 0.08507917022705078, 0.08466310119628906, 0.08700137329101562, 0.08420534515380859, 0.08411917114257812, 0.08420185852050781, 0.0845082550048828, 0.08486470031738282, 0.08424521636962891, 0.0842260513305664, 0.08479129791259765, 0.08438505554199219, 0.08432508850097656, 0.08423423767089844, 0.08434893035888671, 0.08440217590332032, 0.0841739501953125, 0.08410546875, 0.08451033782958985, 0.08460784149169921, 0.08428150177001953, 0.08465139007568359, 0.08558451080322266, 0.08434585571289062, 0.08458137512207031, 0.08513906860351562, 0.08440214538574219, 0.0845848617553711, 0.08480697631835937, 0.08451142120361328, 0.08524816131591798, 0.08657020568847656, 0.085751708984375, 0.08510553741455078, 0.08479350280761719, 0.08519209289550782, 0.0848687973022461, 0.08408332824707031, 0.08376537322998047, 0.08448000335693359, 0.0842608642578125, 0.08430182647705078, 0.08447590637207031, 0.08415245056152344, 0.0847806396484375, 0.08450895690917969, 0.08459398651123047, 0.08467935943603516, 0.084264892578125, 0.0845578842163086, 0.08441551971435547, 0.08414415740966796, 0.08913180541992187, 0.08433023834228516, 0.08436946868896485, 0.08439334106445312, 0.08516675567626954, 0.08471177673339844, 0.08474604797363282, 0.0849977569580078, 0.08829138946533203, 0.0850436782836914, 0.08476876831054687, 0.08482546997070313, 0.08498035430908203, 0.08464588928222656, 0.0847831039428711, 0.08500969696044922, 0.08506851196289063, 0.08449801635742188, 0.08463401794433593, 0.08394342041015625, 0.08387510681152344, 0.08371887969970702, 0.08456566619873047, 0.08458480072021485, 0.08447334289550781, 0.08426127624511719, 0.08717033386230469, 0.08496771240234376, 0.08502742767333984, 0.08464118194580078, 0.08469923400878906, 0.08427155303955078, 0.08434806060791016, 0.08403030395507813, 0.08417894744873047, 0.08439762878417968, 0.084224609375, 0.08436457824707032, 0.08451894378662109, 0.08506342315673829, 0.0852242202758789]",tokens/s,11.789237909888024,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: 
This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,12193.357824,7099.84256,0.0,6704.594944,6690.791936,s,1,28.44133203125,28.44133203125,0.0,28.44133203125,28.44133203125,28.44133203125,28.44133203125,[28.44133203125],,kWh,0.0006235108196625006,6.877042901585516e-05,0.00020887405598800195,0.0009011553046663577,,MB,1414.0416,7313.752064,0.0,6897.532928,6816.50432,s,10,1.1857021408081054,0.11857021408081055,0.0010024997860044512,0.11849817657470704,0.1201054313659668,0.12013938941955567,0.12016655586242676,"[0.12009788513183593, 0.11803987121582031, 0.11808700561523437, 0.11830169677734376, 0.11748175811767578, 0.11901964569091797, 0.11683843231201171, 0.11869465637207031, 0.11896784210205077, 0.12017334747314454]",tokens/s,2159.058259146984,kWh,3.4711949982843576e-06,3.828096586059955e-07,2.307318839317635e-06,6.161323496207988e-06,tokens/kWh,41549514.5089454,MB,1429.864448,7320.04352,0.0,6903.824384,6816.50688,s,10,72.63889843749999,7.26388984375,0.015049420714715026,7.2661413574218745,7.280953271484375,7.285970288085937,7.289983901367187,"[7.2909873046875, 7.26429052734375, 7.27105419921875, 7.246630859375, 7.2679921875, 7.25025634765625, 7.2623486328125, 7.26813818359375, 7.27983837890625, 7.23736181640625]",tokens/s,8.67303901286533,kWh,0.00021200965658713236,2.3385078023517928e-05,9.511884243488237e-05,0.00033051357704553267,tokens/kWh,190612.44189469682,,s,630,72.6361952819824,0.11529554806663876,0.0010446708919572187,0.115090576171875,0.11633906249999999,0.11714389457702637,0.11896890357971192,"[0.11530003356933594, 0.11442550659179687, 0.11449600219726562, 0.11477782440185547, 0.11467190551757812, 0.11597782135009765, 0.11590493011474609, 0.1181503677368164, 0.1160010223388672, 0.11474934387207031, 0.11423603057861328, 0.11417788696289062, 0.1152425308227539, 0.11499724578857422, 0.11604441833496094, 0.12042675018310547, 0.11649612426757812, 0.11655500793457031, 0.11493247985839844, 0.11514998626708985, 0.11507798767089844, 0.11556893157958985, 0.11576290893554687, 0.11464508819580078, 0.11562179565429688, 0.11634483337402343, 0.11534073638916016, 0.11492617797851562, 0.11450511932373048, 0.11523772430419922, 0.11565200042724609, 0.11610710144042968, 0.11600873565673828, 0.11528262329101563, 0.11634473419189453, 0.11496662139892579, 0.11498291015625, 0.11481906890869141, 0.11617485046386719, 0.11482931518554687, 0.11630754852294922, 0.11550761413574219, 0.11574681854248046, 0.11531468963623047, 0.11623388671875, 0.12600972747802736, 0.11497894287109375, 0.11875052642822266, 0.11647567749023438, 0.11615718078613281, 0.11529625701904297, 0.11559321594238281, 0.11422720336914062, 0.11409548950195313, 0.11512464141845703, 0.11666044616699218, 0.11514262390136719, 0.1166060791015625, 0.11510633850097657, 0.11519219207763672, 0.11556454467773437, 0.11493689727783203, 0.11478102111816406, 0.11487741088867187, 0.11472191619873047, 0.11617779541015626, 0.11568742370605468, 0.11547647857666016, 0.11493785858154297, 0.11429248046875, 0.11434355163574218, 0.11523455810546875, 0.11492153930664062, 0.116283203125, 0.117570556640625, 0.11477922821044922, 0.11559152221679687, 0.11497119903564453, 0.11427635192871094, 0.11439513397216797, 0.11502793884277343, 0.11443756866455078, 0.11583958435058593, 0.11552931213378906, 0.114968994140625, 0.1166346206665039, 0.11602812957763672, 0.11464463806152343, 0.11552543640136718, 0.11564064025878906, 0.11542176055908203, 0.11569760131835938, 0.11571119689941406, 0.11521218872070313, 0.11535372924804688, 0.11471894073486329, 
0.11477455902099609, 0.11447641754150391, 0.11445916748046875, 0.11548477172851562, 0.11646566772460938, 0.11871561431884765, 0.1153054428100586, 0.11541814422607422, 0.11486022186279297, 0.11465174102783203, 0.11518966674804687, 0.11540294647216796, 0.11669884490966796, 0.11518915557861328, 0.11437340545654297, 0.11584512329101562, 0.11483920288085937, 0.11444608306884765, 0.11628141021728515, 0.1153986587524414, 0.11510816192626953, 0.11628771209716797, 0.1155455322265625, 0.11538079833984374, 0.11531641387939454, 0.11446918487548828, 0.11434803009033204, 0.11413654327392578, 0.11474179077148437, 0.11548419189453125, 0.11519884490966797, 0.11557833862304688, 0.11564701080322265, 0.11837235260009765, 0.11480802917480469, 0.11720188903808594, 0.11519366455078126, 0.11558297729492187, 0.11497062683105469, 0.11751833343505859, 0.11452825927734375, 0.11466957092285156, 0.11468342590332031, 0.11784758758544922, 0.11503507232666016, 0.11471593475341797, 0.11548336029052735, 0.11580413055419922, 0.11510137939453124, 0.11875977325439453, 0.11483689880371094, 0.11418685150146485, 0.11474329376220703, 0.11525529479980469, 0.11536294555664063, 0.1157161636352539, 0.11509228515625, 0.11497401428222656, 0.11522860717773438, 0.11466194915771484, 0.11441490936279297, 0.11542617797851562, 0.11470162963867188, 0.11561235046386718, 0.1167227554321289, 0.11475244903564454, 0.11542063903808594, 0.11472541046142579, 0.11464704132080078, 0.11466751861572265, 0.11626290893554687, 0.11459693145751954, 0.11620652770996094, 0.11579801940917969, 0.11555987548828126, 0.11446044921875, 0.11532323455810548, 0.11487664031982422, 0.11470281219482421, 0.11524697875976563, 0.1159842529296875, 0.11503119659423829, 0.11449839782714843, 0.11491276550292968, 0.1147479019165039, 0.11485164642333984, 0.1145304946899414, 0.1149333724975586, 0.11499510192871094, 0.11596233367919923, 0.11518726348876954, 0.1189883804321289, 0.11527254486083985, 0.1148098907470703, 0.11480572509765626, 0.11504630279541016, 0.11403385925292969, 0.11465206146240234, 0.11511138916015624, 0.11513910675048829, 0.11455423736572265, 0.11495692443847656, 0.11450367736816407, 0.1146692123413086, 0.1152290267944336, 0.11548262023925782, 0.11578982543945313, 0.11466537475585938, 0.11506674957275391, 0.11436259460449219, 0.11447529602050781, 0.11413270568847657, 0.11427606201171875, 0.11414147186279297, 0.11510578918457032, 0.11496060943603516, 0.11427804565429688, 0.11533222198486329, 0.11493888092041016, 0.11666448211669922, 0.1141451187133789, 0.11750109100341796, 0.11460079956054688, 0.11538745880126954, 0.11566563415527344, 0.11498438262939453, 0.11390975952148437, 0.11476873779296876, 0.11435206604003906, 0.11637267303466797, 0.11480579376220704, 0.11611103820800782, 0.114781982421875, 0.11624838256835937, 0.11534937286376953, 0.11459954833984375, 0.11450777435302735, 0.11481804656982422, 0.11452227020263672, 0.1145588150024414, 0.11580006408691407, 0.11524710083007812, 0.1148436508178711, 0.11507679748535156, 0.1149948501586914, 0.11492607879638672, 0.1148416976928711, 0.11518572998046875, 0.11476150512695313, 0.11867568206787109, 0.11573801422119141, 0.11484630584716797, 0.11462451171875, 0.11594342041015625, 0.11438089752197265, 0.11330960083007813, 0.11523149108886718, 0.11573366546630859, 0.11542790222167969, 0.1154276123046875, 0.11474944305419922, 0.11482854461669922, 0.1148485107421875, 0.1144438705444336, 0.11714396667480469, 0.11630182647705078, 0.11567072296142578, 0.11788726043701171, 0.1154416961669922, 0.11462620544433594, 
0.1146712646484375, 0.11444294738769531, 0.11452178955078125, 0.11503033447265625, 0.11633843231201171, 0.11616860961914062, 0.11966422271728516, 0.11536978912353515, 0.11477903747558593, 0.11474127960205079, 0.11468185424804687, 0.1150013427734375, 0.115797119140625, 0.11456355285644532, 0.11532546997070313, 0.11565052795410156, 0.11422447967529296, 0.1141537628173828, 0.1142188491821289, 0.11428294372558594, 0.11504000091552734, 0.11550508880615235, 0.11590892791748048, 0.11477401733398437, 0.11556422424316407, 0.11508921813964844, 0.114498046875, 0.1143746566772461, 0.11510784149169923, 0.11459359741210938, 0.11510006713867188, 0.11539788818359376, 0.11538220977783203, 0.11490972900390625, 0.11511113739013672, 0.11630016326904297, 0.11515542602539063, 0.11800704193115234, 0.1153617935180664, 0.11457357025146485, 0.11516681671142578, 0.11497465515136719, 0.11519599914550781, 0.11435887908935546, 0.11435846710205078, 0.11938211059570313, 0.11566851043701172, 0.11587948608398438, 0.11559414672851563, 0.11486412811279297, 0.11480604553222656, 0.1143057632446289, 0.11507939147949219, 0.11466294097900391, 0.11474969482421875, 0.11553507232666016, 0.11560755157470703, 0.11511273956298829, 0.11468800354003907, 0.1145789794921875, 0.11466185760498047, 0.11429682922363281, 0.11502496337890625, 0.11581468963623047, 0.11601372528076172, 0.11581849670410156, 0.11576694488525391, 0.11481126403808593, 0.1144258270263672, 0.11421491241455078, 0.11523686218261718, 0.11477401733398437, 0.11584921264648437, 0.11524915313720703, 0.11508943939208985, 0.1150893783569336, 0.11420374298095703, 0.11447388458251953, 0.11598847961425782, 0.11478813171386719, 0.11490531158447266, 0.11530025482177735, 0.11522665405273437, 0.11606204986572266, 0.11476195526123047, 0.1148326416015625, 0.11461504364013672, 0.11632809448242187, 0.115714111328125, 0.11517699432373046, 0.1149849624633789, 0.11546851348876953, 0.117772705078125, 0.11456086730957031, 0.11432726287841796, 0.11463491058349609, 0.11659715270996093, 0.115212158203125, 0.11681372833251953, 0.11538835144042969, 0.11449788665771485, 0.11504828643798828, 0.11428361511230468, 0.11424665832519532, 0.11443814086914063, 0.11428044891357422, 0.11459401702880859, 0.11599030303955078, 0.11507711791992188, 0.11469414520263672, 0.11491526031494141, 0.11366515350341796, 0.1146569595336914, 0.11406777954101563, 0.11620966339111329, 0.11529420471191407, 0.11569097900390625, 0.11585753631591797, 0.11516150665283204, 0.1166192626953125, 0.1149725112915039, 0.11506089782714844, 0.11773542022705077, 0.11578572845458984, 0.11456511688232422, 0.11670118713378906, 0.11466957092285156, 0.11408383941650391, 0.11431526184082032, 0.1150218276977539, 0.11792998504638671, 0.11526780700683593, 0.11665952301025391, 0.11611305236816406, 0.11479532623291015, 0.11514182281494141, 0.11478237152099609, 0.11476233673095704, 0.11493154907226563, 0.11522684478759766, 0.11521024322509765, 0.11600691223144531, 0.11580006408691407, 0.11502191925048828, 0.11544265747070312, 0.11473811340332031, 0.11489446258544922, 0.11488236999511718, 0.11540332794189453, 0.11580973052978516, 0.11656864166259766, 0.11531190490722656, 0.1154849624633789, 0.11474169921875, 0.11482726287841796, 0.1148231658935547, 0.1147894058227539, 0.11486502075195312, 0.11580384063720703, 0.11643331146240235, 0.11472035217285156, 0.11501200103759765, 0.114155517578125, 0.11429273223876953, 0.11445977783203125, 0.11524114990234376, 0.1150030746459961, 0.11604624176025391, 0.11539692687988282, 0.11523260498046875, 0.11465484619140626, 
0.11455149078369141, 0.11431922912597656, 0.11480006408691407, 0.11523689270019531, 0.11590259552001952, 0.11552191925048828, 0.11892121887207031, 0.1152020492553711, 0.11444019317626954, 0.11510784149169923, 0.1150978240966797, 0.11479837036132813, 0.11547200012207032, 0.11530278778076172, 0.11479862213134766, 0.11516105651855468, 0.1146081314086914, 0.11409139251708984, 0.11437734222412109, 0.1147146224975586, 0.11444188690185547, 0.11503651428222657, 0.11565670776367187, 0.11582169342041015, 0.11448614501953125, 0.1142108154296875, 0.11457852935791016, 0.1143609619140625, 0.11462268829345704, 0.1162550048828125, 0.11574044799804688, 0.11515494537353516, 0.11520169830322266, 0.11512380981445312, 0.11839686584472656, 0.11520012664794922, 0.11944172668457032, 0.116129150390625, 0.1164967041015625, 0.11557548522949218, 0.1151488037109375, 0.11549247741699219, 0.11479689788818359, 0.11493379211425782, 0.1168436508178711, 0.11614297485351563, 0.11552092742919921, 0.11570175933837891, 0.11628173065185547, 0.11518531036376953, 0.11494588470458984, 0.11457814025878907, 0.11489405059814453, 0.11391776275634766, 0.11499187469482422, 0.1157245101928711, 0.11525325012207031, 0.1145323486328125, 0.11486617279052734, 0.11455693054199219, 0.11442546844482422, 0.11449609375, 0.11505372619628906, 0.11517523193359375, 0.11619574737548828, 0.117266845703125, 0.11549209594726563, 0.11487596893310546, 0.1147713623046875, 0.11475424194335937, 0.11454774475097657, 0.11512105560302735, 0.11725193786621094, 0.11566102600097657, 0.11614002990722656, 0.11481702423095703, 0.11505868530273437, 0.11491327667236328, 0.11489199829101562, 0.11566716766357422, 0.11633309173583985, 0.11559849548339844, 0.11927417755126953, 0.1157480926513672, 0.1157778549194336, 0.11411929321289062, 0.11422720336914062, 0.11543142700195312, 0.11563593292236328, 0.1164393310546875, 0.1168476791381836, 0.11697401428222656, 0.1173939208984375, 0.11492697906494141, 0.11486790466308594, 0.1149736328125, 0.11570585632324219, 0.11637344360351562, 0.11555232238769532, 0.115504638671875, 0.11520674896240235, 0.11470982360839843, 0.11443587493896484, 0.11461014556884766, 0.11538518524169922, 0.11500543975830078, 0.11642060852050781, 0.11636531066894532, 0.11544684600830078, 0.11583106994628906, 0.11501615905761718, 0.11484384155273437, 0.11452623748779298, 0.11461830139160156, 0.11531874847412109, 0.11593529510498046, 0.11575657653808594, 0.11562544250488281, 0.11472179412841797, 0.11470848083496094, 0.11797503662109375, 0.11549491119384765, 0.11850982666015625, 0.11613884735107421, 0.115880126953125, 0.11568370819091797, 0.11546249389648437, 0.11477811431884766, 0.11424671936035156, 0.11472172546386719, 0.11443225860595703, 0.11664653015136718, 0.1154703369140625, 0.11515058898925781, 0.11509171295166015, 0.11402265930175781, 0.11402384185791016, 0.11424348449707031, 0.11435842895507813, 0.11463459014892578, 0.11623673248291015, 0.11534130859375, 0.1161396484375, 0.1152311019897461, 0.11419843292236329, 0.11403478240966797, 0.11410636901855468, 0.11421900939941407, 0.11532028961181641, 0.11669558715820312, 0.115115234375, 0.11482323455810547, 0.11423567962646484, 0.11454428863525391, 0.11415017700195312, 0.11412226867675782, 0.11443001556396484, 0.11598070526123047, 0.1154378204345703, 0.11452995300292969, 0.11487996673583985, 0.11422579193115234, 0.11403798675537109, 0.11398633575439453, 0.11445145416259765, 0.1137242202758789, 0.11554563140869141, 0.11470111846923828, 0.11480608367919921, 0.11516336059570312, 0.11460025787353516, 
0.11434188842773438, 0.11476787567138672, 0.11440537261962891, 0.11445043182373046, 0.11552745819091798, 0.11581257629394531, 0.1156136932373047, 0.11831008148193359, 0.11445088195800782, 0.11424806213378906, 0.1141739501953125, 0.11436844635009766, 0.11565267181396484, 0.11437641906738281, 0.11532681274414062, 0.11503593444824219, 0.11431587219238282, 0.1146921615600586, 0.11411670684814453, 0.11424460601806641, 0.11462928009033203, 0.11714380645751953]",tokens/s,8.673361779953705,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14385.012736,10142.810112,0.0,9747.562496,9611.730944,s,1,34.31497265625,34.31497265625,0.0,34.31497265625,34.31497265625,34.31497265625,34.31497265625,[34.31497265625],,kWh,0.0007788282409832997,8.590274566197604e-05,0.00029462440236599496,0.0011593553890112706,,MB,4722.757632,10507.71456,0.0,10091.495424,9989.953536,s,10,1.2657408599853515,0.12657408599853515,0.0014093910157059629,0.12633460998535156,0.12765541381835938,0.12900365600585936,0.13008224975585939,"[0.12648461151123047, 0.1268326110839844, 0.12621836853027343, 0.12543926239013672, 0.12524419403076173, 0.12735580444335937, 0.1264508514404297, 0.1259213409423828, 0.13035189819335938, 0.12544191741943359]",tokens/s,2022.5308994367356,kWh,3.729289157172676e-06,4.1116035866514746e-07,2.4903958038480642e-06,6.630845319685888e-06,tokens/kWh,38607445.60576283,MB,4722.757632,10509.811712,0.0,10093.592576,9989.956096,s,10,76.04738525390624,7.6047385253906254,0.028115135723231145,7.601865478515625,7.63665849609375,7.638342919921875,7.639690458984375,"[7.6362841796875, 7.64002734375, 7.606771484375, 7.59695947265625, 7.629376953125, 7.632111328125, 7.59613720703125, 7.57292041015625, 7.584455078125, 7.552341796875]",tokens/s,8.28430849918853,kWh,0.00022130831102241136,2.4411459785056264e-05,0.00011673431068695224,0.00036245408149441984,tokens/kWh,173815.12091199867,,s,630,76.04411956024167,0.12070495168292335,0.0012465179445850338,0.12048220825195313,0.12197726821899414,0.12290295524597168,0.1254197380065918,"[0.12093644714355468, 0.12052684783935547, 0.12114031982421875, 0.12114559936523438, 0.12024076843261719, 0.12042652893066406, 0.12247039794921875, 0.12105907440185547, 0.11997347259521485, 0.1235728988647461, 0.12073308563232422, 0.12119087982177734, 0.120115234375, 0.12042652893066406, 0.12082134246826172, 0.12290505981445313, 0.12082717132568359, 0.12087747192382813, 0.1215511703491211, 0.12336518096923828, 0.12130732727050782, 0.12086624145507813, 0.11998675537109375, 0.12129203033447265, 0.121053955078125, 0.12080947113037109, 0.12175052642822265, 0.12255538940429687, 0.12162627410888673, 0.1309876403808594, 0.11998486328125, 0.12093350219726562, 0.12089024353027343, 0.12006195068359375, 0.12007628631591796, 0.12049215698242187, 0.1212326431274414, 0.11952710723876953, 0.11978777313232422, 0.12054946899414062, 0.12071587371826172, 0.11957453155517578, 0.11993292999267578, 
0.12310918426513671, 0.1218309097290039, 0.11992339324951172, 0.12047769927978516, 0.11982201385498047, 0.12151430511474609, 0.12066815948486329, 0.12031807708740234, 0.12014374542236328, 0.12162662506103515, 0.12161148834228516, 0.12151888275146484, 0.12299049377441407, 0.12170454406738282, 0.12071513366699219, 0.12131507110595703, 0.12122354888916016, 0.12232498931884765, 0.12214832305908203, 0.1206747817993164, 0.12229942321777344, 0.12165753936767579, 0.12072425842285156, 0.12099174499511718, 0.12197682952880859, 0.1211883544921875, 0.12135833740234375, 0.12080947113037109, 0.12072774505615234, 0.12190636444091797, 0.1197492446899414, 0.11997388458251954, 0.12029132843017579, 0.12101773071289063, 0.12034304046630859, 0.12006790161132813, 0.12008806610107423, 0.12207801818847656, 0.12089055633544922, 0.11990220642089844, 0.12062777709960938, 0.12097357177734375, 0.12062265777587891, 0.1205498275756836, 0.12089344024658204, 0.12103065490722656, 0.12200287628173828, 0.12112694549560547, 0.12624444580078126, 0.1206827163696289, 0.12104691314697266, 0.12128342437744141, 0.1218720932006836, 0.1232259521484375, 0.12245152282714844, 0.12116214752197266, 0.12153699493408203, 0.12116928100585937, 0.12054950714111329, 0.12096326446533204, 0.12128050994873046, 0.12099721527099609, 0.12186726379394532, 0.1216669464111328, 0.12133232116699219, 0.1215283203125, 0.12116377258300781, 0.12213862609863281, 0.12441347503662109, 0.12052297973632813, 0.12073395538330078, 0.1220315170288086, 0.1205759048461914, 0.12130374145507812, 0.12244739532470703, 0.12072128295898438, 0.11985497283935546, 0.1197903060913086, 0.1198919677734375, 0.12131737518310547, 0.1203828125, 0.12005996704101562, 0.12562902069091797, 0.12049651336669921, 0.120174560546875, 0.11925299072265624, 0.11983872222900391, 0.12050227355957031, 0.1197768325805664, 0.12025286102294921, 0.1210871353149414, 0.12058029174804688, 0.12023875427246093, 0.12007958221435547, 0.12050307464599609, 0.12152543640136719, 0.12071609497070312, 0.12092415618896485, 0.12148294067382813, 0.12031622314453125, 0.11979679870605468, 0.11986220550537109, 0.1201270751953125, 0.12120105743408204, 0.11987916564941406, 0.11959552001953125, 0.12033971405029296, 0.1204245147705078, 0.12007891082763672, 0.12032627105712891, 0.1209692153930664, 0.120774658203125, 0.12033843231201172, 0.1203076171875, 0.12059043121337891, 0.12049635314941406, 0.11988047790527344, 0.12030668640136719, 0.12090179443359375, 0.12027008056640626, 0.12090633392333984, 0.11986739349365234, 0.1207537612915039, 0.12154716491699219, 0.12094054412841797, 0.12024323272705079, 0.12014281463623047, 0.11995257568359376, 0.12143494415283203, 0.12211952209472657, 0.12065821075439453, 0.12162652587890625, 0.12019145965576172, 0.12042854309082031, 0.12349849700927734, 0.13049568176269533, 0.12055225372314453, 0.12093849945068359, 0.12003533172607422, 0.12061414337158204, 0.12140825653076172, 0.12044422149658203, 0.12101087951660157, 0.12060610961914063, 0.12011580657958984, 0.12172227478027343, 0.12047154998779297, 0.12102877044677735, 0.12364179229736329, 0.12290038299560548, 0.11981414031982422, 0.12044083404541016, 0.12103065490722656, 0.11961737823486328, 0.12001910400390625, 0.1198878402709961, 0.12020089721679687, 0.11991410827636718, 0.11956707000732422, 0.12073369598388672, 0.11995462036132812, 0.12044576263427734, 0.12099174499511718, 0.12387942504882812, 0.11965030670166016, 0.12062483215332032, 0.12080944061279297, 0.11953997039794922, 0.12006819152832031, 0.12116172790527344, 0.11973632049560547, 
0.120700927734375, 0.12094185638427735, 0.12071298980712891, 0.12090054321289062, 0.120166015625, 0.1196732177734375, 0.12081472015380859, 0.12027954864501954, 0.12056166076660156, 0.121162109375, 0.1203133773803711, 0.12025494384765625, 0.12021273803710937, 0.12043341064453125, 0.12041385650634766, 0.12217708587646485, 0.12042230224609375, 0.12081446075439453, 0.1203220443725586, 0.12011724853515625, 0.11988719940185547, 0.11987010955810547, 0.12074320220947266, 0.12102524566650391, 0.1207391357421875, 0.12014598083496093, 0.12039759826660157, 0.12019907379150391, 0.11980076599121094, 0.11972402954101563, 0.12099174499511718, 0.12059974670410156, 0.12116255950927735, 0.12072665405273438, 0.12122364807128906, 0.12068495941162109, 0.120340576171875, 0.12084825897216797, 0.12175769805908203, 0.12599501037597657, 0.12045696258544922, 0.12127462768554688, 0.12070057678222657, 0.12022767639160156, 0.12110694122314453, 0.12465766143798829, 0.12062252807617188, 0.11955462646484374, 0.11983599853515625, 0.11924342346191406, 0.12044083404541016, 0.12002304077148437, 0.12080329895019531, 0.12011280059814453, 0.11973875427246093, 0.12054118347167969, 0.12097740936279297, 0.12075635528564453, 0.12074518585205078, 0.1207384033203125, 0.12399343872070312, 0.12022978973388672, 0.12038739013671874, 0.1201042251586914, 0.1210667495727539, 0.12024179077148438, 0.12044537353515625, 0.12058870697021484, 0.11973744201660157, 0.11992787170410156, 0.12121398162841797, 0.12085327911376953, 0.12023955535888672, 0.12321238708496093, 0.12340025329589843, 0.1209151382446289, 0.12047824096679688, 0.12052851104736328, 0.11967139434814453, 0.12126822662353516, 0.12048588562011718, 0.12127027130126954, 0.12112019348144532, 0.1218414077758789, 0.12148191833496094, 0.12422713470458985, 0.11996598052978516, 0.12059677124023438, 0.12057379150390625, 0.12152028656005859, 0.12085993957519531, 0.12108258819580078, 0.12210176086425781, 0.12171453094482422, 0.12121875, 0.12198121643066406, 0.12071488189697266, 0.1223724136352539, 0.1217702407836914, 0.12202188873291016, 0.1212968978881836, 0.12316252899169922, 0.12113520050048829, 0.12116361236572265, 0.12188790130615235, 0.12252422332763672, 0.12188422393798828, 0.1215447006225586, 0.12178931427001953, 0.12183334350585938, 0.12167167663574219, 0.12115334320068359, 0.12154646301269531, 0.1224803237915039, 0.12275804901123047, 0.12201967620849609, 0.12106768035888672, 0.12126822662353516, 0.12120188903808594, 0.12188671875, 0.12310546875, 0.12288060760498047, 0.1220322265625, 0.1215282211303711, 0.11992195129394531, 0.12068889617919921, 0.12622412872314454, 0.12080611419677735, 0.12105532836914062, 0.11988572692871094, 0.12245536041259765, 0.12089965057373046, 0.12023462677001953, 0.11957987213134766, 0.11952381134033203, 0.12026911926269532, 0.12054729461669922, 0.12026882934570313, 0.12095699310302735, 0.11983385467529296, 0.12065190124511718, 0.12455174255371093, 0.12037939453125, 0.12068867492675782, 0.11992668914794923, 0.12038150024414063, 0.12153849792480469, 0.12094246673583985, 0.12005315399169922, 0.11988166046142579, 0.11942739105224609, 0.11968895721435546, 0.11972073364257813, 0.11997337341308593, 0.11971379089355469, 0.12490735626220703, 0.12194064331054688, 0.12115570831298828, 0.12002623748779297, 0.11907968139648438, 0.11986032104492188, 0.12082579040527344, 0.12014886474609375, 0.11965245056152343, 0.12067635345458984, 0.12082479858398437, 0.1200755844116211, 0.1204804458618164, 0.12036243438720703, 0.12018335723876954, 0.12026265716552734, 0.12046246337890625, 
0.12080422210693359, 0.11998003387451171, 0.11966464233398437, 0.12013481903076172, 0.11999318695068359, 0.12061238098144532, 0.12016278076171875, 0.12100137329101562, 0.12034518432617188, 0.12040953826904296, 0.11986492919921875, 0.11989500427246094, 0.12054937744140624, 0.12044601440429688, 0.120531005859375, 0.12054003143310547, 0.12138233947753906, 0.12076099395751953, 0.12067945861816406, 0.12031254577636719, 0.11957263946533203, 0.1212907485961914, 0.12094054412841797, 0.12033952331542969, 0.12221126556396485, 0.12026470184326171, 0.12041206359863281, 0.12102870178222656, 0.11988956451416016, 0.11977378845214844, 0.1234593276977539, 0.1204735336303711, 0.12113516998291016, 0.12024012756347656, 0.12034877014160156, 0.12618128204345702, 0.12249702453613281, 0.12015577697753907, 0.12044940948486328, 0.12072140502929687, 0.12012973022460938, 0.1200508804321289, 0.1199130859375, 0.12012278747558594, 0.11981884765625, 0.12018013000488281, 0.12022831726074219, 0.11910566711425781, 0.11931648254394531, 0.12052051544189453, 0.12082809448242188, 0.12214284515380859, 0.1204796142578125, 0.1196723861694336, 0.12048397064208985, 0.1200513916015625, 0.12075382232666015, 0.12053276824951172, 0.12000550079345704, 0.11943526458740235, 0.11951308441162109, 0.11965977478027344, 0.11943807983398437, 0.11923455810546875, 0.12327350616455078, 0.11873654174804688, 0.1203776626586914, 0.12051634979248046, 0.11923273468017578, 0.11963938903808594, 0.12034825897216797, 0.11953033447265625, 0.11957987213134766, 0.1201725082397461, 0.11972281646728515, 0.12014726257324218, 0.12026032257080078, 0.11960540771484375, 0.12021331024169922, 0.11956880187988281, 0.11966320037841797, 0.11966464233398437, 0.11982157135009766, 0.11993164825439454, 0.11918131256103516, 0.12023193359375, 0.11950284576416016, 0.11984281921386719, 0.12008585357666016, 0.12006671905517578, 0.12037503814697266, 0.11967513275146484, 0.1200926742553711, 0.11982601928710937, 0.1196253433227539, 0.12016925048828125, 0.11935948944091797, 0.11961516571044922, 0.121653564453125, 0.12012525177001954, 0.12089539337158203, 0.11990854644775391, 0.11968931579589843, 0.1203220443725586, 0.12011929321289062, 0.12022169494628906, 0.12099993896484375, 0.12016435241699219, 0.12067839813232421, 0.12176112365722656, 0.11925981140136718, 0.12039373016357421, 0.12212838745117187, 0.12419276428222656, 0.12163276672363281, 0.12094464111328125, 0.12077568054199218, 0.12043135833740234, 0.12124556732177734, 0.11983712005615234, 0.1209288330078125, 0.120710205078125, 0.12094767761230468, 0.12081484985351562, 0.12153446197509765, 0.1206025619506836, 0.12058294677734376, 0.12111872100830078, 0.12083631896972656, 0.11980786895751953, 0.12003628540039063, 0.12058246612548829, 0.12110684967041016, 0.12276310729980469, 0.11899945831298828, 0.11971379089355469, 0.11993497467041016, 0.11981951904296875, 0.12069500732421876, 0.12100457763671875, 0.12158566284179688, 0.1207227554321289, 0.12036576080322266, 0.12053708648681641, 0.11975475311279297, 0.1196789779663086, 0.12126825714111328, 0.12005888366699219, 0.12034556579589843, 0.11950796508789062, 0.11958927917480469, 0.1199642562866211, 0.12013948822021485, 0.11955238342285156, 0.1195492172241211, 0.12046399688720703, 0.12069174194335938, 0.123478271484375, 0.11956502532958985, 0.11984889221191407, 0.12021526336669922, 0.1200327377319336, 0.11988057708740234, 0.12060671997070313, 0.12110578918457031, 0.11995750427246094, 0.12077120208740234, 0.12049612426757812, 0.12022169494628906, 0.12036656188964844, 0.11981059265136719, 
0.1197875213623047, 0.12067142486572266, 0.12016518402099609, 0.12002098846435547, 0.11983439636230468, 0.11994882965087891, 0.12025926208496093, 0.11958067321777344, 0.11980140686035157, 0.12032044982910156, 0.12102098846435547, 0.12123529815673828, 0.11997325134277344, 0.12013958740234375, 0.11978781127929687, 0.11942169952392578, 0.12139622497558594, 0.12052703857421875, 0.12007266998291016, 0.12022966766357422, 0.11962947082519532, 0.12013654327392578, 0.1246740493774414, 0.120610595703125, 0.11959318542480468, 0.11972342681884765, 0.12091043090820312, 0.12011910247802735, 0.1235027847290039, 0.11964415740966797, 0.11975475311279297, 0.1206839370727539, 0.11958934020996094, 0.120150146484375, 0.11989984130859375, 0.11979590606689453, 0.11912754821777344, 0.11915532684326172, 0.11973632049560547, 0.12029132843017579, 0.11915001678466797, 0.11998806762695312, 0.11948515319824218, 0.12000665283203125, 0.12285951995849609, 0.11932825469970704, 0.11902355194091797, 0.11969522857666015, 0.1187212142944336, 0.1191357421875, 0.11905455780029296, 0.11926585388183594, 0.11857686614990234, 0.11894364929199219, 0.11845407867431641, 0.11928572845458985, 0.11911404418945312, 0.1216880645751953, 0.11921612548828125, 0.11875260925292969, 0.11916710662841797, 0.11868217468261719, 0.11807536315917969, 0.11918950653076171, 0.11968704223632813, 0.11968482971191406, 0.11958512115478516, 0.11995961761474609, 0.11961740875244141, 0.1197733154296875, 0.11948607635498047, 0.1198329620361328, 0.11899494171142579]",tokens/s,8.284664266523826,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1096.6016,709.820416,0.0,314.5728,299.62752,s,1,8.29876953125,8.29876953125,0.0,8.29876953125,8.29876953125,8.29876953125,8.29876953125,[8.29876953125],,kWh,3.174359504165902e-05,3.4870759261862695e-06,1.1626398190009435e-05,4.6857069157854725e-05,,MB,1247.06816,797.9008,0.0,381.681664,359.87456,s,10,0.2938719673156738,0.029387196731567384,0.001027390722866223,0.029006208419799805,0.030425178146362305,0.031296924400329586,0.03199432140350342,"[0.030231456756591796, 0.02891900825500488, 0.03216867065429688, 0.0285383358001709, 0.029249759674072267, 0.02894256019592285, 0.028616064071655272, 0.02906985664367676, 0.02890902328491211, 0.029227231979370116]",tokens/s,8711.276626293782,kWh,8.580955272303441e-07,9.463321532899257e-08,3.7207301007649246e-07,1.324801752635829e-06,tokens/kWh,193236459.3348867,MB,1258.438656,820.969472,0.0,404.750336,361.449984,s,10,17.94017346191406,1.794017346191406,0.006884328040200046,1.7932661743164062,1.8034279541015625,1.8042221557617188,1.8048575170898438,"[1.80325146484375, 1.7861607666015624, 1.793760986328125, 1.792058837890625, 1.7860655517578126, 1.7999669189453125, 1.7968050537109375, 1.805016357421875, 1.7927713623046875, 1.784316162109375]",tokens/s,35.11671731254177,kWh,5.20455033881874e-05,5.740288016313365e-06,2.0339026892123554e-05,7.81248182966243e-05,tokens/kWh,806401.8755320698,,s,630,17.93442261314392,0.02846733748118083,0.0005289263217916553,0.028420432090759278,0.028783866119384764,0.028968376731872556,0.029877254180908207,"[0.028222496032714844, 0.028015584945678712, 0.028149728775024415, 0.028395807266235352, 0.02823756790161133, 0.028222848892211914, 0.028344959259033204, 0.028248064041137694, 0.02818191909790039, 0.028205663681030273, 0.028365055084228517, 0.028327167510986326, 0.02826214408874512, 0.028234495162963866, 0.02812883186340332, 0.02855776023864746, 0.028411199569702148, 0.03115283203125, 0.028357759475708007, 0.028303712844848634, 0.028246559143066407, 0.02976358413696289, 0.029706239700317383, 0.028753215789794923, 0.02852275276184082, 0.028543424606323243, 0.028442623138427735, 0.028526304244995117, 0.02832592010498047, 0.02833228874206543, 0.029394943237304686, 0.02834022331237793, 0.02834432029724121, 0.028543167114257813, 0.02891516876220703, 0.028709215164184572, 0.02860438346862793, 0.028648672103881837, 0.028737855911254884, 0.02850806427001953, 0.028518239974975587, 0.028854303359985352, 0.028860895156860352, 0.028788991928100586, 0.028604415893554686, 0.028672000885009766, 0.02868351936340332, 0.02859699249267578, 0.028548799514770507, 0.02862726402282715, 0.029104127883911132, 0.028815359115600587, 0.029024255752563476, 0.02876185607910156, 0.028574176788330078, 0.02896054458618164, 0.02856118392944336, 0.02882921600341797, 0.028699520111083985, 0.02866975975036621, 0.028598272323608398, 0.028442304611206056, 0.028631359100341796, 0.028447263717651366, 0.02838921546936035, 0.028783296585083006, 0.028396991729736327, 0.028301792144775392, 0.02817228889465332, 0.028184864044189455, 0.028137184143066405, 0.028061920166015625, 0.028069664001464843, 0.027963232040405274, 0.028091615676879882, 0.02834726333618164, 0.028673728942871093, 0.02840025520324707, 0.02846188735961914, 0.028519136428833008, 0.028544479370117188, 0.028336896896362304, 0.028435903549194334, 0.028317695617675782, 0.0282587833404541, 0.028764255523681642, 0.028262496948242188, 0.028233407974243164, 0.028285120010375978, 0.028251935958862304, 0.027902463912963867, 0.027926015853881835, 
0.028201120376586914, 0.028435871124267577, 0.028289119720458986, 0.028359039306640625, 0.02855344009399414, 0.02834771156311035, 0.028283584594726564, 0.02824511909484863, 0.028230239868164062, 0.02847158432006836, 0.028370943069458008, 0.028175968170166016, 0.02819660758972168, 0.02835321617126465, 0.02833593559265137, 0.028530048370361327, 0.028121152877807618, 0.028123903274536132, 0.0284117431640625, 0.028401695251464843, 0.02904217529296875, 0.028569631576538086, 0.028414527893066407, 0.028358655929565428, 0.028461055755615236, 0.02840575981140137, 0.028307039260864256, 0.02821776008605957, 0.02861231994628906, 0.028612607955932616, 0.028414239883422853, 0.028423328399658204, 0.028343391418457032, 0.028093215942382812, 0.028544736862182618, 0.02855331230163574, 0.02892576026916504, 0.028586528778076173, 0.028591903686523437, 0.02858620834350586, 0.02867184066772461, 0.028642847061157228, 0.028843711853027344, 0.0286561279296875, 0.02858950424194336, 0.028552032470703124, 0.028639135360717775, 0.02855311965942383, 0.02944393539428711, 0.02953660774230957, 0.02895871925354004, 0.028848127365112306, 0.028612607955932616, 0.030217248916625975, 0.028721408843994142, 0.028506847381591798, 0.028726783752441407, 0.02912713623046875, 0.028538047790527345, 0.02859913635253906, 0.02874982452392578, 0.028491775512695314, 0.02876518440246582, 0.028804096221923828, 0.028596223831176756, 0.028211200714111328, 0.02817024040222168, 0.027790943145751954, 0.028302783966064452, 0.02819580841064453, 0.02840291213989258, 0.0283287353515625, 0.028298368453979494, 0.028269439697265624, 0.028246015548706056, 0.028299264907836914, 0.02815385627746582, 0.02819891166687012, 0.028073984146118162, 0.02774982452392578, 0.027680896759033204, 0.027789312362670897, 0.027851200103759764, 0.027967487335205078, 0.028104703903198244, 0.027841856002807617, 0.027955904006958007, 0.028360639572143555, 0.02851568031311035, 0.028682975769042968, 0.028463104248046874, 0.028192768096923827, 0.028262304306030273, 0.02812259292602539, 0.028015199661254882, 0.028387359619140625, 0.028163936614990233, 0.028150272369384766, 0.028872255325317384, 0.028690879821777343, 0.028369983673095702, 0.02885523223876953, 0.028375040054321288, 0.028278335571289063, 0.0282935676574707, 0.028294591903686522, 0.028207679748535157, 0.028194496154785156, 0.02838150405883789, 0.028369152069091796, 0.028155519485473634, 0.027977344512939453, 0.028068351745605468, 0.02809062385559082, 0.02802252769470215, 0.028049407958984376, 0.028090368270874022, 0.028026432037353517, 0.02778566360473633, 0.027838623046875, 0.027949119567871095, 0.028143232345581054, 0.02824617576599121, 0.0287825927734375, 0.028657663345336915, 0.028557056427001952, 0.028604095458984374, 0.02849430465698242, 0.02817033576965332, 0.028063488006591798, 0.028186880111694335, 0.028241920471191406, 0.028167327880859374, 0.028322656631469725, 0.028685600280761718, 0.028762847900390624, 0.028624895095825196, 0.028761600494384764, 0.02857561683654785, 0.028578432083129882, 0.029091455459594726, 0.02869900894165039, 0.02865558433532715, 0.02867612838745117, 0.02870854377746582, 0.02869068717956543, 0.028659296035766602, 0.028702720642089844, 0.028656095504760743, 0.028638431549072266, 0.028513023376464844, 0.02844607925415039, 0.02848361587524414, 0.02844121551513672, 0.028651519775390624, 0.0286167049407959, 0.02874736022949219, 0.02863555145263672, 0.028974784851074218, 0.028743999481201172, 0.02862710380554199, 0.028553632736206053, 0.02842812728881836, 0.028308544158935547, 
0.02804422378540039, 0.027897855758666993, 0.02803094482421875, 0.027971647262573243, 0.027930591583251955, 0.02791801643371582, 0.028135744094848633, 0.027905984878540038, 0.02819487953186035, 0.027971744537353516, 0.02817625617980957, 0.028176576614379882, 0.02817616081237793, 0.028442623138427735, 0.028565439224243164, 0.028386463165283204, 0.028275903701782228, 0.03287830352783203, 0.02829308891296387, 0.028164127349853515, 0.02815180778503418, 0.0280894718170166, 0.028445472717285158, 0.028321216583251953, 0.028414623260498047, 0.02832908821105957, 0.028525184631347657, 0.028369152069091796, 0.028604320526123047, 0.02832313537597656, 0.028259103775024413, 0.028481536865234375, 0.028601472854614257, 0.028506879806518556, 0.028462495803833008, 0.028307968139648438, 0.02815023994445801, 0.02806559944152832, 0.028389312744140624, 0.02830940818786621, 0.028248031616210936, 0.028397695541381836, 0.028084224700927734, 0.027968671798706053, 0.02797612762451172, 0.028203424453735353, 0.02815795135498047, 0.028225088119506837, 0.028189247131347656, 0.0283022403717041, 0.02846614456176758, 0.028548576354980468, 0.028326431274414064, 0.02824991989135742, 0.028145856857299804, 0.02828495979309082, 0.028355808258056642, 0.02838400077819824, 0.028393056869506834, 0.02821343994140625, 0.028191680908203124, 0.028283679962158203, 0.029526111602783203, 0.028811328887939452, 0.028628639221191406, 0.02857206344604492, 0.028706079483032228, 0.028729215621948242, 0.02868092727661133, 0.0286046085357666, 0.028603839874267577, 0.028497407913208008, 0.029913984298706054, 0.02872902488708496, 0.02881772804260254, 0.029783231735229492, 0.028592960357666015, 0.028480960845947267, 0.028598848342895507, 0.028509408950805663, 0.028517120361328124, 0.028444704055786134, 0.028477439880371092, 0.028446304321289063, 0.02873504066467285, 0.028630144119262697, 0.02862870407104492, 0.028553216934204102, 0.028858367919921874, 0.029787328720092772, 0.028649887084960936, 0.028678112030029297, 0.029052799224853515, 0.02845155143737793, 0.028466720581054688, 0.028594335556030272, 0.028434591293334963, 0.028720928192138673, 0.028199136734008787, 0.02827574348449707, 0.028317888259887694, 0.02820908737182617, 0.02819568061828613, 0.028248064041137694, 0.028297216415405273, 0.02823151969909668, 0.028046976089477538, 0.02818307113647461, 0.028237823486328126, 0.02842540740966797, 0.02814252853393555, 0.029037952423095703, 0.02911187171936035, 0.028771263122558594, 0.028231008529663086, 0.028234399795532227, 0.028325504302978515, 0.028133472442626952, 0.028207679748535157, 0.028311264038085936, 0.028342271804809572, 0.028118303298950195, 0.028627775192260743, 0.028839616775512694, 0.02833030319213867, 0.028268543243408203, 0.02825823974609375, 0.028270719528198242, 0.028041151046752928, 0.028094463348388672, 0.028314912796020508, 0.02824060821533203, 0.028611648559570314, 0.028271551132202147, 0.028762111663818358, 0.028391424179077147, 0.02828886413574219, 0.028154048919677734, 0.02799203109741211, 0.027930624008178712, 0.027922431945800782, 0.027897823333740236, 0.029745183944702148, 0.028753503799438477, 0.028453279495239257, 0.028516351699829103, 0.02820627212524414, 0.028204992294311525, 0.02820924758911133, 0.028291872024536133, 0.02858598327636719, 0.028497440338134766, 0.0286909122467041, 0.028471296310424804, 0.02840959930419922, 0.02826470375061035, 0.028476959228515626, 0.028342752456665038, 0.028216352462768556, 0.028165088653564454, 0.028256256103515624, 0.02851161575317383, 0.028750463485717772, 0.0285546875, 
0.02852921676635742, 0.028700672149658202, 0.029746623992919923, 0.02884383964538574, 0.028583744049072265, 0.028775487899780273, 0.028694400787353514, 0.028844032287597656, 0.028622976303100588, 0.02859110450744629, 0.028553184509277345, 0.028613471984863283, 0.028852287292480468, 0.028688543319702147, 0.028723039627075196, 0.028848127365112306, 0.02890070343017578, 0.028770240783691406, 0.02885843276977539, 0.028652191162109375, 0.02871500778198242, 0.028612287521362304, 0.028666175842285157, 0.028778495788574218, 0.02857561683654785, 0.02830761528015137, 0.028200191497802736, 0.02805014419555664, 0.027929887771606446, 0.028023519515991212, 0.02794067192077637, 0.027906240463256834, 0.02813542366027832, 0.028165887832641602, 0.028360479354858397, 0.028438175201416015, 0.028598783493041992, 0.028994976043701173, 0.028767040252685547, 0.028760160446166992, 0.028669023513793947, 0.028677024841308595, 0.028678144454956055, 0.028606271743774413, 0.028554880142211914, 0.02841865539550781, 0.028313568115234375, 0.02818160057067871, 0.028425119400024415, 0.028325664520263673, 0.028676319122314452, 0.028194816589355468, 0.03595683288574219, 0.031592607498168945, 0.029067007064819336, 0.028891424179077148, 0.028862207412719727, 0.02851958465576172, 0.028502847671508787, 0.02824835205078125, 0.028099872589111327, 0.02818492889404297, 0.028208383560180662, 0.02835481643676758, 0.028745567321777344, 0.028154624938964843, 0.028192832946777345, 0.029562431335449217, 0.028561792373657226, 0.028170175552368164, 0.02834160041809082, 0.028676864624023437, 0.02840777587890625, 0.028519424438476562, 0.028535167694091798, 0.02846272087097168, 0.028498943328857423, 0.028512191772460938, 0.02858742332458496, 0.028420768737792968, 0.029495296478271486, 0.028307136535644532, 0.0283853759765625, 0.028463327407836914, 0.02854902458190918, 0.02834662437438965, 0.028316959381103516, 0.02847407913208008, 0.028381343841552734, 0.028401119232177734, 0.028475839614868163, 0.028829792022705077, 0.028560415267944336, 0.02845574378967285, 0.02894976043701172, 0.02850454330444336, 0.0284736328125, 0.028483552932739256, 0.02855878448486328, 0.028684896469116213, 0.028587583541870118, 0.02851577568054199, 0.028412864685058593, 0.02852016067504883, 0.028615007400512694, 0.028510175704956054, 0.028797023773193358, 0.028509759902954103, 0.028594560623168945, 0.028539903640747072, 0.02850681686401367, 0.02898975944519043, 0.02858598327636719, 0.028630752563476563, 0.028507776260375976, 0.02871887969970703, 0.031959264755249024, 0.02873616027832031, 0.028598272323608398, 0.029016063690185546, 0.028590080261230468, 0.028432384490966797, 0.02847871971130371, 0.028392127990722656, 0.028354688644409178, 0.02844051170349121, 0.028350656509399413, 0.028267423629760743, 0.028338655471801758, 0.028139968872070313, 0.027885568618774413, 0.027785215377807617, 0.027727424621582033, 0.027675071716308595, 0.02779110336303711, 0.02788582420349121, 0.027813472747802735, 0.02773756790161133, 0.027687231063842774, 0.028137311935424805, 0.028279327392578126, 0.028123615264892578, 0.028280607223510744, 0.028224576950073244, 0.028363712310791017, 0.028420095443725587, 0.028317695617675782, 0.028043392181396485, 0.028196735382080076, 0.028212799072265624, 0.027908672332763673, 0.02778099250793457, 0.02777052879333496, 0.027992416381835937, 0.02778432083129883, 0.027799968719482423, 0.02785327911376953, 0.028321056365966796, 0.027966304779052733, 0.028373952865600585, 0.02832044792175293, 0.028188928604125977, 0.028108320236206054, 0.028111328125, 
0.028136831283569336, 0.02813942337036133, 0.02803785514831543, 0.028020671844482422, 0.02802284812927246, 0.02793267250061035, 0.027747583389282227, 0.027874048233032227, 0.027656192779541015, 0.02823686408996582, 0.028213247299194336, 0.028520416259765625, 0.028437471389770506, 0.028358655929565428, 0.028853248596191407, 0.028380128860473634, 0.02826652717590332, 0.028416160583496095, 0.028595775604248048, 0.028356895446777344, 0.02838857650756836, 0.02874345588684082, 0.028349344253540038, 0.02837539291381836, 0.028376703262329103, 0.02895884895324707, 0.028595712661743163, 0.028582399368286132, 0.028716991424560547, 0.028645439147949217, 0.028594175338745118, 0.028582048416137696, 0.028571487426757813, 0.028637184143066406, 0.02855299186706543, 0.028543039321899413, 0.028557472229003907, 0.028622848510742187, 0.028499967575073244, 0.028466400146484376, 0.02854400062561035, 0.02862473678588867, 0.02866374397277832, 0.028521984100341798, 0.028572160720825194, 0.028485631942749022]",tokens/s,35.1279778328788,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3927.699456,2152.660992,0.0,1757.413376,1736.37632,s,1,12.52262109375,12.52262109375,0.0,12.52262109375,12.52262109375,12.52262109375,12.52262109375,[12.52262109375],,kWh,0.0001584674610750047,1.7472890894140697e-05,5.979782561599256e-05,0.00023573817758513796,,MB,3934.502912,2387.542016,0.0,1971.32288,1913.084928,s,10,0.56003564453125,0.056003564453125,0.0004012378070317411,0.05581399917602539,0.056581279373168945,0.05664607982635498,0.05669792018890381,"[0.05617155075073242, 0.056360126495361325, 0.056566879272460936, 0.05582175827026367, 0.05580624008178711, 0.055629119873046876, 0.056710880279541014, 0.05570294570922851, 0.0554600944519043, 0.055806049346923826]",tokens/s,4571.13761418297,kWh,1.6468480188436859e-06,1.8161163862555528e-07,9.848853072584362e-07,2.8133449647276774e-06,tokens/kWh,90994884.45590602,MB,3934.502912,2408.513536,0.0,1992.2944,1972.635136,s,10,34.87702587890625,3.487702587890625,0.01510740592291888,3.4910791015625,3.501748315429688,3.504462048339844,3.506633034667969,"[3.48496044921875, 3.4877802734375, 3.501145263671875, 3.4943779296875, 3.494394775390625, 3.500339111328125, 3.50717578125, 3.4849052734375, 3.457155029296875, 3.4647919921875]",tokens/s,18.063466827342815,kWh,0.00010163501932365669,1.1210439492826176e-05,4.5264707247940516e-05,0.00015811016606442335,tokens/kWh,398456.35210028244,,s,630,34.872874938964884,0.05535376974438864,0.0011945569063210235,0.055267553329467776,0.0559307762145996,0.05636314430236816,0.05848259189605713,"[0.05495974349975586, 0.0547782096862793, 0.05524457550048828, 0.05441926574707031, 0.05457145690917969, 0.05482086563110351, 0.055213535308837894, 0.05532499313354492, 0.05646115112304687, 0.05487971115112305, 0.05439369583129883, 0.054841407775878905, 0.05492326354980469, 0.0549826545715332, 0.05455462265014648, 0.05454972839355469, 
0.055261985778808594, 0.05527142333984375, 0.05487807846069336, 0.055411968231201175, 0.05461017608642578, 0.054653568267822264, 0.0550010871887207, 0.0551475830078125, 0.05549075317382812, 0.05562239837646484, 0.056797119140625, 0.05628934478759766, 0.05552249526977539, 0.055554878234863284, 0.05535884857177734, 0.05584511947631836, 0.055505279541015626, 0.055384063720703126, 0.055556095123291016, 0.055431167602539064, 0.05568697738647461, 0.05549075317382812, 0.058859519958496094, 0.05565849685668945, 0.05506867218017578, 0.05473484802246094, 0.05516419219970703, 0.05539503860473633, 0.05525299072265625, 0.05499903869628906, 0.055019519805908204, 0.05504150390625, 0.05520233535766601, 0.055029022216796876, 0.054583999633789064, 0.05498601531982422, 0.055363838195800784, 0.055495166778564455, 0.05567692947387695, 0.05540224075317383, 0.055363838195800784, 0.055224128723144535, 0.055430465698242185, 0.055374561309814455, 0.055698944091796876, 0.05607609558105469, 0.0557371826171875, 0.05461702346801758, 0.05509107208251953, 0.05524460983276367, 0.056344383239746096, 0.05517567825317383, 0.05541427230834961, 0.05535750579833985, 0.055402942657470707, 0.05548569488525391, 0.055098110198974606, 0.05493721771240234, 0.05527795028686523, 0.05535334396362305, 0.05551839828491211, 0.05564396667480469, 0.055659423828125, 0.055483905792236325, 0.05555260848999023, 0.05546393585205078, 0.05565987014770508, 0.05554857635498047, 0.05599027252197265, 0.05558236694335938, 0.05569366455078125, 0.05566195297241211, 0.055591552734375, 0.055228416442871096, 0.05474508666992187, 0.054860801696777345, 0.05476249694824219, 0.05459715270996094, 0.05464223861694336, 0.055257823944091795, 0.05518355178833008, 0.055416831970214846, 0.055409919738769534, 0.05487241744995117, 0.05710480117797852, 0.05513356781005859, 0.055427711486816404, 0.0551383056640625, 0.05495779037475586, 0.05529219055175781, 0.05529302215576172, 0.055223201751708986, 0.054949119567871095, 0.05465520095825195, 0.054507648468017575, 0.054679969787597656, 0.05494784164428711, 0.055057502746582034, 0.05526595306396485, 0.05526348876953125, 0.05541455841064453, 0.05531465530395508, 0.05517926406860352, 0.055022624969482424, 0.055667678833007814, 0.05535238265991211, 0.05541939163208008, 0.055798206329345706, 0.055739585876464846, 0.05841388702392578, 0.05521897506713867, 0.055379169464111325, 0.05547110366821289, 0.0553779182434082, 0.05552243041992187, 0.05528982543945313, 0.055534496307373046, 0.05544879913330078, 0.05570230484008789, 0.055711742401123046, 0.0555228157043457, 0.05585356903076172, 0.055330814361572264, 0.054976097106933595, 0.05583456039428711, 0.05560924911499023, 0.05566316986083984, 0.05550694274902344, 0.05603942489624023, 0.05585100936889648, 0.05560319900512695, 0.055096351623535156, 0.055389152526855466, 0.05517107009887695, 0.055325855255126954, 0.05803641510009765, 0.05583251190185547, 0.05566332626342774, 0.05544550323486328, 0.055884990692138675, 0.05578742218017578, 0.05611942291259766, 0.05523273468017578, 0.05534163284301758, 0.05539833450317383, 0.055181377410888674, 0.05510518264770508, 0.055013729095458985, 0.05508505630493164, 0.05494521713256836, 0.05496275329589844, 0.05522166442871094, 0.05555817413330078, 0.05536588668823242, 0.05542272186279297, 0.05523440170288086, 0.05591910552978516, 0.05552150344848633, 0.055547904968261716, 0.05567692947387695, 0.055586814880371094, 0.055521278381347655, 0.05643468856811523, 0.05568716812133789, 0.05573427200317383, 0.05588172912597656, 0.055715198516845706, 
0.05563369750976563, 0.05543612670898437, 0.055553470611572266, 0.05651718521118164, 0.05563596725463867, 0.05547372817993164, 0.05481881713867188, 0.0561868782043457, 0.05528371047973633, 0.05565235137939453, 0.055119873046875, 0.055373504638671876, 0.055212352752685545, 0.0550830078125, 0.055119873046875, 0.054880256652832034, 0.054951934814453124, 0.05517663955688477, 0.055401023864746095, 0.05561753463745117, 0.05594249725341797, 0.05749542236328125, 0.05513059234619141, 0.055185726165771484, 0.05510105514526367, 0.05481689453125, 0.0551283187866211, 0.05502975845336914, 0.05518096160888672, 0.05522467041015625, 0.055142398834228515, 0.05591625595092774, 0.0568056640625, 0.05633769607543945, 0.05592947387695312, 0.05502767944335937, 0.055225631713867185, 0.055653217315673825, 0.05556208038330078, 0.05587507247924805, 0.05536630249023437, 0.056341793060302736, 0.05546985626220703, 0.055821121215820314, 0.055553375244140626, 0.055925342559814455, 0.05555116653442383, 0.05542195129394531, 0.0556317138671875, 0.05543132781982422, 0.055449310302734374, 0.05540687942504883, 0.055387264251708986, 0.05502041625976563, 0.05518131256103516, 0.055777278900146485, 0.055531520843505856, 0.05569740676879883, 0.05519919967651367, 0.05523411178588867, 0.05509423828125, 0.05513216018676758, 0.055027713775634764, 0.055216289520263674, 0.05531631851196289, 0.055991744995117186, 0.055639808654785156, 0.05539718246459961, 0.05524873733520508, 0.054787872314453125, 0.05483280181884766, 0.055070465087890624, 0.05544428634643555, 0.05523251342773437, 0.05514400100708008, 0.05514080047607422, 0.0551464958190918, 0.05500668716430664, 0.055560737609863284, 0.054873600006103515, 0.05559552001953125, 0.05563596725463867, 0.055382015228271485, 0.05575657653808594, 0.055245025634765625, 0.05510371017456055, 0.05520281600952148, 0.05559580612182617, 0.05575475311279297, 0.0555601921081543, 0.05584896087646484, 0.05556752014160156, 0.07943241882324219, 0.05702822494506836, 0.057579360961914065, 0.05531862258911133, 0.05523846435546875, 0.05511004638671875, 0.0550497932434082, 0.05541321563720703, 0.05541510391235351, 0.055104671478271486, 0.0547250862121582, 0.05430460739135742, 0.05440524673461914, 0.05434147262573242, 0.05471289443969726, 0.054994144439697266, 0.05463017654418945, 0.05475635147094727, 0.05459574508666992, 0.05476335906982422, 0.05485977554321289, 0.05463449478149414, 0.055069950103759764, 0.0542042236328125, 0.054104415893554685, 0.05462691116333008, 0.05484134292602539, 0.05503110504150391, 0.05499078369140625, 0.05453865432739258, 0.05453859329223633, 0.05470003128051758, 0.05446656036376953, 0.05464678573608398, 0.055021568298339846, 0.05482416152954102, 0.05486262512207031, 0.054747135162353515, 0.054771713256835934, 0.055087104797363284, 0.05446041488647461, 0.05444540786743164, 0.054728832244873044, 0.05514704132080078, 0.055144447326660156, 0.0553221435546875, 0.05501795196533203, 0.05519721603393555, 0.055030174255371093, 0.05514236831665039, 0.05522441482543945, 0.055431072235107424, 0.055363006591796875, 0.05508966445922851, 0.05765078353881836, 0.05530886459350586, 0.055330814361572264, 0.0554161262512207, 0.054575809478759764, 0.05429590225219726, 0.05454425430297852, 0.05484124755859375, 0.05491785430908203, 0.05470633697509766, 0.05450543975830078, 0.05531036758422852, 0.05531145477294922, 0.0554598388671875, 0.055269695281982424, 0.05574102401733398, 0.05561315155029297, 0.055647872924804685, 0.05575132751464844, 0.05564131164550781, 0.0559598388671875, 0.0567116813659668, 
0.05550080108642578, 0.05501542282104492, 0.055277568817138675, 0.05563734436035156, 0.05574825668334961, 0.05588684844970703, 0.05583871841430664, 0.05605580902099609, 0.05548441696166992, 0.05559500885009765, 0.055483776092529295, 0.055666656494140626, 0.05710649490356445, 0.05573897552490235, 0.05580944061279297, 0.05587337493896485, 0.05629209518432617, 0.05692777633666992, 0.05623446273803711, 0.05624335861206055, 0.05602799987792969, 0.05620940780639649, 0.056164352416992185, 0.05620441436767578, 0.05606079864501953, 0.05631740951538086, 0.056220191955566406, 0.05578956985473633, 0.05590323257446289, 0.05608755111694336, 0.05851065444946289, 0.05608927917480469, 0.055553375244140626, 0.05555267333984375, 0.056174591064453126, 0.05539836883544922, 0.055296031951904294, 0.055547904968261716, 0.055201793670654295, 0.058861568450927736, 0.055967742919921876, 0.05743206405639648, 0.05688095855712891, 0.05559724807739258, 0.05532035064697265, 0.0555706558227539, 0.055777278900146485, 0.05561040115356446, 0.0558612174987793, 0.05586806488037109, 0.05587779235839844, 0.05603247833251953, 0.05646828842163086, 0.055117984771728516, 0.05540454483032226, 0.05513353729248047, 0.055253662109375, 0.05609267044067383, 0.055117729187011716, 0.055076030731201174, 0.054868896484375, 0.05483497619628906, 0.05512582397460938, 0.05528745651245117, 0.05573503875732422, 0.055312225341796875, 0.05564432144165039, 0.05547417449951172, 0.05556224060058594, 0.05552537536621094, 0.05583052825927735, 0.055373825073242185, 0.05543231964111328, 0.05535014343261719, 0.05603737640380859, 0.05566668701171875, 0.05546915054321289, 0.05551094436645508, 0.0553087043762207, 0.05491772842407226, 0.05502361679077149, 0.055358657836914064, 0.05544636917114258, 0.055244766235351565, 0.0553164176940918, 0.055236671447753904, 0.05534089660644531, 0.055167137145996095, 0.05571379089355469, 0.0551868782043457, 0.054556671142578124, 0.055185409545898435, 0.05538313674926758, 0.055780033111572265, 0.0552468147277832, 0.055109630584716796, 0.05496582412719726, 0.0549997444152832, 0.05470150375366211, 0.05499347305297852, 0.05513216018676758, 0.055371681213378904, 0.05525104141235351, 0.05753238296508789, 0.05539023971557617, 0.055564289093017576, 0.05565983963012695, 0.05526988983154297, 0.055177406311035154, 0.05549260711669922, 0.05530588912963867, 0.05685843276977539, 0.055376415252685544, 0.055621536254882815, 0.0554230728149414, 0.05637849426269531, 0.05586419296264648, 0.05583379364013672, 0.055464767456054685, 0.055357440948486325, 0.055373825073242185, 0.05545574569702148, 0.05558639907836914, 0.056240127563476565, 0.05568710327148438, 0.05549916839599609, 0.05514246368408203, 0.055164127349853515, 0.05542172622680664, 0.055877632141113284, 0.0551479377746582, 0.05512252807617188, 0.05491616058349609, 0.05485049438476562, 0.05465599822998047, 0.055223297119140625, 0.055244800567626956, 0.055160831451416016, 0.05532876968383789, 0.055155841827392575, 0.05496307373046875, 0.055357662200927735, 0.055269153594970706, 0.05497766494750977, 0.05506342315673828, 0.05557452774047852, 0.05512713623046875, 0.054924190521240236, 0.05466908645629883, 0.05444220733642578, 0.054228897094726565, 0.055005279541015625, 0.05454172897338867, 0.05475942230224609, 0.0550563850402832, 0.05489459228515625, 0.060663806915283204, 0.05457299041748047, 0.055706878662109376, 0.055202369689941404, 0.0552204818725586, 0.05496435165405274, 0.05499062347412109, 0.05484348678588867, 0.05493132781982422, 0.05485753631591797, 0.0548969612121582, 
0.054714366912841796, 0.05490687942504883, 0.054812671661376954, 0.055019390106201174, 0.05501468658447266, 0.05485836791992187, 0.054831329345703124, 0.05449932861328125, 0.05455462265014648, 0.05898761749267578, 0.055155616760253906, 0.05406252670288086, 0.05426134490966797, 0.05495702362060547, 0.05461606216430664, 0.05472819137573242, 0.05419059371948242, 0.05455651092529297, 0.05363260650634766, 0.05367776107788086, 0.05351718521118164, 0.0541102066040039, 0.0543559684753418, 0.056597503662109375, 0.0544502067565918, 0.05388592147827148, 0.053808799743652345, 0.05442595291137695, 0.054988414764404296, 0.054703807830810545, 0.054428352355957034, 0.054497055053710934, 0.054492576599121094, 0.05474150466918945, 0.05527769470214844, 0.05415097427368164, 0.0538361587524414, 0.05381100845336914, 0.05395065689086914, 0.053766143798828124, 0.05442150497436524, 0.055064064025878906, 0.056469150543212894, 0.05530268859863281, 0.05556159973144531, 0.05484025573730469, 0.05531033706665039, 0.05555814361572266, 0.05480448150634765, 0.054801761627197264, 0.055401153564453125, 0.055026817321777347, 0.057189247131347654, 0.058791934967041014, 0.054986751556396485, 0.05482700729370117, 0.0542105598449707, 0.05465686416625976, 0.0550032958984375, 0.0549310417175293, 0.054583072662353516, 0.054331329345703124, 0.054330047607421876, 0.0551383056640625, 0.055126014709472655, 0.05446041488647461, 0.054357120513916016, 0.05461695861816406, 0.05556633758544922, 0.057145343780517575, 0.05455820846557617, 0.05460128021240234, 0.05426681518554687, 0.05403968048095703, 0.05421334457397461, 0.05433686447143555, 0.05504083251953125, 0.05485340881347656, 0.054758689880371095, 0.054418495178222656, 0.05426982498168945, 0.054836929321289064, 0.05534892654418945, 0.05556185531616211, 0.05483827209472656, 0.05466316986083984, 0.055314430236816405, 0.05501337432861328, 0.05482406234741211, 0.05446131134033203, 0.054849536895751956, 0.054930721282958984, 0.05514313507080078, 0.054929279327392576, 0.05552755355834961, 0.05523020935058594, 0.054937854766845706, 0.055539710998535156, 0.05500928115844726, 0.05514662551879883, 0.05558259201049805, 0.05570880126953125, 0.055439838409423826, 0.05530838394165039, 0.055269695281982424, 0.05502975845336914, 0.054832767486572266, 0.05458367919921875, 0.054451358795166015, 0.054303585052490236, 0.054682785034179685, 0.05440777587890625]",tokens/s,18.065616933007036,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,5097.627648,3461.28384,0.0,3066.036224,2865.160192,s,1,13.38390625,13.38390625,0.0,13.38390625,13.38390625,13.38390625,13.38390625,[13.38390625],,kWh,0.00017487062844995005,1.9281217380313427e-05,6.501616312401537e-05,0.00025916800895427883,,MB,5149.663232,3790.536704,0.0,3374.317568,3158.448128,s,10,0.9157994461059572,0.0915799446105957,0.0011892466327708747,0.09108694076538086,0.09342338409423828,0.09351868362426759,0.09359492324829102,"[0.09025955200195312, 0.09040876770019532, 0.09074793243408204, 0.0907008285522461, 0.09142594909667968, 0.09361398315429688, 0.09259849548339844, 0.09199523162841797, 0.09064649963378907, 0.09340220642089844]",tokens/s,2795.37185885545,kWh,2.7058780424000155e-06,2.9840813150666025e-07,1.496266629111287e-06,4.500552803017963e-06,tokens/kWh,56881901.22518561,MB,5153.906688,3790.536704,0.0,3374.317568,3158.450688,s,10,55.28744775390625,5.528744775390625,0.02828313822901441,5.533910888671874,5.561607861328125,5.561866918945313,5.562074165039062,"[5.48099658203125, 5.48805810546875, 5.5044287109375, 5.536615234375, 5.51940771484375, 5.56155029296875, 5.54486474609375, 5.55819384765625, 5.53120654296875, 5.5621259765625]",tokens/s,11.394991550419114,kWh,0.00016187142267759847,1.7854969432405406e-05,6.869442841228868e-05,0.00024842082052229254,tokens/kWh,253601.9318652342,,s,630,55.28498953247067,0.08775395163884239,0.0011452891762173502,0.08761142349243164,0.08863429946899415,0.08943564682006835,0.09132697471618652,"[0.08672249603271484, 0.08921660614013673, 0.08627180480957031, 0.08731305694580078, 0.0873371810913086, 0.08705142211914063, 0.08651136016845704, 0.08661033630371094, 0.08614755249023437, 0.08600479888916016, 0.08675353240966797, 0.08663724517822266, 0.08671340942382813, 0.08705039978027344, 0.08636041259765626, 0.086299072265625, 0.08696361541748047, 0.08683487701416015, 0.08672348785400391, 0.08708096313476563, 0.08619414520263671, 0.08639411163330078, 0.08615299224853516, 0.08633042907714844, 0.08709673309326171, 0.08659552001953125, 0.0863279037475586, 0.08664678192138672, 0.08673075103759766, 0.08678137969970703, 0.0874686050415039, 0.08691232299804688, 0.08670272064208985, 0.08641276550292969, 0.08648764801025391, 0.08669776153564453, 0.08782447814941406, 0.08641081237792969, 0.08690950775146485, 0.08630271911621094, 0.08749228668212891, 0.08859024047851563, 0.08655913543701171, 0.08709939575195312, 0.09019391632080079, 0.08824755096435546, 0.08671040344238282, 0.086995361328125, 0.0865315170288086, 0.08727817535400391, 0.08965074920654297, 0.0903944320678711, 0.0877423324584961, 0.08647776031494141, 0.08700227355957031, 0.08631951904296875, 0.08639859008789062, 0.0870544662475586, 0.08724534606933594, 0.08680770874023437, 0.0864901123046875, 0.0872790756225586, 0.08621724700927734, 0.0864300765991211, 0.08674457550048828, 0.08694953918457031, 0.0866251220703125, 0.0865005111694336, 0.08636252593994141, 0.08686841583251953, 0.08805593872070312, 0.08734047698974609, 0.08655712127685547, 0.08668364715576173, 0.08688758087158203, 0.08662335968017579, 0.08706598663330078, 0.08693135833740234, 0.08662265777587891, 0.08678809356689453, 0.08698191833496094, 0.08695267486572265, 0.08637030029296874, 0.08682003021240234, 0.08649967956542969, 0.0866984634399414, 0.08723865509033203, 0.08686182403564453, 0.0871725082397461, 0.08734146881103516, 0.09051907348632812, 0.08798684692382812, 0.0868106231689453, 0.08767398071289062, 0.08646131134033203, 0.08711500549316406, 0.08680524444580077, 
0.0869191665649414, 0.08658029174804688, 0.08715058898925782, 0.08689759826660157, 0.0867872314453125, 0.08687091064453124, 0.08707711791992187, 0.08726252746582032, 0.087072509765625, 0.08758477020263672, 0.08744003295898438, 0.08725078582763672, 0.08689667510986328, 0.08698483276367187, 0.08744290924072265, 0.08677430725097657, 0.08726707458496094, 0.08734336090087891, 0.0901711654663086, 0.08681494140625, 0.08739408111572265, 0.08768144226074219, 0.08700457763671875, 0.08753603363037109, 0.08715264129638672, 0.08681881713867187, 0.08741248321533203, 0.08664214324951172, 0.08724150085449218, 0.08748851013183594, 0.08652909088134765, 0.08695410919189453, 0.08725177764892578, 0.0868331527709961, 0.08665907287597656, 0.08636316680908203, 0.08964969635009766, 0.08684108734130859, 0.08767507171630859, 0.08820172882080078, 0.08718745422363282, 0.08751280212402343, 0.08670236968994141, 0.08757247924804687, 0.08760115051269532, 0.08708064270019532, 0.08687033843994141, 0.08836710357666015, 0.08680448150634766, 0.08678825378417969, 0.08713187408447266, 0.08694182586669921, 0.08683721923828125, 0.08659964752197266, 0.0871629409790039, 0.08775475311279297, 0.08809471893310547, 0.08714649963378907, 0.08667545318603516, 0.09059667205810547, 0.08697277069091797, 0.08667910766601562, 0.08709606170654297, 0.08641539001464844, 0.08729395294189453, 0.08661395263671876, 0.08662432098388671, 0.0909677734375, 0.08739868927001954, 0.0868384017944336, 0.08705522918701172, 0.08800800323486328, 0.08683904266357421, 0.0872674560546875, 0.0874334716796875, 0.08734982299804687, 0.08711513519287109, 0.09081037139892578, 0.08801340484619141, 0.08753907012939453, 0.08694646453857421, 0.0868325424194336, 0.08708911895751953, 0.08690560150146484, 0.08682460784912109, 0.08802649688720703, 0.08714348602294922, 0.08666703796386718, 0.08761753845214844, 0.08724272155761718, 0.08727263641357422, 0.08740541076660156, 0.08711526489257812, 0.0874639663696289, 0.08712374114990235, 0.08803603363037109, 0.08776627349853515, 0.08742578887939453, 0.08734620666503906, 0.08781104278564453, 0.08759910583496094, 0.08808201599121093, 0.08745820617675781, 0.08749056243896484, 0.08765235137939453, 0.08943001556396485, 0.08964688110351562, 0.08870320129394531, 0.08752742767333985, 0.08788582611083984, 0.08790611267089844, 0.08831916809082031, 0.08848700714111328, 0.08786319732666016, 0.08739984130859375, 0.08759766387939454, 0.08777932739257813, 0.08754291534423828, 0.08910323333740235, 0.08783846282958985, 0.08857830047607422, 0.09074073791503906, 0.08742707061767578, 0.08813702392578125, 0.08794719696044923, 0.08765705871582032, 0.08713375854492188, 0.08719542694091797, 0.0881200942993164, 0.08866560363769531, 0.09160710144042969, 0.08810765075683594, 0.0873388442993164, 0.08696217346191407, 0.08822486114501953, 0.08830818939208984, 0.08768761444091797, 0.08727916717529297, 0.08744547271728516, 0.08770742034912109, 0.08768511962890625, 0.08852345275878906, 0.08725424194335937, 0.08725583648681641, 0.08711539459228515, 0.0873722915649414, 0.08787126159667968, 0.08744931030273438, 0.08794560241699219, 0.08732466888427734, 0.08680413055419922, 0.08766226959228515, 0.08744812774658203, 0.08761148834228516, 0.08736358642578125, 0.08777145385742187, 0.08790460968017579, 0.08782425689697265, 0.0876559066772461, 0.08772281646728515, 0.08731619262695313, 0.087531005859375, 0.08743417358398438, 0.08740585327148437, 0.08739708709716797, 0.08702976226806641, 0.08752947235107422, 0.08790016174316406, 0.08959964752197265, 0.08764019012451171, 
0.08738569641113281, 0.08931375885009765, 0.08813993835449219, 0.08786316680908203, 0.08801702117919921, 0.08744898986816406, 0.08740310668945313, 0.08751500701904297, 0.08792896270751953, 0.09071820831298828, 0.08827699279785156, 0.08729190063476562, 0.08747539520263672, 0.08677401733398438, 0.08715116882324218, 0.08721202850341797, 0.08762163543701172, 0.08744550323486328, 0.08704144287109375, 0.08678256225585937, 0.08739997100830078, 0.08822950744628906, 0.08725385284423828, 0.08747007751464844, 0.087364990234375, 0.08755580902099609, 0.08771389007568359, 0.08752761840820313, 0.08716966247558594, 0.08723865509033203, 0.08765644836425782, 0.08714649963378907, 0.08775475311279297, 0.08766025543212891, 0.08725122833251953, 0.08726732635498047, 0.08761277008056641, 0.0876304931640625, 0.08797529602050781, 0.08717375946044922, 0.08711373138427735, 0.08699884796142578, 0.08758665466308593, 0.08728399658203125, 0.08755001831054687, 0.08751516723632813, 0.08740860748291016, 0.08717842864990234, 0.08839254760742188, 0.08801074981689454, 0.08871116638183593, 0.08743936157226563, 0.0880345916748047, 0.0877677764892578, 0.08830770874023437, 0.08798598480224609, 0.08887519836425781, 0.08826470184326173, 0.08820896148681641, 0.08896969604492187, 0.08988047790527344, 0.08874569702148437, 0.08880982208251953, 0.08880086517333985, 0.08873158264160157, 0.08877481842041016, 0.08920060729980468, 0.08793910217285156, 0.08779193878173829, 0.08765187072753906, 0.08757465362548827, 0.08846371459960937, 0.09420390319824219, 0.0915979232788086, 0.0886341781616211, 0.08780169677734374, 0.08774687957763672, 0.08810006713867187, 0.08765497589111328, 0.08778294372558594, 0.0878086395263672, 0.08815837097167968, 0.08808432006835938, 0.08825241851806641, 0.08764643096923828, 0.08791395568847657, 0.08769725036621094, 0.08778905487060547, 0.08829574584960938, 0.08774518585205078, 0.08753561401367188, 0.08732057952880859, 0.08766368103027344, 0.08805452728271485, 0.08821778869628906, 0.08750406646728516, 0.08784317016601563, 0.08755862426757813, 0.08825856018066407, 0.08786739349365234, 0.08842422485351563, 0.08827942657470703, 0.08799420928955078, 0.08779571533203125, 0.08786688232421876, 0.08917453002929687, 0.08760476684570312, 0.08820076751708984, 0.08793529510498047, 0.08761164855957031, 0.08835485076904297, 0.0876148452758789, 0.08776972961425782, 0.09135222625732421, 0.0876839370727539, 0.08769081878662109, 0.0877531509399414, 0.08740249633789063, 0.08743936157226563, 0.09081609344482422, 0.08972329711914062, 0.08811110687255859, 0.08780595397949219, 0.08749056243896484, 0.08716659545898438, 0.08759539031982422, 0.08794461059570312, 0.08833695983886719, 0.08775273895263672, 0.08736972808837891, 0.08742060852050781, 0.087484130859375, 0.08820387268066407, 0.08757148742675781, 0.08769430541992188, 0.08754176330566406, 0.08720384216308594, 0.08737177276611328, 0.08774166107177735, 0.08764905548095703, 0.09094876861572265, 0.08775308990478516, 0.0875232925415039, 0.0875647964477539, 0.08784512329101563, 0.08724364471435547, 0.08772281646728515, 0.08759302520751953, 0.0875387191772461, 0.08767378997802734, 0.08850844573974609, 0.08829686737060546, 0.0885824966430664, 0.08774272155761718, 0.0876578598022461, 0.08825676727294922, 0.08779199981689453, 0.08738979339599609, 0.08748073577880859, 0.08830156707763671, 0.09102745819091797, 0.08817254638671874, 0.08764559936523438, 0.08774102020263672, 0.08796774291992188, 0.08778125, 0.08846963500976562, 0.08853609466552734, 0.08775369262695312, 0.08736675262451171, 
0.08763689422607422, 0.08759442901611328, 0.08807635498046874, 0.08872716522216798, 0.08831168365478516, 0.08841081237792969, 0.08795337677001953, 0.08818019104003906, 0.0875607681274414, 0.0882339859008789, 0.08775478363037109, 0.0879943389892578, 0.08783052825927734, 0.08850249481201172, 0.09037596893310547, 0.08786265563964844, 0.08787324523925781, 0.08783721923828125, 0.08890751647949219, 0.08787417602539062, 0.08809081268310547, 0.08781804656982421, 0.08863539123535157, 0.08790016174316406, 0.08854243469238281, 0.08834742736816406, 0.08780966186523438, 0.08776105499267578, 0.08798131561279297, 0.08815305328369141, 0.088923583984375, 0.08761135864257813, 0.08738188934326171, 0.0874750747680664, 0.08728521728515624, 0.08759539031982422, 0.08837324523925781, 0.08790214538574219, 0.08708412933349609, 0.08769779205322266, 0.08736418914794922, 0.08785414123535157, 0.08748332977294922, 0.08834662628173828, 0.08718540954589844, 0.0878380126953125, 0.08805856323242188, 0.08794882965087891, 0.0877265625, 0.08813164520263672, 0.08754720306396484, 0.08984844970703125, 0.08886617279052735, 0.08782460784912109, 0.08749097442626953, 0.08988671875, 0.08825651550292969, 0.08795458984375, 0.08866492462158203, 0.08799436950683594, 0.08820243072509766, 0.08767139434814453, 0.08733103942871094, 0.0989881591796875, 0.08893660736083984, 0.08751203155517578, 0.08720665740966797, 0.08752947235107422, 0.0876883544921875, 0.08787439727783203, 0.08763362884521485, 0.08736112213134765, 0.08708985900878906, 0.0873512954711914, 0.08757852935791016, 0.08778342437744141, 0.0881460189819336, 0.08796329498291015, 0.08748281860351563, 0.08745148468017579, 0.08823177337646484, 0.08793660736083984, 0.08758911895751953, 0.08733734130859375, 0.08747417449951173, 0.0876786880493164, 0.08753753662109375, 0.08719833374023438, 0.08713375854492188, 0.0869948501586914, 0.08731065368652344, 0.08781327819824218, 0.08812630462646484, 0.08795516967773437, 0.08710956573486328, 0.08712841796875, 0.08766585540771485, 0.08843462371826172, 0.08758975982666016, 0.08719155120849609, 0.08718268585205079, 0.09164822387695312, 0.0887996826171875, 0.08814966583251953, 0.08808403015136719, 0.08771398162841797, 0.0873778533935547, 0.08696809387207032, 0.08752406311035156, 0.0876525115966797, 0.08738374328613281, 0.08757484436035157, 0.0873587188720703, 0.08726399993896485, 0.0878182373046875, 0.08740367889404296, 0.08811427307128906, 0.09023257446289062, 0.08875417327880859, 0.08800227355957031, 0.0873864974975586, 0.08696221160888672, 0.08701689910888671, 0.08702207946777343, 0.08722220611572265, 0.0878551025390625, 0.08772777557373047, 0.09126515197753907, 0.08815779113769531, 0.08896109008789063, 0.0887603530883789, 0.08806735992431641, 0.08819305419921875, 0.08784966278076171, 0.08771078491210937, 0.08779792022705078, 0.08847171020507813, 0.08754649353027344, 0.08798332977294922, 0.0877281265258789, 0.08887375640869141, 0.08824422454833984, 0.08807218933105469, 0.08774451446533203, 0.08773017883300781, 0.08743321228027344, 0.08777295684814453, 0.08784713745117187, 0.08775625610351563, 0.08839609527587891, 0.08810514831542969, 0.08774454498291015, 0.08944025421142578, 0.08878291320800781, 0.08796915435791015, 0.08835062408447265, 0.08740131378173828, 0.08772819519042968, 0.08798307037353516, 0.08822227478027343, 0.08760956573486328, 0.08786943817138672, 0.08769670104980469, 0.0877791976928711, 0.0891394271850586, 0.08860118103027344, 0.08816166687011719, 0.09099244689941406, 0.08832809448242188, 0.0876038055419922, 0.08801721954345704, 
0.08756018829345703, 0.08806931304931641, 0.0881526107788086, 0.10139228820800782, 0.08885244750976562, 0.08792630767822265, 0.08820211029052734, 0.08812748718261719, 0.08796729278564454, 0.08808902740478515, 0.08823935699462891, 0.08764006042480468, 0.08755043029785156, 0.08841785430908203, 0.08753376007080078, 0.08754176330566406, 0.0880268783569336, 0.08781289672851562, 0.08786534118652344, 0.08767894744873046, 0.08750508880615235, 0.08821887969970703]",tokens/s,11.395498223436944,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = 
param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1013, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 839, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 199, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1234, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 742, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 505, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1097.732096,4937.678848,0.0,4542.431232,4484.571136,s,1,14.5617099609375,14.5617099609375,0.0,14.5617099609375,14.5617099609375,14.5617099609375,14.5617099609375,[14.5617099609375],,kWh,0.00021752510441666904,2.398741052359524e-05,8.017478636199865e-05,0.00032168730130226295,,MB,1305.165824,5480.841216,0.0,5073.010688,4884.617216,s,10,11.0216416015625,1.10216416015625,0.006930244767834649,1.1029554443359375,1.1094821044921876,1.110569970703125,1.111440263671875,"[1.0856644287109376, 1.0958511962890625, 1.10051123046875, 1.10182177734375, 1.1063885498046875, 1.102702392578125, 1.1045953369140624, 1.10320849609375, 1.1116578369140624, 1.1092403564453126]",tokens/s,232.27029988319325,kWh,3.2022201245416584e-05,3.5315273165968545e-06,2.1340822628199835e-05,5.689455119021327e-05,tokens/kWh,4499552.147693818,MB,1325.572096,5495.52128,0.0,5087.690752,4884.619776,s,10,49.30262548828125,4.930262548828125,0.014049781425734228,4.934433837890625,4.943747021484374,4.946774633789063,4.949196723632812,"[4.9013173828125, 4.91222119140625, 4.9211572265625, 4.92892138671875, 4.93288525390625, 4.935982421875, 4.93823681640625, 4.93902734375, 4.94307421875, 4.94980224609375]",tokens/s,12.778224156637354,kWh,0.00014493874330833318,1.598788314719173e-05,9.619927140380006e-05,0.000257125897859325,tokens/kWh,245016.15949423984,,s,630,49.29829949951177,0.07825126904684399,0.0019029491224974552,0.07785262298583984,0.07942870635986328,0.07996926078796386,0.09095911315917969,"[0.088295166015625, 0.07616742706298828, 0.0763904037475586, 0.07641622161865234, 0.07685814666748046, 0.07706623840332032, 0.07695974731445313, 0.0764698257446289, 0.07737347412109374, 0.07706224060058593, 0.07746915435791016, 0.07827750396728515, 0.08050685119628906, 0.07918592071533204, 0.07841935729980469, 0.07729779052734374, 0.07696227264404297, 0.07748198699951171, 0.07705187225341797, 0.07739395141601563, 0.07659439849853515, 0.07655213165283203, 0.07665869140625, 0.07739862060546875, 0.07899571228027344, 0.07919206237792968, 0.0789764175415039, 0.07762319946289062, 0.07730191802978516, 0.07709136199951172, 0.07741849517822266, 0.07758643341064453, 0.0769617919921875, 0.07723622131347656, 0.07670310211181641, 0.07712783813476562, 0.0777814712524414, 0.07867951965332032, 0.0790574722290039, 0.07897612762451171, 0.0784454116821289, 0.07828265380859376, 0.07877232360839843, 0.07707647705078124, 0.07778665924072266, 0.07673648071289063, 0.07729158020019532, 0.07774166107177734, 0.07769379425048828, 0.07769292449951172, 0.07813529968261719, 0.07839334106445313, 0.07820492553710938, 0.07805951690673828, 0.0780083236694336, 0.07752025604248047, 0.07744576263427734, 0.07756800079345703, 0.07719526672363282, 0.07719664001464843, 0.07750313568115234, 0.07736729431152344, 
0.07775958251953125, 0.09045753479003907, 0.07656505584716797, 0.07712710571289062, 0.07751123046875, 0.0776785888671875, 0.07757209777832032, 0.07709696197509766, 0.07725254058837891, 0.07684307098388672, 0.07678361511230469, 0.07685648345947266, 0.07873212432861328, 0.08012582397460938, 0.07865897369384765, 0.07805967712402344, 0.07757433319091797, 0.07689206695556641, 0.0774210205078125, 0.07734272003173828, 0.0769241943359375, 0.07732281494140625, 0.07736057281494141, 0.07667990112304687, 0.07739318084716797, 0.07861670684814454, 0.07844310760498047, 0.07882137298583984, 0.07800012969970703, 0.07762124633789062, 0.07819670104980468, 0.07763970947265625, 0.07723417663574218, 0.07732182312011719, 0.07764771270751954, 0.07721836853027343, 0.07784857940673828, 0.07943577575683594, 0.07818649291992187, 0.07863497924804687, 0.078570556640625, 0.07804003143310546, 0.07809024047851562, 0.07794892883300782, 0.077412353515625, 0.07766015625, 0.0772300796508789, 0.0773201904296875, 0.07739801788330078, 0.07817625427246094, 0.07806918334960937, 0.07816454315185548, 0.0789109115600586, 0.07834403228759766, 0.07806845092773437, 0.07780556488037109, 0.07756095886230469, 0.07790681457519531, 0.07780127716064453, 0.07737945556640625, 0.07758060455322266, 0.07831116485595703, 0.07746790313720703, 0.07746336364746094, 0.09297280120849609, 0.07664051055908203, 0.07712118530273437, 0.07736937713623047, 0.07733280181884766, 0.07750653076171875, 0.07703350067138671, 0.077264892578125, 0.07704985809326172, 0.07705804443359375, 0.07720140838623046, 0.07857904052734375, 0.08072643280029297, 0.07805903625488281, 0.07732201385498047, 0.07747698974609375, 0.07816361236572265, 0.07756610870361329, 0.07748131561279296, 0.07718080139160156, 0.07689295959472656, 0.07732428741455079, 0.07686144256591797, 0.07823974609375, 0.08058048248291015, 0.0786690902709961, 0.0782992935180664, 0.07831581115722656, 0.07822525024414062, 0.07774665832519531, 0.0778094711303711, 0.07734300994873047, 0.07763961791992187, 0.07747385406494141, 0.07770521545410156, 0.07857766723632813, 0.0786903076171875, 0.0786527328491211, 0.07812300872802734, 0.07813190460205079, 0.07779737854003907, 0.07773945617675782, 0.07822569274902344, 0.07754959869384766, 0.07719757080078125, 0.07733980560302735, 0.07744566345214844, 0.0777383041381836, 0.07806976318359375, 0.07841177368164062, 0.07911804962158203, 0.0787376937866211, 0.07777279663085937, 0.0778239974975586, 0.07768678283691406, 0.078183837890625, 0.07772835540771485, 0.07744921875, 0.07739186859130859, 0.07832371520996094, 0.07783158111572265, 0.07806393432617187, 0.0786475830078125, 0.09281330871582032, 0.0767179183959961, 0.07716470336914062, 0.07717683410644531, 0.07724569702148437, 0.07711820983886719, 0.07719907379150391, 0.07700099182128907, 0.07733452606201172, 0.07715990447998047, 0.07711920166015625, 0.07876486206054688, 0.08106393432617187, 0.07851622772216797, 0.07762886047363281, 0.0777119369506836, 0.07741645050048829, 0.07742054748535156, 0.07754137420654297, 0.077517822265625, 0.07753011322021484, 0.07746694183349609, 0.07735935974121094, 0.07856377410888672, 0.07993138885498047, 0.07923916625976563, 0.07834210968017578, 0.07814527893066406, 0.07743721771240235, 0.07750450897216797, 0.07763542175292969, 0.07759878540039063, 0.07782614135742187, 0.07729961395263672, 0.07724655914306641, 0.0789683837890625, 0.07828012847900391, 0.078501953125, 0.07896774291992187, 0.07799603271484375, 0.07803286743164063, 0.07724240112304688, 0.0776437759399414, 0.07810006713867187, 
0.07743878173828125, 0.07749078369140625, 0.07731814575195313, 0.0780902099609375, 0.07855852508544922, 0.07878729248046876, 0.0791756820678711, 0.07869411468505859, 0.07853903961181641, 0.07804029083251954, 0.07786348724365234, 0.07796144104003906, 0.07850534057617188, 0.07756406402587891, 0.07768742370605469, 0.07780335998535157, 0.07917680358886718, 0.07990959930419922, 0.0793683853149414, 0.0916098861694336, 0.07732390594482422, 0.07715382385253906, 0.07678243255615234, 0.07695286560058594, 0.07729561614990234, 0.0772553939819336, 0.07744905853271485, 0.07726028442382812, 0.07695782470703125, 0.07718147277832031, 0.0796610565185547, 0.08236032104492187, 0.07861017608642579, 0.07817036437988281, 0.07769497680664063, 0.07771488189697266, 0.07759705352783203, 0.07813308715820312, 0.07728368377685547, 0.07717059326171875, 0.0774267807006836, 0.0771747817993164, 0.07813865661621094, 0.07990959930419922, 0.079556640625, 0.07869436645507813, 0.07860797119140625, 0.07725456237792969, 0.07756623840332032, 0.07751907348632812, 0.07726080322265624, 0.07723161315917969, 0.07719987487792969, 0.07726898956298828, 0.07805644989013671, 0.07848851013183594, 0.07927200317382813, 0.07892991638183594, 0.07823564910888672, 0.07796640014648437, 0.07789798736572266, 0.07795782470703125, 0.07784243011474609, 0.0771379165649414, 0.07750450897216797, 0.077412353515625, 0.07812652587890626, 0.07847379302978516, 0.08005987548828125, 0.07943017578125, 0.07869843292236328, 0.07993145751953125, 0.07843635559082031, 0.07929036712646484, 0.07765196990966797, 0.07765577697753906, 0.0775433578491211, 0.07734921264648438, 0.07771705627441407, 0.07809478759765626, 0.0788479995727539, 0.08000019073486328, 0.0911673583984375, 0.07721174621582032, 0.07692082977294921, 0.07693711853027344, 0.07752508544921875, 0.07713382720947265, 0.07749222564697265, 0.07744512176513672, 0.07730118560791016, 0.07722643280029297, 0.07745731353759766, 0.07969776153564453, 0.082393310546875, 0.0787026596069336, 0.07832790374755859, 0.07780556488037109, 0.07767359924316407, 0.07785257720947265, 0.07716678619384766, 0.07697897338867188, 0.07728070068359374, 0.07717436981201171, 0.07733551788330079, 0.07905814361572265, 0.08016099548339843, 0.0795244140625, 0.07906441497802734, 0.07806003570556641, 0.07771561431884766, 0.0775263671875, 0.07770588684082032, 0.07784652709960938, 0.07742390441894531, 0.07740265655517578, 0.07810288238525391, 0.07765385437011718, 0.07855900573730469, 0.0791983642578125, 0.07904025268554687, 0.07845231628417969, 0.07823139190673828, 0.07847821044921875, 0.07833190155029297, 0.07839743804931641, 0.07828684997558594, 0.07803084564208984, 0.07761714935302734, 0.0780738525390625, 0.07842396545410156, 0.07863715362548829, 0.07932422637939453, 0.0795248031616211, 0.07808345794677735, 0.07812979125976563, 0.07797459411621094, 0.07788591766357422, 0.07769929504394531, 0.07824160003662109, 0.07823814392089844, 0.07806566619873047, 0.07799603271484375, 0.0780943374633789, 0.079098876953125, 0.09088854217529296, 0.07770317077636718, 0.0775244140625, 0.07749180603027343, 0.07710614776611328, 0.07714406585693359, 0.07766015625, 0.07748377227783203, 0.07721772766113282, 0.07724678039550781, 0.07729494476318359, 0.07949584197998047, 0.08123391723632813, 0.07971635437011719, 0.0780206069946289, 0.07753711700439453, 0.07741251373291015, 0.07748403167724609, 0.07726898956298828, 0.07767401885986328, 0.07719369506835938, 0.0771638412475586, 0.07729564666748047, 0.078695068359375, 0.07968358612060547, 0.07946646118164062, 
0.07902006530761718, 0.07824793243408203, 0.07790348815917969, 0.07886876678466796, 0.07700694274902344, 0.07735033416748047, 0.07704019165039062, 0.07740415954589844, 0.07749561309814453, 0.07894905853271485, 0.079710205078125, 0.08032371520996094, 0.08017190551757812, 0.07965846252441407, 0.07845532989501953, 0.07848550415039063, 0.07763471984863281, 0.0775074234008789, 0.07748403167724609, 0.07789568328857421, 0.07746918487548828, 0.07789984130859375, 0.07888735961914063, 0.07863910675048828, 0.0795688934326172, 0.07915110778808594, 0.07821311950683593, 0.07832575988769531, 0.07810047912597656, 0.0775880355834961, 0.07768460845947266, 0.07802118682861328, 0.07743644714355469, 0.07797808074951172, 0.07806566619873047, 0.07881465911865235, 0.079276611328125, 0.09293392181396484, 0.0775406723022461, 0.07730172729492188, 0.07724742126464844, 0.07748329925537109, 0.07711360168457031, 0.07694918060302734, 0.07714672088623047, 0.0771148452758789, 0.07717459106445312, 0.07715932464599609, 0.0795893783569336, 0.08211046600341797, 0.0803082275390625, 0.07806752014160157, 0.07834015655517577, 0.07723993682861328, 0.07725020599365234, 0.07731446075439453, 0.07719366455078125, 0.07733395385742188, 0.07725933074951172, 0.07737548828125, 0.07837206268310547, 0.08006253051757813, 0.07958182525634766, 0.07901398468017579, 0.07896195220947265, 0.07797628784179687, 0.07779513549804687, 0.07840172576904297, 0.07816806030273438, 0.07836262512207032, 0.07733042907714843, 0.0775393295288086, 0.0777871322631836, 0.07889881896972656, 0.0796121597290039, 0.07929254150390624, 0.07878860473632812, 0.07822284698486329, 0.07810294342041016, 0.07794624328613281, 0.07792304229736328, 0.0779653091430664, 0.07769878387451172, 0.0772836151123047, 0.07774617767333984, 0.07848140716552734, 0.07861030578613282, 0.07903654479980468, 0.07933033752441407, 0.07813346862792969, 0.07832345581054688, 0.07792438507080078, 0.07803084564208984, 0.07822025299072266, 0.07801766204833985, 0.07733277130126953, 0.077652099609375, 0.07900534057617188, 0.07870134735107422, 0.07838307189941406, 0.09136204528808593, 0.0773214111328125, 0.0771777572631836, 0.07787433624267578, 0.07683977508544922, 0.07710720062255859, 0.0772005157470703, 0.0770807647705078, 0.07736390686035156, 0.07806976318359375, 0.07716044616699219, 0.0796917724609375, 0.0814755859375, 0.07850102233886719, 0.07795798492431641, 0.0776089630126953, 0.07791410827636719, 0.07748403167724609, 0.0771725082397461, 0.07785903930664062, 0.0772116470336914, 0.07736029052734375, 0.07752345275878907, 0.07929849243164062, 0.08053187561035156, 0.07924940490722657, 0.0789400634765625, 0.07886243438720703, 0.07914252471923829, 0.07833638763427735, 0.07826185607910156, 0.07758460998535156, 0.07735027313232422, 0.07723910522460938, 0.07750656127929688, 0.0785080337524414, 0.07920134735107422, 0.07942854309082031, 0.07930265808105469, 0.07943373107910157, 0.07816191864013672, 0.07785266876220703, 0.07819264221191406, 0.07801353454589843, 0.07782489776611329, 0.07750160217285157, 0.07791001892089844, 0.07791871643066406, 0.07896717071533203, 0.0794419174194336, 0.07837229156494141, 0.0784655990600586, 0.07900160217285156, 0.07854617309570312, 0.07824614715576172, 0.07833036804199218, 0.07867391967773438, 0.07785881805419922, 0.07792211151123046, 0.07858604431152344, 0.0782144012451172, 0.078833984375, 0.07928025817871094, 0.09098793792724609, 0.07736380767822265, 0.07695343780517579, 0.07715408325195312, 0.07711577606201171, 0.07726227569580078, 0.07718736267089844, 0.07730204772949219, 
0.07867558288574218, 0.07701261138916016, 0.07727385711669922, 0.08020787048339843, 0.08214527893066406, 0.07947673797607421, 0.07853177642822265, 0.07786537933349609, 0.07744902038574218, 0.07765257263183593, 0.07761456298828125, 0.07754720306396484, 0.07778707122802735, 0.07772048187255859, 0.07773139190673828, 0.07956934356689453, 0.08051097869873047, 0.08024269104003906, 0.0789401626586914, 0.07909990692138671, 0.07810015869140625, 0.07778726196289062, 0.07812086486816407, 0.07702146911621094, 0.07784636688232421, 0.07742441558837891, 0.07770764923095703, 0.07882956695556641, 0.07952758026123047, 0.07941734313964843, 0.07906940460205078, 0.07976563262939453, 0.07954617309570312, 0.07813314819335937, 0.07795846557617188, 0.07736009979248047, 0.07733408355712891, 0.07776710510253906, 0.07856460571289063, 0.07859481811523437, 0.07883161926269532, 0.0791572494506836, 0.07943167877197266, 0.0796917724609375, 0.0787877426147461, 0.07860662078857422, 0.07819875335693359, 0.07774214172363281, 0.07785913848876953, 0.07805155181884765, 0.07818854522705078, 0.07860018920898437, 0.07886847686767579, 0.07857350158691406, 0.07860765075683594]",tokens/s,12.779345462133842,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, 
requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = 
module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1141, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 944, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 677, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 500, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1054, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 856, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 596, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 426, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 426, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 115, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 1989, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2932, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 864, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", 
line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1532, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1541, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 169, in new_forward output = module._old_forward(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 214, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 598, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File 
""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1049.137152,5046.730752,0.0,4651.483136,4638.22848,s,1,14.198349609375,14.198349609375,0.0,14.198349609375,14.198349609375,14.198349609375,14.198349609375,[14.198349609375],,kWh,0.0002059586062625006,2.271175767481626e-05,7.954506363599956e-05,0.0003082154275733164,,MB,1222.680576,6172.901376,0.0,5765.070848,5418.530816,s,10,10.067480834960937,1.0067480834960938,0.0060373690484956935,1.0091661987304688,1.0126028259277344,1.0127268890380858,1.0128261395263671,"[0.9936867065429688, 0.9992445068359375, 1.0019698486328126, 1.007849609375, 1.0080123901367188, 1.0125752563476562, 1.0103200073242187, 1.0128509521484375, 1.0103255615234374, 1.01064599609375]",tokens/s,254.2840698648256,kWh,2.9188706434471595e-05,3.2190273342346356e-06,1.9385773084364752e-05,5.179350685307098e-05,tokens/kWh,4942704.511710836,MB,1240.367104,6172.901376,0.0,5765.070848,5418.533376,s,10,49.00916796875,4.900916796875,0.01320260211013012,4.905751953125,4.9146486328125,4.9152515625,4.91573390625,"[4.87516796875, 4.88421875, 4.890076171875, 4.89661669921875, 4.9038095703125, 4.9076943359375, 4.907861328125, 4.91335400390625, 4.9145146484375, 4.9158544921875]",tokens/s,12.854737717679894,kWh,0.00014398664825469116,1.5882754806762217e-05,9.576901348383259e-05,0.000255638416545286,tokens/kWh,246441.83316179962,,s,630,49.004861740112325,0.07778549482557509,0.0019485939166861061,0.07745126342773437,0.07909130020141601,0.07961224327087402,0.0891949423980713,"[0.08933990478515624, 0.07592518615722656, 0.07579583740234375, 0.07649581146240235, 0.07545414733886718, 0.07582675170898437, 0.07566413116455079, 0.075683837890625, 0.07550975799560547, 0.07577983856201172, 0.07732249450683594, 0.07804927825927735, 0.08050396728515626, 0.07935266876220703, 0.07704370880126953, 0.07665782165527343, 0.07588540649414062, 0.07588998413085937, 0.07575212860107422, 0.07571865844726562, 0.07590707397460937, 0.07600947570800781, 0.07672752380371094, 0.07752349090576172, 0.07847929382324219, 0.08017878723144531, 0.07869718170166015, 0.07727500915527344, 0.07706253051757812, 0.07684889221191406, 0.07630233764648438, 0.07626751708984375, 0.07732185363769531, 0.07647456359863282, 0.07617145538330078, 0.07733452606201172, 0.07787459564208984, 0.07901654052734375, 0.0784993896484375, 0.07908988952636718, 0.07763929748535156, 0.07766409301757812, 0.07689087677001953, 0.07647232055664062, 0.07668531036376953, 0.0768586883544922, 0.07644195556640625, 
0.07701673889160156, 0.07734854125976562, 0.07863772583007812, 0.07846537780761718, 0.07910399627685546, 0.07791001892089844, 0.07895021057128906, 0.07781132507324219, 0.07657324981689453, 0.07714790344238281, 0.07704755401611328, 0.07659529876708984, 0.077295166015625, 0.07726780700683594, 0.0777359390258789, 0.0784783706665039, 0.0919291229248047, 0.07576172637939453, 0.0755568618774414, 0.07590707397460937, 0.07573661041259766, 0.07641340637207031, 0.0758104019165039, 0.07576812744140625, 0.07689132690429687, 0.07628278350830078, 0.07592960357666016, 0.07761920166015625, 0.08085708618164063, 0.07805731201171875, 0.0778733139038086, 0.07724237060546875, 0.07604774475097656, 0.0764975357055664, 0.07604383850097657, 0.07632940673828124, 0.0774771499633789, 0.07690108489990234, 0.07623052978515625, 0.07779097747802734, 0.07969558715820313, 0.07903456115722657, 0.07781977844238282, 0.07758448028564453, 0.07880089569091797, 0.07630694580078125, 0.07666687774658203, 0.07605833435058594, 0.0763661117553711, 0.07667711639404297, 0.07663398742675781, 0.07721087646484374, 0.0781443862915039, 0.07885004425048828, 0.078671875, 0.0779422378540039, 0.07854134368896484, 0.07750656127929688, 0.0770142059326172, 0.07685142517089844, 0.076404541015625, 0.07686428833007812, 0.0770355224609375, 0.07708057403564453, 0.07759852600097657, 0.07851641845703125, 0.07817327880859375, 0.07840860748291016, 0.0778629150390625, 0.07767040252685548, 0.07786495971679687, 0.07760076904296875, 0.0772894744873047, 0.07688396453857421, 0.07732204437255859, 0.07719475555419922, 0.07785337829589843, 0.07802880096435547, 0.07879065704345703, 0.08826876831054688, 0.07669334411621094, 0.07579094696044922, 0.07627366638183594, 0.07631667327880859, 0.07575961303710937, 0.07579647827148438, 0.07631053161621094, 0.07642726135253906, 0.07683891296386719, 0.07633920288085938, 0.07827561950683594, 0.08079049682617187, 0.07991705322265626, 0.07755356597900391, 0.07686112213134766, 0.07660995483398438, 0.07662156677246093, 0.07638861083984375, 0.07607046508789063, 0.076271484375, 0.07684767913818359, 0.07670524597167969, 0.07757794952392578, 0.07920102691650391, 0.07901315307617188, 0.07767734527587891, 0.0770703353881836, 0.07801139068603516, 0.07681126403808594, 0.07685270690917968, 0.07693571472167969, 0.07647232055664062, 0.07682240295410156, 0.07676322937011719, 0.0774512939453125, 0.0785080337524414, 0.07871231842041015, 0.07946617889404296, 0.07816684722900391, 0.07755980682373047, 0.07802780914306641, 0.07727788543701172, 0.07711177825927734, 0.07703327941894532, 0.07709081268310547, 0.07758233642578125, 0.07713996887207031, 0.0777871322631836, 0.0786513900756836, 0.07839449310302735, 0.07836966705322265, 0.0781475830078125, 0.07869235229492187, 0.0779606704711914, 0.07787773132324219, 0.07692908477783203, 0.07748607635498046, 0.07730707550048828, 0.07751254272460938, 0.07814630126953125, 0.07785692596435546, 0.07851628875732422, 0.08884003448486329, 0.07638502502441406, 0.07640064239501954, 0.0759582748413086, 0.07597440338134766, 0.07646028900146484, 0.07625727844238281, 0.07593126678466797, 0.0761200942993164, 0.07692323303222656, 0.07742873382568359, 0.0781946563720703, 0.08213302612304688, 0.07952588653564453, 0.077412353515625, 0.07644322967529296, 0.07687564849853516, 0.07666044616699219, 0.07652025604248047, 0.07665254211425782, 0.07653689575195312, 0.07612662506103515, 0.07665865325927734, 0.07773446655273437, 0.07901593780517578, 0.07959347534179688, 0.07904825592041016, 0.07747360229492188, 0.07706483459472656, 
0.07739295959472656, 0.07693574523925781, 0.07688575744628906, 0.07686822509765626, 0.07686758422851563, 0.07674674987792969, 0.07716156768798828, 0.07800717163085938, 0.07906716918945313, 0.07922057342529297, 0.07867203521728515, 0.07757004547119141, 0.07769197082519531, 0.0779676513671875, 0.07718160247802734, 0.07712973022460938, 0.07709677124023437, 0.07709308624267579, 0.07721366119384766, 0.07903587341308593, 0.07867766571044922, 0.07856114959716796, 0.0788298568725586, 0.07819123077392579, 0.07798588562011718, 0.07786473846435547, 0.07791433715820313, 0.07738982391357421, 0.07722998046875, 0.07718675231933594, 0.07734636688232421, 0.0779559326171875, 0.07870054626464844, 0.07818240356445312, 0.0911994857788086, 0.0762798080444336, 0.07584358215332031, 0.07649603271484375, 0.0764582748413086, 0.07651779174804688, 0.07614479827880859, 0.07630643463134766, 0.076653564453125, 0.07694233703613282, 0.07694643402099609, 0.07857855987548829, 0.08163890838623047, 0.07821987152099609, 0.07759398651123046, 0.07686825561523437, 0.0766929931640625, 0.0770136947631836, 0.07673420715332031, 0.07662528228759766, 0.07662179565429687, 0.07683052825927734, 0.07660623931884766, 0.07949324798583984, 0.07966915130615235, 0.07928227233886719, 0.07841177368164062, 0.07741645050048829, 0.07729357147216796, 0.07710310363769532, 0.07671148681640624, 0.07656492614746094, 0.07674674987792969, 0.07674674987792969, 0.07674265289306641, 0.07807907104492187, 0.07892470550537109, 0.07920783996582031, 0.07908121490478516, 0.07890211486816406, 0.07831552124023437, 0.07692243194580078, 0.07714387512207031, 0.07698291015625, 0.07729718780517578, 0.07728790283203125, 0.07745126342773437, 0.07790169525146484, 0.07801436614990234, 0.0787042236328125, 0.07878031921386719, 0.07864329528808593, 0.0792828140258789, 0.07832575988769531, 0.07770435333251953, 0.07753814697265625, 0.07774553680419923, 0.07699887847900391, 0.07718701171875, 0.07758022308349609, 0.07819318389892578, 0.07874121856689453, 0.07854723358154297, 0.09250406646728515, 0.07675084686279297, 0.07657881927490234, 0.07647846221923828, 0.0760335693359375, 0.07605705261230469, 0.0766051483154297, 0.07653545379638672, 0.07656716918945312, 0.07651942443847656, 0.0773017578125, 0.07809843444824219, 0.08111007690429688, 0.08007350158691406, 0.0781416015625, 0.07743599700927735, 0.07678044891357422, 0.07651737976074219, 0.07716044616699219, 0.07665586853027344, 0.07667945861816407, 0.07683865356445313, 0.0768416976928711, 0.07736649322509766, 0.07966390228271485, 0.07912652587890626, 0.07844425964355468, 0.07831785583496094, 0.07772160339355469, 0.07743833923339843, 0.0773105926513672, 0.07698738861083984, 0.07756476593017578, 0.07682182312011719, 0.07649571228027344, 0.07733452606201172, 0.07904048156738282, 0.07885545349121094, 0.07853679656982422, 0.07855481719970703, 0.07960675048828125, 0.07807180786132813, 0.07713587188720702, 0.07713938903808594, 0.07747805023193359, 0.07735273742675781, 0.07709951782226562, 0.07797567749023437, 0.07788953399658204, 0.07851417541503906, 0.07857151794433594, 0.07862844848632812, 0.07839376068115235, 0.07795097351074219, 0.07811891174316406, 0.07767449951171874, 0.07791600036621094, 0.07697779083251953, 0.07707292938232421, 0.07745126342773437, 0.07808204650878907, 0.07814262390136718, 0.07821517181396484, 0.0922833251953125, 0.07605657958984376, 0.0760274887084961, 0.07657923126220703, 0.07725465393066407, 0.07603807830810547, 0.07654611206054687, 0.07631439971923829, 0.07631484985351562, 0.07741645050048829, 
0.07675846099853516, 0.07832736206054687, 0.08169369506835937, 0.07808921813964843, 0.07723110198974609, 0.0768202896118164, 0.077050048828125, 0.07691651153564454, 0.07653807830810547, 0.07619174194335937, 0.07660749053955078, 0.07664230346679687, 0.07795420837402343, 0.07770403289794922, 0.07947408294677734, 0.0792080307006836, 0.07818956756591797, 0.07752448272705079, 0.07701478576660156, 0.07753190612792969, 0.07766015625, 0.07626956939697266, 0.0767262725830078, 0.07723417663574218, 0.07736524963378906, 0.07787725067138672, 0.07937840270996094, 0.0800123519897461, 0.07899190521240235, 0.078283203125, 0.07784243011474609, 0.07792230224609376, 0.07785049438476563, 0.07748531341552735, 0.07709123229980469, 0.07682198333740234, 0.07735807800292968, 0.07750656127929688, 0.07796736145019531, 0.07891763305664062, 0.07925350189208985, 0.07851827239990235, 0.078487548828125, 0.07799398040771484, 0.07774169921875, 0.07901427459716796, 0.07771340942382812, 0.07677529907226563, 0.07728131103515624, 0.07822908782958984, 0.07852671813964844, 0.07851606750488281, 0.07855760192871093, 0.09115846252441406, 0.07670630645751954, 0.07646975708007812, 0.07633283233642578, 0.0765337905883789, 0.0765283203125, 0.07622246551513671, 0.07657881927490234, 0.07638835144042969, 0.0765132827758789, 0.07781539154052734, 0.07809065246582031, 0.08151561737060548, 0.07953705596923828, 0.07803449249267579, 0.07733865356445313, 0.076499267578125, 0.07646627044677734, 0.07694950103759765, 0.07665849304199218, 0.07661126708984375, 0.07655174255371094, 0.07656707000732423, 0.07822582244873047, 0.07952515411376954, 0.08030281829833984, 0.07929631805419922, 0.07803718566894531, 0.07706419372558594, 0.07729737854003907, 0.077340576171875, 0.07714649963378906, 0.07733417510986328, 0.07831078338623047, 0.07705084991455079, 0.07731404876708985, 0.07851583862304687, 0.0790429458618164, 0.07931001281738281, 0.07871161651611328, 0.07838713836669922, 0.0775823974609375, 0.07878656005859375, 0.07711872100830078, 0.0767925796508789, 0.07751398468017578, 0.07763136291503907, 0.07734934234619141, 0.07833411407470703, 0.08007023620605469, 0.07871965026855468, 0.07824806213378906, 0.07788735961914063, 0.07836233520507813, 0.07840796661376953, 0.07781581115722656, 0.07737718200683594, 0.0773359375, 0.07852130889892578, 0.077264892578125, 0.07808348846435546, 0.07863970947265625, 0.07872512054443359, 0.0879636459350586, 0.07623270416259766, 0.07625113677978515, 0.07621568298339844, 0.07642556762695313, 0.0770041275024414, 0.07656543731689452, 0.07660749053955078, 0.07653135681152344, 0.077053955078125, 0.07745161437988281, 0.07872306823730468, 0.0814202880859375, 0.07960105895996093, 0.07732412719726563, 0.07663488006591797, 0.07662537384033204, 0.07727494049072266, 0.07751497650146484, 0.07678956604003906, 0.0767857894897461, 0.07705977630615235, 0.07730470275878906, 0.07856947326660156, 0.07934361267089844, 0.07948271942138672, 0.07875727844238281, 0.07802751922607422, 0.07794687652587891, 0.07712345886230469, 0.07699468994140625, 0.07711949157714844, 0.0773017578125, 0.07774534606933593, 0.07737782287597657, 0.07929459381103515, 0.07879647827148438, 0.07851900482177734, 0.0791592025756836, 0.07901936340332032, 0.07828966522216797, 0.0777359390258789, 0.07760486602783204, 0.07720550537109375, 0.07771955108642578, 0.07756755065917968, 0.07756832122802734, 0.07838527679443359, 0.07839315032958985, 0.07843583679199219, 0.07845958709716797, 0.07860991668701171, 0.07894057464599609, 0.07810467529296874, 0.07853670501708984, 
0.07722803497314454, 0.07732192230224609, 0.0779717788696289, 0.07756185913085938, 0.07801446533203125, 0.07843424224853515, 0.0784814682006836, 0.079476318359375, 0.0905379867553711, 0.07649423980712891, 0.07646883392333985, 0.07641088104248046, 0.07612416076660156, 0.0765191650390625, 0.0762671356201172, 0.07626521301269532, 0.07647526550292968, 0.0767011489868164, 0.07777894592285156, 0.0785311050415039, 0.08133379364013672, 0.07809276580810547, 0.07787513732910156, 0.07721155548095703, 0.07694761657714844, 0.0767610855102539, 0.0765251235961914, 0.07650342559814453, 0.07674665832519531, 0.07671209716796876, 0.07822713470458985, 0.07817862701416016, 0.07986697387695313, 0.07917984008789063, 0.07864201354980468, 0.0779386215209961, 0.0781968002319336, 0.07752499389648437, 0.07719439697265625, 0.07749718475341796, 0.0767262725830078, 0.07666687774658203, 0.07760195159912109, 0.07822831726074218, 0.07923638153076172, 0.07961673736572265, 0.07945625305175781, 0.07831756591796875, 0.07862271881103515, 0.0783790054321289, 0.07693721771240235, 0.07699411010742188, 0.07758073425292969, 0.07721766662597657, 0.07713190460205079, 0.07794175720214844, 0.08017404937744141, 0.07924457550048829, 0.07884671783447265, 0.07851827239990235, 0.07830323028564454, 0.07865753936767578, 0.07808975982666015, 0.07773590087890625, 0.07788800048828125, 0.07826150512695312, 0.07806438446044922, 0.07742371368408203, 0.07833414459228516, 0.07877203369140626, 0.0786707534790039]",tokens/s,12.855867308453632,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1178.492928,1093.599232,0.0,698.351616,690.178048,s,1,9.307734375,9.307734375,0.0,9.307734375,9.307734375,9.307734375,9.307734375,[9.307734375],,kWh,4.813778019170816e-05,5.30268701156643e-06,1.66522355439791e-05,7.009270274725368e-05,,MB,1372.995584,1408.172032,0.0,1000.341504,957.775872,s,10,0.5886727676391602,0.058867276763916024,0.0008932273377649839,0.05856430435180664,0.05927543525695801,0.06033389415740967,0.061180661277771,"[0.05887075042724609, 0.058735103607177735, 0.05904022216796875, 0.05820880126953125, 0.058393505096435545, 0.06139235305786133, 0.05903468704223633, 0.05824940872192383, 0.05835657501220703, 0.058391361236572265]",tokens/s,4348.76580118822,kWh,1.8262286517710889e-06,2.013398846739338e-07,1.2138065265999281e-06,3.241375063044951e-06,tokens/kWh,78978826.89314988,MB,1387.06944,1416.56064,0.0,1008.730112,957.778432,s,10,25.123927734375,2.5123927734375004,0.007594485465332524,2.5096402587890623,2.52302841796875,2.52503056640625,2.52663228515625,"[2.5029521484375, 2.51033251953125, 2.505419677734375, 2.52703271484375, 2.508947998046875, 2.506647216796875, 2.50775732421875, 2.51946875, 2.52258349609375, 
2.512785888671875]",tokens/s,25.075697027181892,kWh,7.273305940906234e-05,8.022381448986752e-06,3.13892751113997e-05,0.00011214471596944882,tokens/kWh,561774.12779005,,s,630,25.121339572906486,0.03987514217921666,0.0006522795463124235,0.03971884727478027,0.04030095443725586,0.040638852691650386,0.04335835014343263,"[0.039836639404296874, 0.040035518646240234, 0.040170177459716794, 0.039548095703125, 0.03970502471923828, 0.03939376068115234, 0.039524383544921875, 0.03946905517578125, 0.03934003067016602, 0.039572704315185545, 0.0394596176147461, 0.039615840911865235, 0.039416481018066406, 0.03941580963134766, 0.0394958381652832, 0.03980271911621094, 0.039354366302490236, 0.039370750427246096, 0.03942604827880859, 0.039667713165283204, 0.0397127685546875, 0.03951411056518555, 0.03952230453491211, 0.03984384155273438, 0.039569408416748046, 0.03940761566162109, 0.039896385192871094, 0.03985580825805664, 0.03975270462036133, 0.039616512298583983, 0.03965542221069336, 0.039772159576416014, 0.039712257385253906, 0.03989350509643555, 0.03991676712036133, 0.03953276824951172, 0.03972758483886719, 0.04024095916748047, 0.03947734451293945, 0.03963324737548828, 0.03955023956298828, 0.03963910293579102, 0.03971468734741211, 0.03976169586181641, 0.040372383117675784, 0.04014969635009766, 0.03961974334716797, 0.0395497932434082, 0.0397946891784668, 0.039736446380615235, 0.03977849578857422, 0.03951996612548828, 0.039363296508789065, 0.03947043228149414, 0.03971535873413086, 0.03972531127929688, 0.03955862426757813, 0.039553470611572265, 0.03953276824951172, 0.0396492805480957, 0.04370761489868164, 0.03965945434570312, 0.039697025299072264, 0.040117599487304687, 0.03997119903564453, 0.039870750427246096, 0.039686111450195315, 0.03997596740722656, 0.04007219314575195, 0.04203724670410156, 0.040457759857177734, 0.0402006721496582, 0.04028416061401367, 0.040091232299804686, 0.0399257926940918, 0.03949606323242188, 0.03956531143188476, 0.040037696838378906, 0.03982815933227539, 0.039873790740966794, 0.03938777542114258, 0.03946099090576172, 0.03996192169189453, 0.03932844924926758, 0.03942399978637695, 0.039354366302490236, 0.039454113006591796, 0.04000009536743164, 0.03957964706420898, 0.03951411056518555, 0.03977328109741211, 0.03963772964477539, 0.0396822395324707, 0.039556961059570316, 0.039622112274169924, 0.0396743049621582, 0.03964339065551758, 0.040551551818847655, 0.03957235336303711, 0.039519649505615234, 0.03975753784179688, 0.03965983963012695, 0.03967238235473633, 0.039666847229003904, 0.03996096038818359, 0.03991376113891602, 0.04015068817138672, 0.040065567016601564, 0.039760929107666015, 0.03962140655517578, 0.03955318450927734, 0.0401324462890625, 0.040065216064453124, 0.039763904571533205, 0.039848064422607424, 0.03967158508300781, 0.04012403106689453, 0.039811614990234376, 0.03960992050170899, 0.03953907012939453, 0.03991263961791992, 0.039580543518066405, 0.040938655853271486, 0.03960636901855469, 0.03985055923461914, 0.039642433166503906, 0.03991961669921875, 0.03984143829345703, 0.03998134231567383, 0.03970054244995117, 0.04029958343505859, 0.04053087997436523, 0.039739391326904294, 0.039733375549316406, 0.0395665283203125, 0.039766719818115234, 0.039593982696533206, 0.039707969665527344, 0.03966207885742187, 0.03940371322631836, 0.03962575912475586, 0.03967689514160156, 0.03980806350708008, 0.03967808151245117, 0.039791423797607424, 0.039626750946044925, 0.04024515151977539, 0.039772449493408205, 0.040640033721923825, 0.03969257736206055, 0.039935359954833986, 0.04027865600585938, 
0.03996031951904297, 0.03981654357910156, 0.03975609588623047, 0.03976992034912109, 0.03989993667602539, 0.03959807968139648, 0.039600128173828124, 0.03952025604248047, 0.03954278564453125, 0.03958927917480469, 0.0397973747253418, 0.03982755279541016, 0.03944214248657227, 0.03952195358276367, 0.039517982482910156, 0.03969302368164063, 0.03957360076904297, 0.039800704956054686, 0.03949961471557617, 0.03981536102294922, 0.03966566467285156, 0.03950947189331055, 0.03954332733154297, 0.039858177185058595, 0.04044095993041992, 0.0397628173828125, 0.03968928146362305, 0.03945568084716797, 0.03986636734008789, 0.04024662399291992, 0.039481502532958984, 0.03952076721191406, 0.0396759033203125, 0.03980886459350586, 0.039704734802246094, 0.0395830078125, 0.03959667205810547, 0.04001308822631836, 0.03993439865112305, 0.039808734893798825, 0.03948303985595703, 0.04003049468994141, 0.039513694763183595, 0.04011212921142578, 0.03960108947753906, 0.039741439819335936, 0.03963286590576172, 0.03958560180664063, 0.039516223907470706, 0.03956137466430664, 0.039470111846923825, 0.039699424743652345, 0.041523200988769535, 0.04022902297973633, 0.03948118209838867, 0.03942604827880859, 0.0396409912109375, 0.039659614562988284, 0.039451744079589846, 0.03967001724243164, 0.03971958541870117, 0.04053606414794922, 0.04031206512451172, 0.03970943832397461, 0.04045401763916016, 0.03977836990356445, 0.04011148834228516, 0.03994489669799805, 0.040151039123535154, 0.04017561721801758, 0.04027996826171875, 0.039927391052246096, 0.03974588775634766, 0.03979411315917969, 0.0434752311706543, 0.03988320159912109, 0.03976806259155274, 0.04534272003173828, 0.04501913452148437, 0.0409169921875, 0.04099225616455078, 0.04020803070068359, 0.039664478302001954, 0.039876609802246096, 0.03980284881591797, 0.03959632110595703, 0.04005043029785156, 0.03949977493286133, 0.03985539245605469, 0.0399832649230957, 0.03973791885375977, 0.03958169555664062, 0.04023875045776367, 0.039448287963867186, 0.03946793746948242, 0.03937251281738281, 0.039593982696533206, 0.03940332794189453, 0.04034515380859375, 0.0402213134765625, 0.040048641204833986, 0.04024863815307617, 0.039995166778564455, 0.03971510314941406, 0.0396596794128418, 0.03959142303466797, 0.0393691520690918, 0.03968582534790039, 0.03981923294067383, 0.039525249481201175, 0.039653377532958986, 0.03949772644042969, 0.0397209587097168, 0.039626750946044925, 0.03943587112426758, 0.03954524612426758, 0.03996057510375976, 0.03943987274169922, 0.03936307144165039, 0.039395328521728515, 0.04182160186767578, 0.039530303955078124, 0.039651424407958984, 0.03940652847290039, 0.03930495834350586, 0.039591934204101564, 0.039712158203125, 0.04006358337402344, 0.03972710418701172, 0.03937279891967774, 0.03968991851806641, 0.03944681549072265, 0.03948880004882813, 0.040428287506103514, 0.03995859146118164, 0.039712703704833985, 0.039624702453613284, 0.03979673767089844, 0.03970851135253906, 0.0396412467956543, 0.03982950210571289, 0.04016128158569336, 0.03989718246459961, 0.04000678253173828, 0.03962502288818359, 0.03987299346923828, 0.039446529388427735, 0.039708671569824217, 0.040054561614990235, 0.03965359878540039, 0.03996057510375976, 0.039593982696533206, 0.03953811264038086, 0.040581214904785154, 0.04224252700805664, 0.040243457794189454, 0.039487232208251954, 0.03961427307128906, 0.03993164825439453, 0.03973318481445313, 0.040753662109375, 0.04004044723510742, 0.039739391326904294, 0.04007516860961914, 0.04176454544067383, 0.040866111755371096, 0.04020038223266602, 0.03973625564575195, 
0.0397628173828125, 0.039599777221679684, 0.03957180786132813, 0.039726207733154294, 0.03948556900024414, 0.03958217620849609, 0.03963523101806641, 0.0396308479309082, 0.039610366821289066, 0.03965542221069336, 0.03960128021240234, 0.03957372665405273, 0.039487583160400394, 0.039653663635253904, 0.03942745590209961, 0.03953961563110352, 0.03953216171264649, 0.039586177825927736, 0.039561344146728517, 0.04035747146606445, 0.0398111686706543, 0.04006057739257812, 0.039924030303955076, 0.039908641815185546, 0.03963385772705078, 0.04037628936767578, 0.039771617889404295, 0.039559520721435544, 0.04011644744873047, 0.03982745742797852, 0.03987804794311523, 0.04004627227783203, 0.03995536041259766, 0.039897087097167966, 0.03968384170532226, 0.03954473495483399, 0.039817569732666015, 0.03954687881469727, 0.039798080444335936, 0.03958428955078125, 0.03944195175170898, 0.03962944030761719, 0.03948044967651367, 0.03977510452270508, 0.04005260848999023, 0.03967942428588867, 0.03978720092773438, 0.03971891021728516, 0.039857471466064456, 0.03982124710083008, 0.039653182983398434, 0.03982227325439453, 0.039608001708984375, 0.03962809753417969, 0.039657726287841796, 0.039504638671875, 0.039607872009277345, 0.03970502471923828, 0.03976508712768555, 0.039790847778320315, 0.03988742446899414, 0.03944655990600586, 0.039849151611328126, 0.03964150238037109, 0.039992927551269535, 0.0396352653503418, 0.03942867279052734, 0.03975372695922851, 0.039976287841796875, 0.03974969482421875, 0.04045475387573242, 0.04043161773681641, 0.039810081481933594, 0.040637409210205075, 0.03978358459472656, 0.0399901123046875, 0.039339134216308594, 0.039506111145019535, 0.03944723129272461, 0.03944243240356445, 0.039458049774169925, 0.03936332702636719, 0.03952844619750977, 0.03935846328735351, 0.03957555389404297, 0.04008345413208008, 0.041207809448242184, 0.03980287933349609, 0.03967718505859375, 0.03950892639160156, 0.039352127075195316, 0.03958147048950195, 0.03967612838745117, 0.03975987243652344, 0.04035174560546875, 0.04057088088989258, 0.04015043258666992, 0.039696414947509764, 0.039929759979248046, 0.03959584045410156, 0.03978857421875, 0.039521087646484376, 0.03952844619750977, 0.039686145782470705, 0.03963068771362305, 0.040145057678222656, 0.04067327880859375, 0.0397918701171875, 0.039846656799316406, 0.03967712020874024, 0.03968083190917969, 0.03967974472045899, 0.03942819213867187, 0.03981327819824219, 0.04011539077758789, 0.03983443069458008, 0.03957084655761719, 0.03970547103881836, 0.03964694213867188, 0.040017280578613285, 0.03972774505615234, 0.04260752105712891, 0.03974467086791992, 0.03977481460571289, 0.03944473648071289, 0.039577598571777346, 0.039798782348632815, 0.03952435302734375, 0.04036403274536133, 0.041090847015380856, 0.03960444641113281, 0.039478687286376955, 0.03925872039794922, 0.03926425552368164, 0.039321952819824216, 0.039394977569580075, 0.0392540168762207, 0.03916595077514649, 0.04287692642211914, 0.03957964706420898, 0.03945792007446289, 0.03946912002563477, 0.039340480804443356, 0.04030054473876953, 0.03996710586547852, 0.03952777481079101, 0.03923011016845703, 0.03941580963134766, 0.03957555389404297, 0.039540542602539065, 0.03956252670288086, 0.03962358474731445, 0.03922467041015625, 0.039400096893310546, 0.045762016296386716, 0.04368809509277344, 0.03998348617553711, 0.03955686569213867, 0.03947135925292969, 0.039411006927490236, 0.03943078231811523, 0.039374912261962894, 0.039617599487304686, 0.039848896026611326, 0.039723007202148435, 0.03969785690307617, 0.03988127899169922, 
0.039615840911865235, 0.04044252777099609, 0.04009983825683594, 0.04025139236450195, 0.040627552032470704, 0.039765758514404295, 0.039711647033691407, 0.03970240020751953, 0.03984944152832031, 0.04105599975585938, 0.041044097900390625, 0.040304641723632816, 0.039768863677978515, 0.03964675140380859, 0.03972073745727539, 0.0395880012512207, 0.039809215545654295, 0.04021868896484375, 0.04033331298828125, 0.04029417419433594, 0.039870014190673826, 0.040018592834472656, 0.03981926345825195, 0.04001567840576172, 0.039724224090576174, 0.040497920989990235, 0.04020780944824219, 0.040178497314453124, 0.03992473602294922, 0.03987148666381836, 0.039839134216308594, 0.03970134353637695, 0.039833343505859375, 0.03966566467285156, 0.03973241424560547, 0.03977913665771484, 0.03977974319458008, 0.0398526382446289, 0.03965542221069336, 0.03956067276000977, 0.039491710662841795, 0.039575809478759764, 0.039657505035400394, 0.039510143280029296, 0.040476673126220705, 0.03981721496582031, 0.03979788970947266, 0.03983798217773438, 0.03999599838256836, 0.03971891021728516, 0.04027088165283203, 0.04043670272827148, 0.04048896026611328, 0.040025215148925784, 0.040180606842041014, 0.03982131195068359, 0.03987446212768555, 0.03975084686279297, 0.039965599060058594, 0.03977830505371094, 0.03998720169067383, 0.04108451080322266, 0.03994460678100586, 0.039699775695800785, 0.043072193145751954, 0.039739391326904294, 0.03959574508666992, 0.039532833099365235, 0.03966323089599609, 0.03993452835083008, 0.04029420852661133, 0.03966880035400391, 0.04073507308959961, 0.041477825164794924, 0.03991024017333984, 0.04049862289428711, 0.041063201904296874, 0.03992876815795898, 0.03993632125854492, 0.0397031364440918, 0.040215679168701175, 0.0401396484375, 0.04008665466308594, 0.039680896759033205, 0.03965488052368164, 0.039980670928955075, 0.03959081649780274, 0.03964131164550781, 0.03989481735229492, 0.040046592712402344, 0.04051459121704101, 0.039771102905273435, 0.04009983825683594, 0.03953868865966797, 0.03971878433227539, 0.03982144165039062, 0.03963852691650391, 0.03949820709228516, 0.039554080963134765, 0.040068096160888675, 0.039884159088134766, 0.04013324737548828, 0.03996623992919922, 0.04012393569946289, 0.0407336311340332, 0.04003430557250977, 0.04365107345581055, 0.040551551818847655, 0.039787296295166016, 0.04003440093994141, 0.040210430145263674, 0.03989049530029297, 0.03965087890625, 0.039655872344970707, 0.03954937744140625, 0.03959590530395508, 0.03952243041992187, 0.039587329864501954, 0.039580158233642575, 0.03950796890258789, 0.039567359924316405, 0.03953049468994141, 0.039462913513183595, 0.03943833541870117, 0.03982460784912109, 0.04038291168212891, 0.03996899032592773, 0.03971289443969726, 0.039644542694091796, 0.03960201644897461, 0.03953129577636719, 0.039812576293945315, 0.039615009307861326, 0.03956121444702149, 0.040887935638427735, 0.039696800231933595, 0.03989910507202148, 0.03963904190063477, 0.03965705490112305, 0.03959584045410156, 0.039524513244628905, 0.039537086486816406, 0.039618560791015625]",tokens/s,25.078280486262706,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1127.5264,2578.382848,0.0,2183.135232,2081.564672,s,1,10.6620048828125,10.6620048828125,0.0,10.6620048828125,10.6620048828125,10.6620048828125,10.6620048828125,[10.6620048828125],,kWh,0.00010513518717917426,1.158997807986383e-05,3.851614192398878e-05,0.00015524130718302687,,MB,1236.185088,3134.128128,0.0,2726.2976,2478.86848,s,10,3.9971143798828126,0.39971143798828124,0.0057334785106462356,0.40094010925292967,0.40350568847656254,0.4045810302734375,0.4054413037109375,"[0.38347836303710936, 0.400876953125, 0.40225457763671874, 0.4010032653808594, 0.3996824951171875, 0.4032667236328125, 0.3997377624511719, 0.40234405517578126, 0.4056563720703125, 0.3988138122558594]",tokens/s,640.4620325313418,kWh,1.1468964845513397e-05,1.2640497670574177e-06,7.601715483076874e-06,2.0334730095647688e-05,tokens/kWh,12589299.134823164,MB,1247.301632,3134.128128,0.0,2726.2976,2478.87104,s,10,26.040555419921873,2.604055541992188,0.015545223263453626,2.5988059082031247,2.623723461914062,2.632519445800781,2.6395562329101563,"[2.59868408203125, 2.598927734375, 2.596089111328125, 2.59702587890625, 2.60553076171875, 2.604890625, 2.621768798828125, 2.6413154296875, 2.592539794921875, 2.583783203125]",tokens/s,24.193032362052822,kWh,7.592829943865324e-05,8.375808276304601e-06,5.030880306752271e-05,0.00013461291078248057,tokens/kWh,468008.60061484715,,s,630,26.037380851745596,0.04132917595515176,0.0008461596932565667,0.041192014694213866,0.04200077819824219,0.04252378139495849,0.045277171020507816,"[0.044763137817382816, 0.041047870635986326, 0.041023166656494144, 0.040728927612304684, 0.040747360229492186, 0.04168889617919922, 0.04103903961181641, 0.040573535919189455, 0.04069807815551758, 0.04118732833862305, 0.040892414093017575, 0.0408985595703125, 0.040853504180908204, 0.04127334213256836, 0.040826335906982425, 0.041048385620117187, 0.04071139144897461, 0.04057190322875977, 0.04066847991943359, 0.040559295654296876, 0.04146745681762695, 0.04147654342651367, 0.04064972686767578, 0.04166342544555664, 0.04202707290649414, 0.04086374282836914, 0.0404664306640625, 0.04043161773681641, 0.040202239990234374, 
0.040540287017822266, 0.040777313232421876, 0.04138217544555664, 0.040681472778320314, 0.04096819305419922, 0.041111553192138675, 0.04122828674316406, 0.041000415802001956, 0.04101583862304688, 0.04129990386962891, 0.04170963287353516, 0.04153500747680664, 0.04399151992797851, 0.041183231353759765, 0.04132175827026367, 0.042524833679199216, 0.042342334747314456, 0.04133523178100586, 0.04148575973510742, 0.041079231262207035, 0.04115635299682617, 0.041147968292236325, 0.04180889511108398, 0.04119254302978516, 0.04109932708740234, 0.04126192092895508, 0.04137766265869141, 0.04234867095947266, 0.04219289779663086, 0.04113612747192383, 0.040944801330566404, 0.041040287017822266, 0.041120193481445313, 0.040986591339111325, 0.044784225463867185, 0.04140188980102539, 0.04124105453491211, 0.040755199432373046, 0.04090044784545899, 0.040943519592285156, 0.04144323348999023, 0.04152355194091797, 0.04174233627319336, 0.04128710556030273, 0.04142924880981445, 0.04116857528686523, 0.041583393096923826, 0.04097359848022461, 0.04071996688842774, 0.04094972610473633, 0.04075939178466797, 0.040979232788085934, 0.040521854400634764, 0.040220672607421876, 0.040271873474121096, 0.04195849609375, 0.04086262512207031, 0.04093644714355469, 0.04089548873901367, 0.04081782531738281, 0.04082912063598633, 0.040951454162597656, 0.040874622344970704, 0.040870273590087894, 0.04142489624023438, 0.04123344039916992, 0.04104496002197266, 0.040769535064697264, 0.04111743927001953, 0.04137599945068359, 0.04119551849365234, 0.04122009658813477, 0.04111088180541992, 0.041196193695068356, 0.04117036819458008, 0.041289726257324216, 0.041197822570800784, 0.041310527801513675, 0.04105535888671875, 0.04255347061157227, 0.04120665740966797, 0.04252249526977539, 0.04139740753173828, 0.04156707382202148, 0.04121395111083984, 0.04208156967163086, 0.04119420623779297, 0.04152524948120117, 0.041364990234375, 0.041506431579589845, 0.04109811019897461, 0.0411495361328125, 0.04088105773925781, 0.04161648178100586, 0.041030559539794925, 0.04111548614501953, 0.04124256134033203, 0.04452854537963867, 0.04068876647949219, 0.040667232513427735, 0.04021353530883789, 0.040632064819335935, 0.040755199432373046, 0.04074496078491211, 0.04058489608764648, 0.04011964797973633, 0.04036297607421875, 0.04072166442871094, 0.040981246948242185, 0.04095795059204101, 0.04057088088989258, 0.04050070571899414, 0.04009625625610352, 0.04013577651977539, 0.03994844818115234, 0.04063107299804687, 0.04115660858154297, 0.04459027099609375, 0.04207494354248047, 0.04103561782836914, 0.04145971298217774, 0.040837249755859374, 0.04118431854248047, 0.04042031860351562, 0.041207809448242184, 0.04044595336914063, 0.04326316833496094, 0.04101763153076172, 0.04178585433959961, 0.04125289535522461, 0.04134502410888672, 0.04105744171142578, 0.04143395233154297, 0.04131020736694336, 0.04122009658813477, 0.041322624206542966, 0.04121724700927734, 0.0412064323425293, 0.04119756698608398, 0.04110243225097656, 0.04194380950927734, 0.04122195053100586, 0.04227705764770508, 0.04148806381225586, 0.04163631820678711, 0.041501953125, 0.041392894744873045, 0.04125696182250976, 0.04169244766235351, 0.04097052764892578, 0.041070079803466795, 0.040979393005371095, 0.04155388641357422, 0.04186115264892578, 0.04115359878540039, 0.04105516815185547, 0.041193473815917966, 0.04125686264038086, 0.041239776611328126, 0.04103372955322265, 0.04576678466796875, 0.04119532775878906, 0.04119830322265625, 0.040774879455566404, 0.040921886444091796, 0.04084348678588867, 0.040804351806640625, 
0.04132863998413086, 0.04118844985961914, 0.04085625457763672, 0.0407405776977539, 0.040811008453369144, 0.0409535026550293, 0.04080060958862305, 0.04073206329345703, 0.040615936279296876, 0.04064521789550781, 0.04066025543212891, 0.04076780700683594, 0.0410792007446289, 0.04101939010620117, 0.04194617462158203, 0.04144838333129883, 0.04125286483764649, 0.04123830413818359, 0.04087580871582031, 0.04091948699951172, 0.04096758270263672, 0.04200048065185547, 0.041140735626220705, 0.041653759002685545, 0.0412042236328125, 0.041662464141845705, 0.041495742797851565, 0.041588737487792966, 0.04119846343994141, 0.041260990142822265, 0.04106444931030274, 0.04130815887451172, 0.04108697509765625, 0.041099262237548825, 0.04130815887451172, 0.04158464050292969, 0.04135116958618164, 0.040959552764892576, 0.04206636810302734, 0.04170751953125, 0.041325599670410156, 0.042580032348632814, 0.04110019302368164, 0.04080025482177734, 0.04090060806274414, 0.040683521270751956, 0.04064051055908203, 0.04077315139770508, 0.0404989128112793, 0.04214575958251953, 0.040965953826904294, 0.04107158279418945, 0.04123344039916992, 0.04090521621704102, 0.04092934417724609, 0.041049793243408204, 0.04488191986083984, 0.04125696182250976, 0.04110540771484375, 0.04115456008911133, 0.040791393280029294, 0.042414752960205075, 0.04103577423095703, 0.04126297760009766, 0.04353036880493164, 0.04214169692993164, 0.04179123306274414, 0.04125516891479492, 0.04143654251098633, 0.04115027236938477, 0.041124671936035154, 0.04097743988037109, 0.040868831634521485, 0.04125491333007812, 0.04110335922241211, 0.04114432144165039, 0.04104579162597656, 0.04257404708862305, 0.041446590423583986, 0.04189676666259766, 0.041470977783203126, 0.042562335968017576, 0.04154735946655273, 0.041457664489746096, 0.04120435333251953, 0.04122195053100586, 0.04116854476928711, 0.041390113830566407, 0.04121036911010742, 0.04142630386352539, 0.04150281524658203, 0.04129216003417969, 0.0415458869934082, 0.041446464538574215, 0.041331649780273434, 0.04222771072387695, 0.0411541748046875, 0.041221824645996094, 0.04113459014892578, 0.04132883071899414, 0.04064460754394531, 0.041924606323242186, 0.04140188980102539, 0.041304542541503904, 0.04119331359863281, 0.040892574310302736, 0.04079740905761719, 0.04084611129760742, 0.040245086669921874, 0.04098806381225586, 0.04056550216674805, 0.04060160064697266, 0.041062049865722657, 0.04070844650268555, 0.040959999084472655, 0.04099299240112304, 0.04088387298583984, 0.04106988906860352, 0.04066966247558594, 0.044959712982177734, 0.04101315307617188, 0.04144543838500977, 0.04116534423828125, 0.040804351806640625, 0.04089014434814453, 0.04068188858032227, 0.04099420928955078, 0.0408845443725586, 0.04067712020874024, 0.0409683837890625, 0.0408454704284668, 0.041037406921386715, 0.040689697265625, 0.04070419311523438, 0.04104390335083008, 0.040900863647460935, 0.041293407440185545, 0.04089014434814453, 0.04122390365600586, 0.04698720169067383, 0.042458080291748045, 0.04164019012451172, 0.04112928009033203, 0.042092864990234374, 0.04128166580200195, 0.04094976043701172, 0.045335617065429684, 0.04111990356445312, 0.041145118713378906, 0.04102963256835938, 0.04116889572143555, 0.04154163360595703, 0.04110131072998047, 0.041197471618652344, 0.041166751861572266, 0.04110291290283203, 0.041040512084960935, 0.040941150665283206, 0.04127350234985352, 0.041183456420898434, 0.040635871887207034, 0.04066156768798828, 0.040854721069335936, 0.043794559478759765, 0.04197411346435547, 0.040828353881835935, 0.04093225479125977, 
0.040986366271972656, 0.04062006378173828, 0.040470752716064456, 0.04079206466674805, 0.04079123306274414, 0.04128851318359375, 0.041199615478515625, 0.041060352325439455, 0.040996223449707034, 0.04073910522460938, 0.04121379089355469, 0.04126976013183594, 0.04102143859863281, 0.04127107238769531, 0.041150718688964846, 0.04559097671508789, 0.0413226547241211, 0.04149071884155273, 0.04134902572631836, 0.041162593841552735, 0.04112566375732422, 0.04093999862670898, 0.04088195037841797, 0.041410110473632813, 0.04148700714111328, 0.04166403198242188, 0.041603553771972654, 0.04175820922851563, 0.04150527954101563, 0.041611263275146484, 0.041527198791503905, 0.04149875259399414, 0.041403839111328125, 0.04171625518798828, 0.04163974380493164, 0.04165241622924805, 0.04285440063476562, 0.04209187316894531, 0.04178963088989258, 0.04193532943725586, 0.04201267242431641, 0.04181196975708008, 0.04157782363891602, 0.04179830551147461, 0.04165222549438476, 0.04164812850952149, 0.04176863861083984, 0.041722175598144534, 0.041809921264648435, 0.04177920150756836, 0.041836544036865236, 0.04185289764404297, 0.041643070220947265, 0.04156063842773437, 0.04138979339599609, 0.041517566680908204, 0.04136675262451172, 0.041169921875, 0.04116783905029297, 0.041055233001708984, 0.04246275329589844, 0.04219952011108399, 0.04155990219116211, 0.04150697708129883, 0.04168864059448242, 0.04123078536987305, 0.042527904510498045, 0.04094572830200195, 0.04099356842041016, 0.04085887908935547, 0.04125110244750976, 0.041150943756103515, 0.04119670486450195, 0.04117308807373047, 0.04125676727294922, 0.04135145568847656, 0.04146448135375977, 0.04148348617553711, 0.04682940673828125, 0.04185513687133789, 0.04192256164550781, 0.04149353790283203, 0.04155081558227539, 0.0413460807800293, 0.04144841766357422, 0.04144268798828125, 0.04176345443725586, 0.041964958190917966, 0.04177747344970703, 0.041896224975585934, 0.04186316680908203, 0.04182944107055664, 0.04185184097290039, 0.04285187149047852, 0.045337055206298826, 0.04284415817260742, 0.04231987380981445, 0.04195072174072265, 0.04205388641357422, 0.043597984313964847, 0.04223177719116211, 0.041836673736572266, 0.042003456115722655, 0.0424376335144043, 0.042147838592529296, 0.0437841911315918, 0.04239052963256836, 0.04177407836914063, 0.04243865585327149, 0.04365926361083984, 0.04160655975341797, 0.04173030471801758, 0.04159932708740234, 0.041540897369384766, 0.04207843017578125, 0.041323009490966796, 0.04136959838867187, 0.041481246948242186, 0.04135830307006836, 0.04121916961669922, 0.04105033493041992, 0.04128201675415039, 0.0412913589477539, 0.04227459335327149, 0.04140911865234375, 0.0415992317199707, 0.041492767333984375, 0.041574111938476564, 0.04156371307373047, 0.04109356689453125, 0.041082878112792966, 0.04102137756347656, 0.041282623291015626, 0.04105292892456055, 0.04109952163696289, 0.04140031814575195, 0.04153488159179688, 0.0417163200378418, 0.04160412979125976, 0.04148329544067383, 0.04131423950195313, 0.04513407897949219, 0.04290310287475586, 0.041640575408935544, 0.041611263275146484, 0.0416409912109375, 0.04182470321655273, 0.0417410888671875, 0.04193254470825195, 0.0415536003112793, 0.041987903594970705, 0.04180809783935547, 0.041823646545410154, 0.04147612762451172, 0.0415814094543457, 0.041283584594726565, 0.041240062713623044, 0.04110121536254883, 0.04116131210327149, 0.04087551879882813, 0.04151491165161133, 0.042178848266601565, 0.041838912963867186, 0.04112953567504883, 0.041226688385009765, 0.04150886535644531, 0.04098867034912109, 0.04064614486694336, 
0.040952320098876956, 0.040478687286376956, 0.04064668655395508, 0.04041932678222656, 0.040543712615966794, 0.04030928039550781, 0.04117900848388672, 0.041013343811035156, 0.04106038284301758, 0.04072447967529297, 0.040416671752929685, 0.03992022323608398, 0.03999065780639648, 0.040495681762695315, 0.0402977294921875, 0.043488033294677736, 0.04067331314086914, 0.04032291030883789, 0.04149379348754883, 0.040718238830566404, 0.04069055938720703, 0.04065903854370117, 0.04081459045410156, 0.04060160064697266, 0.04062617492675781, 0.04063958358764649, 0.04047065734863281, 0.0404136962890625, 0.04046454238891602, 0.04030831909179688, 0.0408741455078125, 0.04121004867553711, 0.04113174438476563, 0.04093999862670898, 0.04113129425048828, 0.04075798416137695, 0.04675382232666016, 0.041271808624267575, 0.041180606842041015, 0.041211967468261716, 0.041281440734863284, 0.04113049697875976, 0.04126428985595703, 0.04173920059204102, 0.04119551849365234, 0.04106854248046875, 0.04125894546508789, 0.04119148635864258, 0.04124467086791992, 0.041183231353759765, 0.041270496368408204, 0.04106454467773438, 0.041134750366210934, 0.041166881561279296, 0.041161983489990235, 0.04128025436401367, 0.041164478302001956, 0.04219254302978516, 0.04108355331420899, 0.04098457717895508, 0.04093337631225586, 0.040822784423828126, 0.04094771194458008, 0.040594688415527345, 0.04051635360717774, 0.040359935760498046, 0.040365280151367186, 0.0404918098449707, 0.040495105743408207, 0.041194847106933594, 0.040807071685791015, 0.04088995361328125, 0.04070851135253906, 0.04050124740600586, 0.04039420700073242, 0.04034409713745117, 0.040235008239746094, 0.04073056030273438, 0.04043487930297852, 0.04030758285522461, 0.04070604705810547, 0.041852447509765626, 0.04158464050292969, 0.04119599914550781, 0.04101696014404297, 0.04101772689819336, 0.04112112045288086, 0.040643230438232425, 0.04131161499023438, 0.040497791290283205, 0.040359935760498046, 0.040179454803466796, 0.040151294708251954, 0.040169471740722655, 0.04044784164428711, 0.04083523178100586, 0.04108863830566406, 0.04087152099609375, 0.04084611129760742]",tokens/s,24.195982060836325,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1071.198208,9784.19712,0.0,9388.949504,9304.608768,s,1,33.34423828125,33.34423828125,0.0,33.34423828125,33.34423828125,33.34423828125,33.34423828125,[33.34423828125],,kWh,0.0007540017895958953,8.316458329058557e-05,0.00027355355217595356,0.0011107199250624345,,MB,1440.055296,10197.336064,0.0,9789.505536,9597.896704,s,10,8.040341491699218,0.8040341491699218,0.0023356114356497845,0.8030022888183594,0.8071809997558594,0.8079320526123047,0.8085328948974609,"[0.8070140991210938, 0.80868310546875, 0.8029677734375, 0.8014375610351563, 0.8019359741210937, 0.8016148071289062, 0.8054298095703125, 0.8030368041992187, 0.8028924560546875, 
0.8053291015625]",tokens/s,318.39443668442726,kWh,2.351742092180127e-05,2.59274873552298e-06,1.414095148369321e-05,4.025112114101746e-05,tokens/kWh,6360071.291011222,MB,1457.840128,10197.336064,0.0,9789.505536,9597.899264,s,10,376.00018359375,37.600018359375,0.16895826576110604,37.603142578125,37.6943734375,37.853501171875,37.980803359375,"[38.01262890625, 37.4249765625, 37.36190625, 37.58154296875, 37.6554140625, 37.65901171875, 37.64826953125, 37.55991796875, 37.4717734375, 37.6247421875]",tokens/s,1.6755310967631987,kWh,0.001098004854404441,0.0001211190368270474,0.0004170829212729071,0.0016362068125043952,tokens/kWh,38503.69007055504,,s,630,375.9972374267582,0.5968210117885044,0.00443225393698296,0.5958622131347656,0.6028890075683593,0.6050607788085938,0.6108657287597656,"[0.6028455810546876, 0.601858154296875, 0.6067220458984375, 0.6018051147460938, 0.6022013549804688, 0.6019547119140625, 0.6029561157226563, 0.6032117919921876, 0.6029594116210938, 0.612116943359375, 0.606504150390625, 0.6009823608398438, 0.6021836547851562, 0.6019022827148437, 0.6039021606445313, 0.604809814453125, 0.6030663452148437, 0.6019942626953125, 0.5999908447265625, 0.6002979125976563, 0.6002705688476563, 0.6017738037109375, 0.6031939086914062, 0.6015057983398437, 0.602144775390625, 0.6057083129882812, 0.6109083862304687, 0.60127978515625, 0.603673095703125, 0.6017578735351562, 0.6011526489257812, 0.6061841430664062, 0.6056959838867187, 0.6005821533203125, 0.6010084228515625, 0.60104052734375, 0.6021704711914062, 0.6035027465820313, 0.6000682983398438, 0.60494677734375, 0.6022489624023437, 0.6039393310546874, 0.6041251220703125, 0.6077747192382813, 0.6067715454101562, 0.6014808349609375, 0.6006743774414063, 0.6013679809570313, 0.604412353515625, 0.6046351928710938, 0.6008729858398437, 0.6072293701171875, 0.603181640625, 0.6019993896484375, 0.6030438232421875, 0.6034349975585938, 0.6034082641601562, 0.6035252685546875, 0.6033141479492188, 0.6079754028320312, 0.6067548217773437, 0.6035963745117188, 0.6037651977539062, 0.6079428100585937, 0.6043832397460938, 0.6001909790039063, 0.5939834594726563, 0.5917960205078125, 0.5906411743164063, 0.591706298828125, 0.5910667114257813, 0.593160888671875, 0.59062841796875, 0.5940695190429688, 0.5983685302734375, 0.611418212890625, 0.5901310424804688, 0.5917617797851562, 0.59225, 0.5902669067382813, 0.5958062133789063, 0.59508935546875, 0.5886234741210937, 0.5887841796875, 0.5903662109375, 0.5908258056640625, 0.588355224609375, 0.5906476440429688, 0.5884783935546875, 0.5889412231445312, 0.589370849609375, 0.596641845703125, 0.5964757080078125, 0.5919793090820312, 0.5924515991210938, 0.5938770141601563, 0.5919756469726563, 0.605154052734375, 0.6060114135742187, 0.60096923828125, 0.6042640991210938, 0.597992919921875, 0.5971519775390625, 0.597608642578125, 0.5909672241210937, 0.5906780395507812, 0.592594482421875, 0.5959049682617188, 0.5965209350585937, 0.592761962890625, 0.5915247802734375, 0.5908375854492187, 0.5888574829101563, 0.5906227416992188, 0.5963468627929688, 0.5918330688476563, 0.5916201171875, 0.590329833984375, 0.5922201538085937, 0.591213623046875, 0.5905967407226562, 0.5925728759765625, 0.5887221069335937, 0.5880332641601562, 0.5918829956054688, 0.6064515380859375, 0.5910056762695313, 0.5916113891601562, 0.5899713134765625, 0.5907954711914063, 0.5892399291992187, 0.5882429809570312, 0.590924072265625, 0.5881221923828125, 0.5944566040039062, 0.592416748046875, 0.594651123046875, 0.594651123046875, 0.592395751953125, 0.5933265991210938, 0.5949963989257813, 
0.6025059204101563, 0.594542724609375, 0.5944998779296875, 0.5925673217773437, 0.5933776245117187, 0.5965089721679687, 0.5941063842773437, 0.59082666015625, 0.5932439575195313, 0.5892044677734375, 0.5920706787109375, 0.5915607299804687, 0.5894164428710937, 0.5919284057617188, 0.5921057739257812, 0.5938305053710937, 0.5996436767578125, 0.5973121337890624, 0.5914080200195313, 0.5909532470703125, 0.5937902221679687, 0.59068212890625, 0.60174609375, 0.59428271484375, 0.588859375, 0.588945068359375, 0.5886996459960937, 0.591849853515625, 0.5910482177734375, 0.5970559692382813, 0.591372314453125, 0.589613037109375, 0.591730712890625, 0.5980591430664063, 0.5909022216796875, 0.590512451171875, 0.589815673828125, 0.5902853393554688, 0.5953883666992188, 0.6008665771484375, 0.5984281616210938, 0.5926400146484375, 0.5929915771484375, 0.5961532592773438, 0.5939130859375, 0.595165283203125, 0.5936787719726563, 0.59468798828125, 0.5955399780273437, 0.6051920166015625, 0.5947493286132812, 0.59442724609375, 0.5943694458007812, 0.5935206298828125, 0.5939766845703125, 0.6018453979492188, 0.5960242919921875, 0.6012723388671875, 0.594763427734375, 0.5954083862304688, 0.596875732421875, 0.5931905517578125, 0.5947828369140625, 0.5952688598632813, 0.5937939453125, 0.5983102416992188, 0.5983355712890625, 0.5950941162109376, 0.5954183349609375, 0.5935399780273437, 0.5976902465820313, 0.5952139892578125, 0.602274169921875, 0.5936903686523437, 0.5936167602539062, 0.59613232421875, 0.5942803344726563, 0.5962844848632812, 0.595685302734375, 0.5948220825195313, 0.5952529907226562, 0.5961361083984374, 0.5995029907226562, 0.6011207885742188, 0.5961404418945313, 0.5949779052734375, 0.5952394409179688, 0.5943582763671875, 0.5994179077148437, 0.5971793823242187, 0.5935636596679688, 0.594302978515625, 0.5938565063476563, 0.5945220947265625, 0.5976268920898438, 0.6006149291992188, 0.5948243408203125, 0.5963108520507813, 0.5984461059570313, 0.601712646484375, 0.5953228759765625, 0.5968281860351563, 0.5951876831054688, 0.5951051025390625, 0.5932755737304688, 0.6020219116210938, 0.5925532836914063, 0.5928407592773437, 0.5972486572265625, 0.6136115112304688, 0.5967626342773438, 0.5968455810546875, 0.5948057861328125, 0.5957645874023437, 0.6007316284179688, 0.59920263671875, 0.5974547119140625, 0.5957315673828125, 0.5976392822265625, 0.5954722900390625, 0.5965398559570313, 0.6025751342773438, 0.5965640869140625, 0.595261474609375, 0.594862060546875, 0.5949419555664063, 0.595453125, 0.59492626953125, 0.5956987915039063, 0.595679931640625, 0.5981223754882813, 0.6001539306640625, 0.6035088500976562, 0.5981088256835938, 0.5971773681640625, 0.5991137084960938, 0.597984130859375, 0.607182861328125, 0.5975752563476563, 0.5976677856445313, 0.5981531982421875, 0.59568896484375, 0.594882568359375, 0.5987255859375, 0.5973217163085938, 0.596738037109375, 0.5974507446289062, 0.6019256591796875, 0.6013419799804688, 0.5962445068359375, 0.599704833984375, 0.5971033325195313, 0.594956298828125, 0.5952719116210937, 0.608171630859375, 0.5966090087890625, 0.5966104736328125, 0.6002491455078125, 0.5949972534179687, 0.5944392700195312, 0.5959771118164062, 0.5941405639648437, 0.5949609985351563, 0.5957724609375, 0.601207763671875, 0.601726806640625, 0.5959757690429688, 0.596114013671875, 0.5957713623046875, 0.5965967407226562, 0.5979279174804687, 0.603367431640625, 0.60029541015625, 0.59593115234375, 0.5955235595703126, 0.5981327514648438, 0.5978749389648438, 0.5946727294921875, 0.5946375122070312, 0.5988843383789062, 0.6046651611328125, 
0.595911376953125, 0.5954580688476563, 0.5949931640625, 0.5955930786132813, 0.5955933227539062, 0.6029557495117187, 0.5952073974609375, 0.5954889526367187, 0.59537060546875, 0.6116347045898437, 0.5965767211914063, 0.595527587890625, 0.6024244995117187, 0.5977281494140625, 0.598308837890625, 0.599946533203125, 0.6005275268554687, 0.5966800537109375, 0.5993351440429687, 0.5958189086914063, 0.5957488403320312, 0.5959925537109375, 0.6074736938476563, 0.595789306640625, 0.6001536254882812, 0.5938220825195313, 0.5955078125, 0.5955349731445313, 0.5965604248046875, 0.5952570190429688, 0.5971381225585938, 0.5971549072265625, 0.60133056640625, 0.6001909790039063, 0.5976555786132812, 0.5961093139648438, 0.59944140625, 0.6011473999023438, 0.5951934814453125, 0.6015812377929688, 0.5974590454101563, 0.5977605590820313, 0.5963837890625, 0.5938460693359375, 0.5969553833007812, 0.5939158935546875, 0.5954846801757813, 0.5952341918945312, 0.5960772094726563, 0.6067825317382812, 0.5993645629882812, 0.596664306640625, 0.5983659057617188, 0.5962693481445313, 0.5970942993164062, 0.59681396484375, 0.5955235595703126, 0.5929987182617188, 0.593972900390625, 0.592578125, 0.5972463989257812, 0.5952061157226562, 0.5933255615234375, 0.596800048828125, 0.5977803955078125, 0.6018182373046875, 0.5944452514648437, 0.5963675537109375, 0.595975830078125, 0.5949536743164062, 0.595263427734375, 0.6091591186523437, 0.595114501953125, 0.60036328125, 0.594218994140625, 0.5989412231445312, 0.5965144653320312, 0.5959823608398438, 0.596597412109375, 0.5954786376953125, 0.595578857421875, 0.6014299926757812, 0.6001397705078125, 0.5972869262695313, 0.5978328247070313, 0.5955162963867188, 0.59441943359375, 0.5973363037109375, 0.607594482421875, 0.5962998046875, 0.5958758544921875, 0.6099005737304688, 0.5968058471679687, 0.5973866577148438, 0.5946961669921875, 0.5964100341796875, 0.5956954956054688, 0.6040125732421875, 0.6104871215820312, 0.5966166381835938, 0.5963967895507812, 0.5998364868164062, 0.5974837036132813, 0.5974556884765625, 0.5977908325195312, 0.6004901733398438, 0.5943525390625, 0.596400390625, 0.5953576049804687, 0.597854248046875, 0.5953843383789063, 0.5959700317382812, 0.5951561889648438, 0.594907958984375, 0.6006824951171875, 0.6024642333984375, 0.5962158203125, 0.6019788818359375, 0.5938500366210937, 0.5992626342773437, 0.6107612915039062, 0.5971549072265625, 0.6003631591796875, 0.59579296875, 0.5981795654296875, 0.5943314819335938, 0.59375634765625, 0.5951851196289063, 0.5953706665039062, 0.5994371337890625, 0.59782763671875, 0.5985177612304687, 0.5939118041992187, 0.5950873413085938, 0.5945466918945312, 0.5935913696289062, 0.5980929565429688, 0.5980690307617188, 0.5955616455078125, 0.5945672607421875, 0.5931753540039062, 0.5958485717773437, 0.5952047119140625, 0.5952163696289062, 0.5934772338867188, 0.5939346923828125, 0.5987833251953125, 0.6131842651367188, 0.594884521484375, 0.5942286987304688, 0.596968017578125, 0.594314453125, 0.5932551879882813, 0.5972001953125, 0.5957782592773437, 0.5934410400390625, 0.5956968994140625, 0.592969482421875, 0.5955541381835937, 0.5919118041992187, 0.5964756469726562, 0.593744140625, 0.5946019897460938, 0.6026619262695313, 0.5984078979492188, 0.594002197265625, 0.5937841796875, 0.5958027954101562, 0.5939199829101562, 0.5926154174804688, 0.5971343383789063, 0.596537353515625, 0.5970851440429688, 0.5933087158203125, 0.6032945556640625, 0.5925457153320313, 0.5930682373046875, 0.59221337890625, 0.5930604248046875, 0.5945692138671875, 0.5978451538085937, 0.598553466796875, 
0.5981306762695312, 0.59144189453125, 0.5925986938476563, 0.5965909423828125, 0.5966292724609376, 0.5924080200195313, 0.5907730712890625, 0.5921033935546876, 0.5935547485351562, 0.59438720703125, 0.5947315673828125, 0.5929389038085937, 0.5950689086914063, 0.5974876098632812, 0.6009405517578125, 0.5931389770507812, 0.59338232421875, 0.5940979614257812, 0.5931397094726563, 0.5954005737304687, 0.5951715087890626, 0.5949296875, 0.5940667724609375, 0.5914609985351562, 0.5911316528320313, 0.5908694458007813, 0.5943107299804687, 0.6021800537109375, 0.5940326538085937, 0.5967232055664062, 0.5942800903320312, 0.6022614135742187, 0.5944718627929687, 0.5990830078125, 0.5948395385742188, 0.5916785888671875, 0.5934662475585938, 0.5949556884765625, 0.5983237915039062, 0.5984083251953125, 0.5919404296875, 0.5921072387695312, 0.5899962768554687, 0.5920009155273438, 0.590489501953125, 0.5917916259765625, 0.5956400146484375, 0.5975126953125, 0.5985182495117187, 0.5944566040039062, 0.594249755859375, 0.594356201171875, 0.5968773193359375, 0.593915771484375, 0.5965591430664062, 0.5984838256835937, 0.5934976806640625, 0.5949558715820312, 0.5969436645507813, 0.59708349609375, 0.5944019165039063, 0.5960962524414063, 0.5939883422851563, 0.6109265747070313, 0.5965557861328125, 0.596955078125, 0.6001993408203125, 0.5978842163085938, 0.5951463623046875, 0.5976583251953125, 0.5990177001953125, 0.5944686279296875, 0.5951160278320312, 0.6075435791015625, 0.5938401489257813, 0.5951651611328125, 0.5972459716796875, 0.5951201171875, 0.5962383422851563, 0.5981963500976563, 0.6022985229492187, 0.595721923828125, 0.5977508544921875, 0.5967196655273438, 0.5956064453125, 0.5976572265625, 0.6013710327148437, 0.6000189208984374, 0.598857666015625, 0.5942784423828125, 0.5940072021484375, 0.594172119140625, 0.5938895874023438, 0.5953923950195312, 0.5947453002929688, 0.5956060180664062, 0.60209765625, 0.602881591796875, 0.596602783203125, 0.5950305786132812, 0.5968320922851562, 0.5974058837890625, 0.5957222290039063, 0.6005103759765625, 0.5988222045898437, 0.5954149169921875, 0.5952000732421875, 0.5938410034179687, 0.5949718017578125, 0.592651123046875, 0.5953099975585937, 0.5948351440429688, 0.594010986328125, 0.604080078125, 0.5999165649414062, 0.5941574096679687, 0.5981466064453125, 0.5956267700195312, 0.595819580078125, 0.5965194091796875, 0.5995903930664063, 0.60062109375, 0.5965291748046875, 0.5959049682617188, 0.5951348876953125, 0.5948922729492188]",tokens/s,1.6755442255682014,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1231.867904,8448.311296,0.0,8053.06368,7930.605568,s,1,20.01771875,20.01771875,0.0,20.01771875,20.01771875,20.01771875,20.01771875,[20.01771875],,kWh,0.00037114494468332474,4.093289112526352e-05,0.0001430862255800136,0.0005551640613886018,,MB,1240.8832,10214.11328,0.0,9806.282752,9135.58528,s,10,18.41105407714844,1.8411054077148439,0.0058580154062901615,1.8429332885742187,1.8458336547851564,1.8459052307128907,1.845962491455078,"[1.8262845458984376, 1.8353111572265626, 1.8402359619140625, 1.8409066162109375, 1.8422747802734376, 1.8453515625, 1.845976806640625, 1.843591796875, 1.8453031005859375, 1.8458177490234375]",tokens/s,139.04690026289362,kWh,5.352741918416617e-05,5.9037187881541e-06,3.562297294279898e-05,9.505411091511925e-05,tokens/kWh,2693202.82453224,MB,1259.106304,10214.11328,0.0,9806.282752,9135.58784,s,10,90.73418359375,9.073418359375001,0.0156509527946229,9.0774873046875,9.088825683593749,9.089517822265625,9.090071533203124,"[9.0414873046875, 9.051458984375, 9.064400390625, 9.071638671875, 9.076765625, 9.078208984375, 9.0830068359375, 9.0883349609375, 9.088671875, 9.0902099609375]",tokens/s,6.943358886885889,kWh,0.00026539329654541496,2.9273421409762916e-05,0.000176476363403201,0.00047114308135837895,tokens/kWh,133717.3408518728,,s,630,90.7288251647949,0.14401400819808718,0.0019797132612469886,0.1439374542236328,0.14570172576904297,0.14631678695678713,0.15306474502563477,"[0.15379833984375, 0.1397642822265625, 0.1409845428466797, 0.14038812255859376, 0.14089727783203124, 0.1408256072998047, 0.14947740173339844, 0.14503549194335938, 0.14089599609375, 0.1415963897705078, 0.1414220428466797, 0.14118199157714845, 0.14272079467773438, 0.14735565185546876, 0.14382797241210937, 0.14251513671875, 0.14129362487792968, 0.1414345245361328, 0.14134719848632812, 0.14360572814941405, 0.14593434143066406, 0.14409318542480468, 0.1424937286376953, 0.1414691162109375, 0.14184463500976563, 0.1420700225830078, 0.14439846801757814, 0.14492057800292968, 0.1443736572265625, 0.14278460693359374, 0.1422459259033203, 0.14192393493652344, 0.142700927734375, 0.14486528015136718, 0.14446185302734374, 0.1448591003417969, 0.14298713684082032, 0.14260578918457031, 0.14261036682128905, 0.14342037963867188, 0.14498789978027343, 0.14528025817871093, 0.14367526245117188, 0.14357798767089844, 0.14229299926757813, 0.14309791564941407, 0.14418118286132814, 0.14484632873535155, 0.14460751342773437, 0.14494650268554687, 0.1429410858154297, 0.14263821411132813, 0.143804443359375, 0.1449316864013672, 0.14434518432617188, 0.1438693084716797, 0.14451664733886718, 0.14444610595703125, 0.1428118438720703, 0.1436764221191406, 0.14362690734863282, 0.14530979919433593, 0.14513551330566407, 0.1523686981201172, 0.1416813507080078, 0.1412472381591797, 0.14095619201660156, 0.14148200988769533, 0.1419370880126953, 0.148048095703125, 0.14472735595703126, 0.14170159912109376, 0.14197555541992188, 0.1413112335205078, 0.1409502716064453, 0.14308146667480467, 0.14584422302246094, 0.1445457000732422, 0.1417667541503906, 0.14226431274414061, 0.14122598266601563, 0.14181919860839845, 0.14360797119140625, 0.1447523193359375, 0.14514796447753905, 0.14314576721191405, 0.14196879577636717, 0.142608154296875, 0.14223052978515624, 0.14413189697265624, 0.14523802185058593, 0.1449505615234375, 0.1436904602050781, 0.14239744567871093, 0.1425807342529297, 0.14269747924804688, 0.14421577453613282, 0.14531939697265625, 0.14443399047851563, 0.1435891571044922, 0.14319638061523438, 
0.1424691162109375, 0.1433242492675781, 0.14391714477539064, 0.14513385009765625, 0.14452175903320313, 0.14384332275390624, 0.14342515563964844, 0.14303680419921874, 0.14375730895996094, 0.14464125061035157, 0.14478764343261719, 0.14430677795410157, 0.14445703125, 0.14446456909179686, 0.1427569580078125, 0.1437493438720703, 0.14498825073242189, 0.1450030975341797, 0.14388633728027345, 0.1446646728515625, 0.14433010864257811, 0.14358738708496094, 0.1443046112060547, 0.1438963165283203, 0.1448379821777344, 0.15338607788085937, 0.14150137329101561, 0.1414450225830078, 0.14122195434570312, 0.14120346069335937, 0.14220492553710937, 0.1482581787109375, 0.1459554901123047, 0.14240150451660155, 0.14162944030761718, 0.14164787292480469, 0.14160281372070313, 0.14318899536132812, 0.14694633483886718, 0.14541497802734374, 0.14252774047851563, 0.14294493103027345, 0.14155302429199218, 0.14197824096679687, 0.14442413330078124, 0.14596383666992188, 0.14480802917480468, 0.14270585632324218, 0.14298390197753907, 0.14149183654785155, 0.14314533996582032, 0.14456640625, 0.14539967346191407, 0.14411978149414062, 0.14321871948242187, 0.14269760131835937, 0.1426236114501953, 0.14321417236328124, 0.14509507751464842, 0.14506710815429688, 0.14410986328125, 0.14347737121582033, 0.14311013793945312, 0.1430711669921875, 0.1435661163330078, 0.14529171752929687, 0.14498252868652345, 0.14449647521972656, 0.143414306640625, 0.14314093017578125, 0.14363935852050783, 0.14387318420410156, 0.14573846435546875, 0.14403532409667968, 0.14494700622558593, 0.1434799041748047, 0.14385862731933594, 0.1444381408691406, 0.14431027221679688, 0.14497782897949218, 0.14496572875976563, 0.14371961975097655, 0.14379283142089844, 0.14393942260742187, 0.14457679748535157, 0.14378802490234374, 0.14436341857910157, 0.14422991943359376, 0.15387295532226564, 0.14117225646972656, 0.14120393371582032, 0.14122157287597656, 0.1414822998046875, 0.1424199676513672, 0.14897561645507812, 0.14533631896972657, 0.14146969604492188, 0.1415045166015625, 0.14139334106445312, 0.14181741333007813, 0.14350361633300782, 0.14801997375488282, 0.14481202697753906, 0.14329241943359375, 0.14143849182128906, 0.14180134582519532, 0.14239395141601563, 0.14503961181640626, 0.14611430358886718, 0.14477107238769532, 0.1426282501220703, 0.14226693725585937, 0.14192437744140626, 0.14295805358886718, 0.14489208984375, 0.14623983764648438, 0.1445419464111328, 0.14333926391601562, 0.1427412109375, 0.14198789978027343, 0.14271212768554686, 0.14565840148925782, 0.1458855743408203, 0.14513874816894531, 0.14258067321777343, 0.1426862030029297, 0.14315866088867188, 0.1438536376953125, 0.14469740295410155, 0.14642166137695312, 0.14453001403808594, 0.14417277526855468, 0.14322886657714845, 0.14377609252929688, 0.14387577819824218, 0.14503762817382812, 0.14447001647949217, 0.14501478576660157, 0.1440911407470703, 0.143499267578125, 0.14340940856933593, 0.14417234802246093, 0.14511293029785155, 0.14631587219238282, 0.14440867614746095, 0.1448283233642578, 0.14360064697265626, 0.1431357421875, 0.1447751007080078, 0.14470745849609376, 0.1455485382080078, 0.15309706115722657, 0.14139187622070312, 0.14157122802734376, 0.1412823944091797, 0.14143052673339843, 0.14214306640625, 0.14870323181152345, 0.1457953643798828, 0.14304269409179687, 0.14187660217285156, 0.14167945861816406, 0.1417554931640625, 0.14370687866210938, 0.1468087615966797, 0.144859130859375, 0.14338621520996095, 0.14236099243164063, 0.1417472381591797, 0.14258476257324218, 0.14486441040039064, 0.14564639282226563, 
0.14478970336914063, 0.14382386779785156, 0.1419252471923828, 0.14229005432128905, 0.14355136108398436, 0.14554829406738282, 0.14500146484375, 0.14621437072753907, 0.1433564453125, 0.14213529968261718, 0.1424752655029297, 0.1438126983642578, 0.14569667053222657, 0.1456818542480469, 0.14504531860351563, 0.14346319580078126, 0.1428164825439453, 0.14243699645996094, 0.14429766845703124, 0.14505180358886718, 0.1457236785888672, 0.14437196350097656, 0.14336384582519532, 0.14425479125976562, 0.14304864501953124, 0.14476480102539063, 0.1452046661376953, 0.14426821899414063, 0.1444085693359375, 0.14378562927246094, 0.14370428466796875, 0.14489817810058594, 0.14448214721679686, 0.1451992645263672, 0.14500250244140625, 0.14500601196289062, 0.1433871307373047, 0.14431639099121094, 0.14396182250976564, 0.1449190673828125, 0.1441544647216797, 0.14485708618164062, 0.15255990600585936, 0.14148051452636717, 0.1425960693359375, 0.14130093383789064, 0.1417442626953125, 0.14271490478515625, 0.14888934326171874, 0.14532675170898438, 0.1423209991455078, 0.1424608917236328, 0.14159103393554687, 0.1423816680908203, 0.14396424865722657, 0.14698617553710938, 0.14518531799316406, 0.14277842712402344, 0.1421528625488281, 0.14270086669921875, 0.1427072296142578, 0.14452940368652345, 0.14571929931640626, 0.1451685791015625, 0.1431092529296875, 0.14294822692871093, 0.14217913818359376, 0.14292533874511718, 0.14451145935058593, 0.14507846069335936, 0.14543238830566407, 0.14287271118164063, 0.14326889038085938, 0.1430734405517578, 0.14329270935058594, 0.1444929656982422, 0.14553651428222655, 0.14552114868164062, 0.1437532196044922, 0.14322694396972657, 0.14340089416503907, 0.14331494140625, 0.14419676208496093, 0.14578569030761718, 0.14455987548828125, 0.14387840270996094, 0.14362185668945313, 0.14368182373046876, 0.1448078155517578, 0.14434725952148436, 0.14496563720703126, 0.14503526306152345, 0.14494924926757813, 0.143519775390625, 0.14319612121582032, 0.14492697143554686, 0.14488316345214844, 0.14514614868164064, 0.1452085723876953, 0.143828857421875, 0.14447833251953124, 0.14434962463378906, 0.14395196533203125, 0.1450160675048828, 0.14412693786621095, 0.15298562622070314, 0.1412021484375, 0.14137705993652344, 0.14218882751464842, 0.14241325378417968, 0.14305381774902343, 0.14896847534179689, 0.14485154724121094, 0.14157020568847656, 0.14132447814941407, 0.14281686401367188, 0.14250384521484374, 0.14419171142578124, 0.1473047637939453, 0.14447410583496093, 0.14197145080566406, 0.1425482940673828, 0.14238156127929688, 0.14281692504882812, 0.14539523315429687, 0.1461627197265625, 0.14449043273925782, 0.1424959716796875, 0.14346630859375, 0.14220109558105468, 0.14314675903320312, 0.14585650634765626, 0.14527040100097657, 0.14413568115234374, 0.14420262145996093, 0.14292807006835936, 0.14285562133789062, 0.14364915466308595, 0.14557347106933594, 0.14451724243164063, 0.14484713745117186, 0.144278564453125, 0.14339117431640624, 0.14354888916015626, 0.1440214385986328, 0.14489971923828124, 0.14484326171875, 0.14487142944335937, 0.14404403686523437, 0.14332261657714843, 0.14349772644042968, 0.14565989685058595, 0.144959716796875, 0.14523574829101563, 0.14418060302734376, 0.14416450500488281, 0.1445672607421875, 0.14356224060058595, 0.14484284973144532, 0.14511964416503906, 0.14515737915039062, 0.14567706298828126, 0.14383120727539062, 0.1444036865234375, 0.14422285461425782, 0.14417100524902343, 0.14397859191894533, 0.14587689208984375, 0.1537274932861328, 0.14221839904785155, 0.14148089599609376, 0.14127708435058595, 
0.14149139404296876, 0.14341818237304688, 0.14954905700683593, 0.1461709747314453, 0.14289308166503906, 0.14155046081542969, 0.14151065063476562, 0.14169088745117187, 0.1443094024658203, 0.14735037231445314, 0.14531558227539063, 0.14356707763671875, 0.14250518798828124, 0.14185491943359374, 0.14207212829589844, 0.14464402770996093, 0.1460284423828125, 0.14594534301757814, 0.14329417419433593, 0.1427021484375, 0.14167904663085937, 0.14278271484375, 0.14458236694335938, 0.14554934692382812, 0.14543043518066406, 0.14341539001464843, 0.14322207641601561, 0.14199058532714845, 0.14450025939941405, 0.14450732421875, 0.14570089721679688, 0.14498374938964845, 0.1436429443359375, 0.1433632354736328, 0.14272598266601563, 0.14463795471191407, 0.14519296264648437, 0.1458214111328125, 0.14435757446289063, 0.14440873718261718, 0.14391859436035156, 0.14295085144042968, 0.1450762939453125, 0.14518368530273437, 0.14538035583496095, 0.14503651428222655, 0.14400326538085936, 0.14310447692871095, 0.14385369873046874, 0.14508358764648438, 0.1452388458251953, 0.14558198547363282, 0.14517584228515626, 0.14474070739746095, 0.14396873474121094, 0.14464825439453124, 0.14421395874023438, 0.1462129211425781, 0.14536044311523438, 0.15422186279296876, 0.14164390563964843, 0.14156040954589844, 0.14159872436523438, 0.1418603515625, 0.14326425170898438, 0.14934848022460936, 0.14580313110351562, 0.14290310668945314, 0.14172384643554686, 0.14165107727050782, 0.14223965454101561, 0.14464950561523438, 0.14722221374511718, 0.1452969970703125, 0.14333378601074218, 0.1419911651611328, 0.14186944580078126, 0.1425451202392578, 0.14526016235351563, 0.1457997131347656, 0.14536294555664062, 0.14381471252441405, 0.14249098205566407, 0.14198431396484376, 0.14328221130371094, 0.14537303161621093, 0.14555357360839843, 0.14508441162109376, 0.14340821838378906, 0.14239382934570313, 0.14248124694824219, 0.14399754333496093, 0.14445135498046874, 0.14612696838378905, 0.14459458923339844, 0.1440809326171875, 0.14290950012207032, 0.14308927917480468, 0.14479434204101563, 0.14468095397949218, 0.14529481506347655, 0.14555804443359374, 0.1438078155517578, 0.14358937072753905, 0.14382560729980468, 0.14509671020507814, 0.14554521179199217, 0.14433453369140625, 0.14462416076660156, 0.1444595489501953, 0.1435914306640625, 0.1444085693359375, 0.1444552917480469, 0.1447178192138672, 0.14495578002929688, 0.14533631896972657, 0.14475273132324218, 0.14511033630371092, 0.14381936645507812, 0.14488128662109376, 0.14536895751953124, 0.14482838439941406, 0.1558402557373047, 0.14168550109863282, 0.14155938720703126, 0.14133404541015626, 0.141491455078125, 0.14241958618164063, 0.14959161376953126, 0.14631753540039064, 0.14200650024414063, 0.1416884765625, 0.14160850524902344, 0.1424034881591797, 0.14422889709472655, 0.14717987060546875, 0.1455615997314453, 0.14257586669921876, 0.14163328552246093, 0.14178250122070313, 0.14267190551757813, 0.1454535675048828, 0.1462494659423828, 0.14526693725585937, 0.14329443359375, 0.14200425720214843, 0.14180157470703125, 0.1435219268798828, 0.14548768615722657, 0.14559432983398438, 0.14559642028808595, 0.14323721313476562, 0.14280181884765625, 0.14199623107910156, 0.14433094787597656, 0.14562342834472655, 0.14586611938476562, 0.1447895965576172, 0.14426547241210938, 0.14382748413085938, 0.14249574279785157, 0.14421810913085936, 0.14534042358398438, 0.1460121612548828, 0.14489584350585938, 0.14478536987304688, 0.14392282104492188, 0.14265196228027344, 0.14388153076171875, 0.14570918273925781, 0.14569468688964843, 
0.14485693359375, 0.1442166748046875, 0.1433849334716797, 0.14386976623535155, 0.14483433532714843, 0.145850341796875, 0.14496751403808594, 0.14506419372558593, 0.14378204345703124, 0.14393548583984375, 0.14362214660644532, 0.14465434265136717, 0.1461411895751953, 0.14632867431640625]",tokens/s,6.94376896047868,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2132.529152,11826.823168,0.0,11431.575552,10953.091072,s,1,22.00828515625,22.00828515625,0.0,22.00828515625,22.00828515625,22.00828515625,22.00828515625,[22.00828515625],,kWh,0.0004223264299625043,4.657848849772365e-05,0.0001591851273479994,0.0006280900458082273,,MB,1606.619136,12722.307072,0.0,12314.476544,11624.128512,s,10,18.991810424804687,1.899181042480469,0.006285078080570184,1.9012791137695313,1.9044025756835938,1.9050386291503907,1.9055474719238281,"[1.883064453125, 1.8956903076171876, 1.8957227783203126, 1.8987581787109375, 1.89982373046875, 1.9027657470703125, 1.9033148193359375, 1.90426123046875, 1.9027344970703124, 1.9056746826171875]",tokens/s,134.79494280631894,kWh,5.527992636249715e-05,6.09700754509649e-06,3.678297387080254e-05,9.815990777839619e-05,tokens/kWh,2607989.410278791,MB,1610.563584,12724.404224,0.0,12316.573696,11624.131072,s,10,93.7228251953125,9.37228251953125,0.025483996830139362,9.3828662109375,9.3948287109375,9.39677373046875,9.39832974609375,"[9.3171259765625, 9.34112109375, 9.3562275390625, 9.366119140625, 9.38015625, 9.385576171875, 9.390072265625, 9.394396484375, 9.3933115234375, 9.39871875]",tokens/s,6.721948454788035,kWh,0.0002742070814204203,3.024713545027438e-05,0.00018208203455439553,0.00048653625142509014,tokens/kWh,129486.75420478886,,s,630,93.71858575439465,0.14875965992761037,0.0018670747989621259,0.14857625579833983,0.15037186431884766,0.1509479766845703,0.15800266555786133,"[0.1578668212890625, 0.14541209411621095, 0.1457986602783203, 0.14605926513671874, 0.1453059539794922, 0.1458253173828125, 0.15310092163085937, 0.14805197143554688, 0.14697065734863282, 0.14716067504882813, 0.1450806427001953, 0.14627027893066405, 0.14753695678710937, 0.1493697967529297, 0.14764236450195312, 0.1489152069091797, 0.14611293029785155, 0.14620063781738282, 0.14700703430175782, 0.14798915100097657, 0.14824479675292968, 0.14863360595703126, 0.14670236206054688, 0.1462451171875, 0.14616213989257812, 0.1480745849609375, 0.1482296600341797, 0.1492954864501953, 0.1476485137939453, 0.14700953674316405, 0.14799667358398438, 0.1458524169921875, 0.14820358276367188, 0.14860841369628905, 0.1489126739501953, 0.14828134155273437, 0.14825471496582032, 0.14572134399414063, 0.14747238159179688, 0.1499832305908203, 0.14793516540527343, 0.14889718627929688, 0.1484554901123047, 0.1467065887451172, 0.14656761169433594, 0.14847109985351561, 0.1483701171875, 0.1493987274169922, 0.14844192504882814, 0.14801837158203124, 0.14762471008300782, 0.14702188110351563, 0.14848345947265626, 0.14958409118652344, 
0.14734915161132814, 0.14943206787109375, 0.14713548278808594, 0.14738394165039062, 0.1484449920654297, 0.14857887268066405, 0.14798460388183593, 0.1500403594970703, 0.1471443786621094, 0.157739013671875, 0.14625526428222657, 0.14730096435546874, 0.14523155212402344, 0.14586297607421875, 0.14646885681152344, 0.15389430236816407, 0.14810812377929689, 0.14865177917480468, 0.14545510864257813, 0.1459568634033203, 0.14678118896484374, 0.14951321411132812, 0.14969378662109376, 0.14967196655273438, 0.14652275085449218, 0.146429443359375, 0.14754412841796874, 0.14606150817871094, 0.14944248962402343, 0.15077203369140624, 0.14768975830078124, 0.14713536071777344, 0.14652297973632813, 0.1462025604248047, 0.1475850830078125, 0.14964041137695314, 0.14925868225097655, 0.14944496154785156, 0.14662185668945313, 0.1462395477294922, 0.14750338745117186, 0.1484638671875, 0.1490513916015625, 0.15031826782226562, 0.14733544921875, 0.14775144958496095, 0.14702386474609375, 0.14782669067382812, 0.148791015625, 0.14926876831054686, 0.14798643493652344, 0.1500037078857422, 0.14678016662597657, 0.14770314025878906, 0.14914802551269532, 0.1483410186767578, 0.14852272033691405, 0.14906600952148438, 0.14852854919433595, 0.14907656860351562, 0.14708122253417968, 0.14872735595703124, 0.1490979766845703, 0.1485731201171875, 0.14914968872070314, 0.14837554931640626, 0.14685600280761718, 0.14935443115234376, 0.14855081176757812, 0.1491198425292969, 0.14914764404296876, 0.1484083251953125, 0.15698739624023436, 0.1463190155029297, 0.14816015625, 0.1452280578613281, 0.1454698486328125, 0.14778080749511718, 0.15338783264160155, 0.148872314453125, 0.14789926147460938, 0.14662246704101561, 0.14626815795898437, 0.14814002990722655, 0.14752153015136718, 0.15078604125976564, 0.1497046661376953, 0.14700108337402343, 0.14695452880859375, 0.14835098266601562, 0.1465528259277344, 0.14892031860351562, 0.15001365661621094, 0.148453125, 0.14939535522460937, 0.1458305206298828, 0.147615234375, 0.1497912292480469, 0.14783247375488281, 0.1486422119140625, 0.15010809326171876, 0.14765802001953124, 0.14784707641601563, 0.14703085327148438, 0.14873190307617187, 0.15023922729492187, 0.1482073211669922, 0.14931936645507812, 0.14695074462890625, 0.1481719055175781, 0.14858438110351563, 0.14817170715332031, 0.14839132690429688, 0.15061024475097656, 0.14844342041015626, 0.14807472229003907, 0.14878448486328125, 0.147706298828125, 0.14908546447753906, 0.1482872314453125, 0.1488506622314453, 0.15016563415527343, 0.14749990844726563, 0.14795529174804686, 0.14851461791992188, 0.1486037139892578, 0.14991500854492187, 0.14820147705078124, 0.1487445068359375, 0.14731590270996095, 0.147917724609375, 0.14872108459472655, 0.14986502075195313, 0.1490370635986328, 0.1496309814453125, 0.15805815124511718, 0.14560710144042968, 0.1467632598876953, 0.14735002136230468, 0.14718975830078124, 0.146513916015625, 0.15251251220703124, 0.14778976440429686, 0.1474512023925781, 0.14939622497558594, 0.14654197692871093, 0.14752616882324218, 0.1492071075439453, 0.14929075622558594, 0.1484126739501953, 0.1469706268310547, 0.14818861389160157, 0.14991593933105468, 0.147013916015625, 0.1480990753173828, 0.15048854064941405, 0.14776963806152343, 0.14847821044921874, 0.1482629089355469, 0.14788995361328125, 0.14904920959472656, 0.14798883056640624, 0.14853453063964844, 0.15028915405273438, 0.14759730529785156, 0.14849842834472657, 0.14829977416992188, 0.14836531066894532, 0.1494530487060547, 0.14814183044433593, 0.1485455322265625, 0.14985011291503905, 0.1477181396484375, 
0.14847795104980469, 0.14866998291015626, 0.14788861083984375, 0.14911692810058594, 0.14912307739257813, 0.1486840057373047, 0.14943650817871093, 0.14737274169921874, 0.14937692260742189, 0.14788009643554687, 0.14844511413574218, 0.15067123413085937, 0.148555908203125, 0.14955711364746094, 0.1474397430419922, 0.14872157287597657, 0.15056291198730468, 0.14832025146484376, 0.1486929931640625, 0.14895513916015626, 0.1483014678955078, 0.15027235412597656, 0.14828924560546874, 0.14915525817871095, 0.14872198486328125, 0.15993215942382813, 0.14751708984375, 0.14737619018554687, 0.14643898010253906, 0.14692965698242189, 0.14870527648925783, 0.15146803283691407, 0.1508106231689453, 0.14653599548339843, 0.14792678833007813, 0.14833261108398438, 0.14645660400390625, 0.1486231689453125, 0.15122511291503907, 0.14828544616699219, 0.14971856689453125, 0.14693621826171874, 0.14740486145019532, 0.14841036987304687, 0.1483357391357422, 0.15048361206054686, 0.14838829040527343, 0.1484613494873047, 0.14987858581542968, 0.14722682189941405, 0.1478544616699219, 0.1494486083984375, 0.1489008331298828, 0.15049955749511718, 0.14792291259765625, 0.14841976928710937, 0.1482860870361328, 0.1481871337890625, 0.15021670532226564, 0.14863526916503905, 0.14864012145996094, 0.14857420349121095, 0.14819532775878907, 0.14851890563964842, 0.1487337646484375, 0.1484781494140625, 0.1506299591064453, 0.14853575134277344, 0.14922752380371093, 0.14804893493652344, 0.14816458129882812, 0.15020541381835936, 0.1488690185546875, 0.14906381225585938, 0.14823628234863281, 0.14857830810546874, 0.1500584259033203, 0.1488143310546875, 0.14919686889648437, 0.14810931396484375, 0.1489304656982422, 0.14990911865234374, 0.1488056640625, 0.14905699157714844, 0.14861817932128907, 0.14947535705566406, 0.14953855895996093, 0.14837910461425782, 0.16095333862304687, 0.14748570251464843, 0.14649932861328124, 0.14717747497558595, 0.14834857177734376, 0.1456494445800781, 0.15342060852050782, 0.14872166442871093, 0.1476091766357422, 0.14962693786621092, 0.14657160949707032, 0.1472184295654297, 0.14970675659179689, 0.14991352844238282, 0.1497290496826172, 0.14698687744140626, 0.14824412536621093, 0.1495392608642578, 0.14683782958984376, 0.14890412902832031, 0.14968608093261718, 0.14815635681152345, 0.15051712036132814, 0.147901123046875, 0.14726499938964843, 0.14922921752929688, 0.14900070190429687, 0.14986483764648437, 0.14855760192871093, 0.14835952758789062, 0.14913690185546874, 0.14779592895507812, 0.1493282928466797, 0.14874188232421875, 0.1486168975830078, 0.14991007995605468, 0.14843904113769532, 0.1491183624267578, 0.14833721923828125, 0.1489775390625, 0.14949392700195313, 0.14821171569824218, 0.14970841979980468, 0.1482710418701172, 0.1488501739501953, 0.14969952392578126, 0.14888345336914063, 0.14876057434082032, 0.14870527648925783, 0.1490145263671875, 0.1499129638671875, 0.14834136962890626, 0.14980709838867187, 0.1486069793701172, 0.14851887512207032, 0.14970474243164061, 0.14883634948730468, 0.15015936279296874, 0.14893670654296876, 0.1485701141357422, 0.14867625427246095, 0.14870358276367188, 0.15068365478515625, 0.15983421325683594, 0.1475067901611328, 0.14824703979492188, 0.14649754333496093, 0.14741299438476563, 0.14839590454101562, 0.15120375061035157, 0.15081289672851564, 0.14701158142089843, 0.14820565795898438, 0.14740879821777345, 0.14737788391113282, 0.14829977416992188, 0.1518011474609375, 0.1477840576171875, 0.14971731567382812, 0.14720643615722656, 0.14742323303222657, 0.14916812133789062, 0.14943846130371094, 
0.1512489013671875, 0.14784259033203126, 0.14839447021484375, 0.14763357543945313, 0.14719418334960938, 0.14937635803222657, 0.15040777587890625, 0.14934255981445313, 0.1489320068359375, 0.14788652038574218, 0.1487787170410156, 0.14705094909667968, 0.14965475463867187, 0.15120005798339844, 0.14869961547851562, 0.15063040161132812, 0.1474949188232422, 0.14834072875976562, 0.14789427185058593, 0.150466552734375, 0.15071846008300782, 0.1488343048095703, 0.14881129455566405, 0.1471453399658203, 0.14837539672851563, 0.1502578582763672, 0.14976937866210938, 0.14887388610839844, 0.14923139953613282, 0.14886729431152343, 0.14908758544921874, 0.14841923522949219, 0.1494950714111328, 0.14941462707519532, 0.1492295684814453, 0.14889503479003907, 0.14904595947265625, 0.14950726318359375, 0.14831494140625, 0.14960435485839843, 0.14936679077148438, 0.14997299194335936, 0.14920399475097657, 0.15953305053710937, 0.14723043823242188, 0.14750968933105468, 0.14654812622070312, 0.14725692749023436, 0.148378662109375, 0.15187120056152345, 0.1507197723388672, 0.14664691162109375, 0.14829779052734374, 0.14833689880371093, 0.1466343994140625, 0.14940269470214843, 0.1504930877685547, 0.14905958557128907, 0.14892236328125, 0.14779705810546875, 0.14837446594238282, 0.1477242889404297, 0.14979644775390624, 0.15104365539550782, 0.14844195556640624, 0.1482395477294922, 0.14779061889648437, 0.14775299072265624, 0.14894688415527343, 0.1495347900390625, 0.1491763916015625, 0.14946870422363281, 0.147685791015625, 0.14830738830566406, 0.1484125061035156, 0.14931709289550782, 0.15089881896972657, 0.14942912292480467, 0.14970675659179689, 0.146998779296875, 0.14823065185546874, 0.15011839294433593, 0.1492991943359375, 0.14949334716796875, 0.14953712463378907, 0.14853330993652344, 0.14871128845214843, 0.1482254333496094, 0.14880216979980468, 0.14944674682617187, 0.1495451202392578, 0.150596923828125, 0.14859494018554686, 0.150186279296875, 0.14766831970214844, 0.14924656677246093, 0.14919480895996093, 0.1499402313232422, 0.15055258178710937, 0.14867251586914063, 0.14881190490722657, 0.14751731872558593, 0.14920256042480468, 0.15068588256835938, 0.15008607482910155, 0.15136093139648438, 0.15862098693847657, 0.14686630249023438, 0.1481856689453125, 0.14656716918945312, 0.1464617919921875, 0.14816569519042969, 0.1531820526123047, 0.14896307373046874, 0.1480133056640625, 0.14709735107421876, 0.1475536346435547, 0.14741334533691405, 0.14935478210449218, 0.1525148468017578, 0.14903910827636718, 0.14906777954101563, 0.14587289428710937, 0.14798841857910155, 0.14929046630859374, 0.1497379913330078, 0.14989935302734375, 0.149032958984375, 0.14734693908691407, 0.14878115844726564, 0.14779023742675781, 0.14837350463867188, 0.1515166473388672, 0.14926626586914063, 0.1508338623046875, 0.1469246368408203, 0.14834918212890624, 0.14894966125488282, 0.14850662231445313, 0.1506078643798828, 0.1495548553466797, 0.14880546569824218, 0.14855833435058594, 0.14803135681152343, 0.1485489959716797, 0.14964198303222656, 0.14907725524902343, 0.15048069763183594, 0.14822898864746092, 0.14866233825683595, 0.1474140167236328, 0.14947021484375, 0.15194090270996094, 0.14911509704589843, 0.14987059020996094, 0.14783238220214845, 0.1486299591064453, 0.1496678466796875, 0.14896258544921875, 0.15022265625, 0.14943904113769532, 0.14874453735351562, 0.14895513916015626, 0.1489469451904297, 0.14966989135742187, 0.14976141357421874, 0.1493059844970703, 0.15016978454589844, 0.14903482055664063, 0.15973487854003907, 0.14637324523925782, 0.14639094543457032, 
0.14741127014160157, 0.14830918884277344, 0.14672773742675782, 0.15436618041992187, 0.14774864196777343, 0.14762745666503907, 0.14828729248046876, 0.1473115234375, 0.14725436401367187, 0.15095639038085937, 0.1503678741455078, 0.15055474853515624, 0.14770399475097656, 0.1484169921875, 0.14728707885742187, 0.14742947387695313, 0.15051046752929687, 0.15043075561523436, 0.1493553924560547, 0.14874227905273438, 0.14767417907714844, 0.14769244384765626, 0.14814413452148437, 0.14986367797851563, 0.1510996551513672, 0.14797013854980468, 0.14934672546386718, 0.14725030517578125, 0.14796890258789064, 0.1509396514892578, 0.1497681884765625, 0.15033958435058595, 0.14846937561035156, 0.14851036071777343, 0.14906851196289062, 0.14833868408203124, 0.14983576965332032, 0.14883021545410155, 0.15006271362304688, 0.14914802551269532, 0.14860858154296874, 0.14947573852539062, 0.14853289794921876, 0.1492545623779297, 0.14992778015136718, 0.14949594116210937, 0.15028633117675783, 0.14834063720703125, 0.14954505920410155, 0.148279296875, 0.1499334716796875, 0.1506185302734375, 0.14935250854492188, 0.150136962890625, 0.14760557556152343, 0.14960633850097657, 0.14959222412109374, 0.1496629180908203, 0.1509547882080078, 0.14948953247070312]",tokens/s,6.7222525279139616,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,824.107008,554.631168,0.0,159.383552,142.313472,s,1,8.16002978515625,8.16002978515625,0.0,8.16002978515625,8.16002978515625,8.16002978515625,8.16002978515625,[8.16002978515625],,kWh,2.1358915637529205e-05,2.3453210161104453e-06,8.320006655987422e-06,3.2024243309627075e-05,,MB,1142.02624,628.031488,0.0,220.20096,185.324544,s,18,0.20789641571044926,0.011549800872802735,0.00010180969120244798,0.011525103569030762,0.011686764812469483,0.01177346272468567,0.011777770700454711,"[0.011513312339782715, 0.011650015830993652, 0.011535327911376953, 0.011778847694396972, 0.01148038387298584, 0.011480416297912598, 0.011419615745544433, 0.011519935607910156, 0.01162441635131836, 0.011513759613037109, 0.011450624465942383, 0.011772512435913086, 0.011460927963256836, 0.011565407752990723, 0.011571999549865722, 0.011421536445617676, 0.011530271530151367, 0.011607104301452637]",tokens/s,22164.884297081193,kWh,3.491230954794669e-07,3.850182628413345e-08,1.9269818711959725e-07,5.803231088831976e-07,tokens/kWh,441133561.7715776,MB,1153.683456,632.225792,0.0,224.395264,185.327104,s,18,10.093456604003906,0.5607475891113282,0.0033757969675585003,0.5597496948242187,0.5635010986328125,0.5675152709960938,0.5706674096679688,"[0.5616351318359375, 0.5596442260742187, 0.5714554443359375, 0.5620497436523437, 0.5597603759765625, 0.559658935546875, 0.5604142456054687, 0.5586361083984375, 0.558799560546875, 0.55605615234375, 0.5620787353515625, 0.5601438598632813, 0.5668199462890625, 0.5591299438476562, 0.5607298583984375, 0.5581077880859375, 0.5585975341796875, 
0.559739013671875]",tokens/s,112.35001491462904,kWh,1.649760427512293e-05,1.8194083952481014e-06,6.559899807548376e-06,2.4876912477919406e-05,tokens/kWh,2532468.6114451867,,s,1134,10.083655643463134,0.008892112560373136,0.00027634942801051267,0.00885923194885254,0.009003254222869874,0.009087467432022095,0.009776603565216065,"[0.008827327728271484, 0.00877177619934082, 0.008989760398864746, 0.009033984184265136, 0.008794655799865722, 0.008816479682922363, 0.008876352310180665, 0.008869888305664063, 0.008881343841552734, 0.008792767524719238, 0.008884160041809083, 0.008835264205932617, 0.008729920387268067, 0.008841119766235351, 0.00900380802154541, 0.008854592323303222, 0.008901568412780761, 0.008850751876831054, 0.008829376220703126, 0.009147647857666016, 0.00880742359161377, 0.00877126407623291, 0.008735039710998534, 0.008696000099182128, 0.008687423706054688, 0.00863385581970215, 0.008667648315429688, 0.008566783905029298, 0.008638463973999023, 0.009177087783813476, 0.008740927696228027, 0.008839103698730469, 0.009045951843261719, 0.008806464195251464, 0.008869888305664063, 0.009034815788269044, 0.008768447875976563, 0.00900607967376709, 0.00897107219696045, 0.008800224304199218, 0.00881715202331543, 0.008860992431640626, 0.008790528297424317, 0.009363360404968261, 0.009117695808410644, 0.009712639808654786, 0.009779359817504883, 0.009066656112670898, 0.008972991943359375, 0.00894927978515625, 0.008993375778198242, 0.008892288208007812, 0.008949312210083007, 0.00884342384338379, 0.008926912307739258, 0.008872511863708497, 0.008917247772216797, 0.008847135543823242, 0.00900432014465332, 0.008907711982727051, 0.00882256031036377, 0.008921088218688965, 0.008963232040405273, 0.008923359870910644, 0.00890345573425293, 0.00894156837463379, 0.00889241600036621, 0.008781824111938476, 0.00889241600036621, 0.009076607704162598, 0.008878208160400391, 0.008916576385498047, 0.00904643154144287, 0.00897433567047119, 0.008921088218688965, 0.010473471641540527, 0.008947711944580078, 0.008829119682312012, 0.008828512191772461, 0.00885372829437256, 0.008855551719665527, 0.008859647750854491, 0.00886905574798584, 0.008876864433288573, 0.008878016471862793, 0.008851327896118165, 0.008784064292907715, 0.008843551635742188, 0.00886684799194336, 0.00877184009552002, 0.008739263534545898, 0.00877184009552002, 0.008840000152587891, 0.008836031913757324, 0.008847135543823242, 0.008880640029907226, 0.008998623847961426, 0.008953599929809571, 0.008887968063354491, 0.008864224433898926, 0.00876966381072998, 0.00881868839263916, 0.008720383644104004, 0.008717823982238769, 0.008655136108398437, 0.008669343948364258, 0.008658335685729981, 0.008698080062866211, 0.009064895629882812, 0.008689663887023925, 0.008834303855895996, 0.008755968093872071, 0.008759008407592774, 0.008804191589355468, 0.008732928276062011, 0.00879190444946289, 0.008806719779968261, 0.008810527801513672, 0.00890067195892334, 0.008879648208618165, 0.008845408439636231, 0.008947903633117676, 0.008852864265441894, 0.008858048439025878, 0.008844736099243165, 0.00905673599243164, 0.008944607734680176, 0.008925439834594727, 0.009542880058288574, 0.008918784141540527, 0.008882687568664551, 0.00898691177368164, 0.008865792274475098, 0.008889535903930663, 0.008882975578308106, 0.008945440292358399, 0.008851712226867675, 0.00890060806274414, 0.008814592361450196, 0.008795455932617187, 0.00886240005493164, 0.008929023742675781, 0.008859135627746583, 0.008981247901916505, 0.01564243221282959, 0.008941760063171387, 0.00898252773284912, 0.009140255928039551, 
0.0088406400680542, 0.00885750389099121, 0.008892959594726562, 0.008814080238342285, 0.008845824241638184, 0.008867936134338379, 0.008824576377868652, 0.008841183662414551, 0.008964384078979492, 0.00883407974243164, 0.008981472015380859, 0.008882176399230958, 0.008962047576904298, 0.008861696243286133, 0.008931679725646973, 0.008810144424438476, 0.00881884765625, 0.008891712188720704, 0.008826848030090331, 0.009091551780700684, 0.009545696258544923, 0.008978303909301757, 0.008984895706176757, 0.009049663543701172, 0.009135583877563476, 0.008991488456726074, 0.009088319778442383, 0.009071136474609375, 0.009021311759948731, 0.008978879928588868, 0.00930406379699707, 0.009080991744995118, 0.009072480201721191, 0.008873984336853028, 0.008962112426757813, 0.008892352104187012, 0.008900128364562988, 0.0088536958694458, 0.008841024398803712, 0.009269887924194337, 0.008907903671264648, 0.008943615913391113, 0.008919039726257324, 0.008972352027893066, 0.008912832260131835, 0.008937024116516113, 0.008884736061096191, 0.008996064186096192, 0.008946399688720703, 0.00901529598236084, 0.009771007537841797, 0.011583456039428711, 0.009238559722900391, 0.008893664360046388, 0.008876607894897462, 0.008851200103759766, 0.008853983879089356, 0.008824831962585449, 0.009244928359985352, 0.00882256031036377, 0.008754783630371094, 0.00873305606842041, 0.009052160263061524, 0.008803999900817872, 0.008765791893005371, 0.008787039756774903, 0.008747296333312988, 0.008759103775024413, 0.008888863563537597, 0.008784128189086913, 0.008765119552612305, 0.008917344093322755, 0.008839167594909669, 0.00910758399963379, 0.009039104461669922, 0.00901091194152832, 0.008965056419372558, 0.008880096435546875, 0.008847359657287598, 0.009020832061767578, 0.008855263710021972, 0.008776576042175292, 0.008904704093933105, 0.008880000114440918, 0.009015423774719238, 0.008990240097045899, 0.009058783531188965, 0.008992192268371581, 0.008770112037658692, 0.008802304267883301, 0.008654623985290528, 0.008637727737426758, 0.008632384300231933, 0.008718815803527832, 0.008596192359924317, 0.008613632202148438, 0.00864230442047119, 0.008566975593566895, 0.008601632118225098, 0.008706015586853027, 0.00903987216949463, 0.008644607543945313, 0.008755040168762208, 0.00872704029083252, 0.008990431785583496, 0.008894463539123536, 0.008890368461608887, 0.008816639900207519, 0.008843296051025391, 0.00880793571472168, 0.008843744277954101, 0.00882688045501709, 0.00881868839263916, 0.008865792274475098, 0.008790016174316406, 0.008843296051025391, 0.008754976272583008, 0.008857791900634765, 0.008730624198913574, 0.008810720443725585, 0.008734496116638183, 0.008731679916381836, 0.008725472450256348, 0.008817888259887696, 0.008794912338256836, 0.008800607681274414, 0.008943264007568359, 0.00895792007446289, 0.009414688110351562, 0.008965888023376466, 0.009216256141662597, 0.00927945613861084, 0.009127967834472657, 0.009128191947937011, 0.009047200202941894, 0.008927840232849121, 0.008790016174316406, 0.009003007888793945, 0.008970239639282226, 0.008945664405822755, 0.009082079887390137, 0.00897103977203369, 0.008902303695678711, 0.008871711730957032, 0.009026111602783203, 0.008838879585266112, 0.008675104141235351, 0.008798815727233887, 0.008785344123840333, 0.008810303688049316, 0.008805055618286133, 0.008773599624633789, 0.008855551719665527, 0.008751104354858399, 0.008803808212280273, 0.008859423637390137, 0.008760064125061036, 0.008898783683776855, 0.008922431945800781, 0.008839008331298828, 0.00889731216430664, 0.008777567863464356, 
0.008803359985351563, 0.00882540798187256, 0.008745375633239747, 0.00871628761291504, 0.008738816261291504, 0.008753439903259278, 0.009354751586914062, 0.008805791854858398, 0.009441375732421875, 0.0089303035736084, 0.009033439636230468, 0.008884063720703125, 0.008892576217651367, 0.00885366439819336, 0.008826623916625977, 0.008822431564331054, 0.008878527641296386, 0.008859904289245605, 0.008840991973876952, 0.00884937572479248, 0.008822848320007324, 0.008826175689697265, 0.008810336112976075, 0.008857824325561524, 0.00889913558959961, 0.00886963176727295, 0.008868127822875977, 0.008960127830505371, 0.008856896400451661, 0.008800992012023926, 0.008933247566223145, 0.00894108772277832, 0.008915295600891113, 0.009019647598266602, 0.008843071937561035, 0.009033727645874023, 0.009057696342468263, 0.008942399978637695, 0.008988063812255859, 0.008950143814086915, 0.008949695587158203, 0.0089682559967041, 0.008946751594543457, 0.008973504066467285, 0.008947296142578125, 0.008927392005920411, 0.008841216087341308, 0.00875449562072754, 0.008785759925842285, 0.008739839553833008, 0.00876527976989746, 0.00869164752960205, 0.00879417610168457, 0.008816096305847168, 0.008792767524719238, 0.008768511772155761, 0.008774208068847657, 0.00879967975616455, 0.008784735679626465, 0.008750111579895019, 0.008845888137817382, 0.008726335525512696, 0.00882534408569336, 0.00875539207458496, 0.008730560302734376, 0.008805631637573242, 0.008881152153015137, 0.008762944221496581, 0.008781824111938476, 0.008816736221313477, 0.008816543579101563, 0.008964096069335938, 0.008833024024963379, 0.008870207786560058, 0.00882470417022705, 0.00885331153869629, 0.008822784423828126, 0.009063776016235352, 0.00885212802886963, 0.008962047576904298, 0.008943072319030762, 0.008886048316955567, 0.008929471969604492, 0.008898880004882813, 0.00895740795135498, 0.008868767738342285, 0.008843135833740235, 0.008888480186462402, 0.009006943702697754, 0.008996864318847657, 0.008945247650146485, 0.008948479652404785, 0.009033087730407716, 0.008877759933471679, 0.008829536437988282, 0.00882688045501709, 0.008742143630981445, 0.00882259178161621, 0.008837663650512695, 0.008807007789611816, 0.008851231575012207, 0.008806431770324707, 0.00889241600036621, 0.0088056640625, 0.008794783592224121, 0.008788064002990722, 0.008822751998901366, 0.008876031875610351, 0.008882176399230958, 0.009013248443603515, 0.00885756778717041, 0.008829312324523926, 0.0087807035446167, 0.008901375770568848, 0.00882688045501709, 0.008787967681884766, 0.008830656051635742, 0.009017663955688477, 0.009046015739440917, 0.008798272132873534, 0.009377440452575684, 0.009037983894348145, 0.008846847534179688, 0.008891008377075195, 0.008884223937988281, 0.008809696197509766, 0.0088307523727417, 0.008782848358154297, 0.0090316801071167, 0.008933664321899414, 0.008892127990722656, 0.008880127906799316, 0.008865023612976074, 0.008858367919921875, 0.008848992347717285, 0.008860063552856446, 0.008855487823486328, 0.008915007591247559, 0.008814208030700684, 0.008803008079528808, 0.00877667236328125, 0.00886678409576416, 0.008859392166137695, 0.008979999542236328, 0.008874272346496582, 0.0088536958694458, 0.008996864318847657, 0.008930591583251954, 0.008913311958312988, 0.008812959671020509, 0.008824735641479493, 0.008816639900207519, 0.008822400093078614, 0.008834815979003907, 0.008777471542358398, 0.008803135871887207, 0.008816703796386719, 0.008827103614807128, 0.00881436824798584, 0.008751456260681151, 0.008793760299682617, 0.008722432136535644, 0.008806400299072266, 
0.008835071563720704, 0.008831328392028808, 0.008854559898376465, 0.008728799819946289, 0.008886688232421875, 0.008867136001586914, 0.008954400062561036, 0.008923295974731446, 0.008923135757446288, 0.008882176399230958, 0.008880127906799316, 0.008884287834167481, 0.008819840431213378, 0.008980319976806641, 0.00883187198638916, 0.008836576461791992, 0.008835712432861327, 0.00880844783782959, 0.00892518424987793, 0.008855263710021972, 0.008912384033203125, 0.008897215843200683, 0.008822879791259765, 0.00883523178100586, 0.008930879592895508, 0.008825119972229003, 0.008970239639282226, 0.008828927993774414, 0.008879903793334962, 0.008900832176208497, 0.00891808032989502, 0.00899350357055664, 0.008906304359436035, 0.008827648162841796, 0.008885951995849609, 0.008921088218688965, 0.008865311622619628, 0.008815072059631347, 0.008840736389160156, 0.008843232154846191, 0.008816991806030273, 0.008838399887084961, 0.00878275203704834, 0.008769536018371582, 0.008765439987182617, 0.008830207824707031, 0.008827327728271484, 0.008775296211242676, 0.008790719985961913, 0.008804512023925782, 0.008855392456054687, 0.008845312118530273, 0.008820735931396484, 0.008801695823669433, 0.008860095977783203, 0.00881065559387207, 0.008733856201171874, 0.008751520156860352, 0.008747776031494141, 0.008746432304382325, 0.00874726390838623, 0.008765439987182617, 0.008765439987182617, 0.008773632049560547, 0.008869888305664063, 0.009381600379943848, 0.008839232444763184, 0.008732895851135253, 0.009042176246643066, 0.008697600364685058, 0.008697855949401855, 0.008732864379882813, 0.008706944465637207, 0.008804384231567383, 0.008892576217651367, 0.009902848243713378, 0.00893727970123291, 0.008773695945739747, 0.00888435173034668, 0.008863391876220704, 0.008838720321655273, 0.008897024154663086, 0.00892460823059082, 0.00887440013885498, 0.008853119850158691, 0.00886070442199707, 0.008898336410522461, 0.008964384078979492, 0.008924896240234376, 0.008889439582824708, 0.008935808181762695, 0.008935711860656738, 0.00889030361175537, 0.008878399848937988, 0.008893952369689942, 0.008876383781433106, 0.008824480056762696, 0.008828927993774414, 0.00879372787475586, 0.008812640190124512, 0.00882697582244873, 0.009654175758361817, 0.008836000442504884, 0.008877311706542969, 0.008890848159790039, 0.008837120056152344, 0.008845024108886719, 0.008800224304199218, 0.008824895858764649, 0.008923392295837402, 0.008871935844421386, 0.008851455688476563, 0.00880844783782959, 0.008849056243896484, 0.008798144340515136, 0.00889510440826416, 0.008784735679626465, 0.00875820827484131, 0.008759296417236329, 0.008685248374938965, 0.008675904273986816, 0.008654591560363769, 0.008642239570617676, 0.008563008308410645, 0.008574591636657716, 0.008611264228820801, 0.008561599731445312, 0.008588543891906738, 0.00865187168121338, 0.008712127685546875, 0.008658368110656739, 0.008655136108398437, 0.0086364164352417, 0.008722496032714844, 0.008745247840881348, 0.00877952003479004, 0.009174943923950196, 0.008886272430419923, 0.008904704093933105, 0.008889535903930663, 0.00885638427734375, 0.008840928077697754, 0.008956255912780762, 0.008970175743103027, 0.008830975532531739, 0.00883407974243164, 0.008775775909423827, 0.008915840148925781, 0.008893728256225586, 0.008901023864746093, 0.008967488288879395, 0.008889344215393067, 0.008828031539916991, 0.008864319801330567, 0.00886195182800293, 0.008783935546875, 0.008846336364746094, 0.008839167594909669, 0.008792063713073731, 0.008781375885009766, 0.008868288040161133, 0.008781824111938476, 
0.008811903953552245, 0.008880191802978516, 0.010013504028320313, 0.0090764799118042, 0.008869791984558105, 0.00885155200958252, 0.00896985626220703, 0.0088307523727417, 0.008839776039123535, 0.008865792274475098, 0.008890368461608887, 0.008853631973266602, 0.008830016136169433, 0.009368288040161132, 0.00883516788482666, 0.008863743782043456, 0.008859647750854491, 0.008914943695068359, 0.008871935844421386, 0.008955007553100585, 0.00888044834136963, 0.00879379177093506, 0.008827775955200194, 0.008833024024963379, 0.00937382411956787, 0.008836159706115723, 0.008903103828430176, 0.008845696449279785, 0.00885331153869629, 0.00876972770690918, 0.008732768058776855, 0.008755104064941406, 0.008815839767456054, 0.008774335861206056, 0.008748127937316894, 0.008746047973632813, 0.008850879669189453, 0.0089169282913208, 0.009007679939270019, 0.008895551681518555, 0.008997983932495117, 0.008939359664916992, 0.008804351806640624, 0.008865280151367188, 0.008886783599853516, 0.008782943725585938, 0.008885472297668458, 0.00883619213104248, 0.008804960250854492, 0.008890368461608887, 0.008799712181091308, 0.008917247772216797, 0.008876128196716309, 0.008791359901428222, 0.008860671997070312, 0.009241855621337891, 0.008879872322082519, 0.009955424308776856, 0.008900927543640138, 0.008900287628173829, 0.008841407775878907, 0.00884921646118164, 0.00890595245361328, 0.008810591697692872, 0.00888697624206543, 0.008863743782043456, 0.008838239669799805, 0.008931391716003418, 0.00882534408569336, 0.008855392456054687, 0.008829216003417968, 0.00878764820098877, 0.008999456405639648, 0.00881049633026123, 0.008799615859985352, 0.008773887634277344, 0.008810879707336426, 0.008748064041137696, 0.008784319877624512, 0.008806015968322754, 0.008786687850952149, 0.008855936050415038, 0.008893631935119628, 0.00881276798248291, 0.008849791526794434, 0.008828096389770508, 0.008818976402282715, 0.00881436824798584, 0.008860416412353515, 0.008894368171691895, 0.008775936126708985, 0.009041664123535156, 0.008962143898010254, 0.008859647750854491, 0.009093119621276855, 0.008970335960388183, 0.008937376022338867, 0.008902655601501466, 0.008928863525390626, 0.008958111763000489, 0.00895030403137207, 0.008988384246826173, 0.010232000350952149, 0.008892576217651367, 0.008858976364135742, 0.008874303817749023, 0.008875424385070801, 0.008856160163879395, 0.008924511909484864, 0.008839839935302735, 0.008869888305664063, 0.008817791938781738, 0.008727423667907715, 0.008751104354858399, 0.008724160194396972, 0.008778335571289063, 0.008918399810791016, 0.008838656425476075, 0.008837984085083008, 0.008832703590393067, 0.008816767692565917, 0.00887168025970459, 0.008837727546691895, 0.009317631721496583, 0.008844160079956054, 0.009297792434692382, 0.009973759651184083, 0.008919039726257324, 0.009072768211364745, 0.01081926441192627, 0.008917183876037597, 0.008890463829040527, 0.008869791984558105, 0.009002304077148438, 0.009071295738220214, 0.008887840270996093, 0.008884703636169434, 0.008898752212524414, 0.00892848014831543, 0.008890975952148437, 0.008955295562744141, 0.008892576217651367, 0.00892899227142334, 0.008872672080993653, 0.008902655601501466, 0.008980480194091797, 0.008855551719665527, 0.008841055870056153, 0.008861215591430664, 0.008888352394104003, 0.008923744201660156, 0.008828927993774414, 0.008857407569885255, 0.00878611183166504, 0.008794143676757812, 0.00881065559387207, 0.008836000442504884, 0.008846591949462891, 0.008909791946411132, 0.008900544166564942, 0.008886303901672363, 0.008938207626342773, 
0.008948927879333496, 0.008903488159179688, 0.008890399932861328, 0.00885961627960205, 0.008855551719665527, 0.008884032249450684, 0.009906368255615234, 0.009743647575378418, 0.00913379192352295, 0.008881152153015137, 0.008935423851013183, 0.00899891185760498, 0.008962207794189454, 0.008900447845458985, 0.008872096061706542, 0.008898207664489746, 0.00886188793182373, 0.008828960418701171, 0.008913920402526856, 0.008891039848327637, 0.008870207786560058, 0.008839167594909669, 0.008871647834777832, 0.008903648376464844, 0.008906399726867676, 0.008870112419128417, 0.008886240005493163, 0.008818016052246094, 0.008794528007507324, 0.008869824409484863, 0.00886832046508789, 0.00924687957763672, 0.008782784461975097, 0.008842144012451172, 0.008867679595947265, 0.008872096061706542, 0.008788127899169922, 0.008833120346069336, 0.01009228801727295, 0.009058303833007812, 0.009010687828063964, 0.008984959602355957, 0.008883551597595215, 0.008890527725219727, 0.008862336158752442, 0.008931584358215332, 0.008836864471435547, 0.008878080368041993, 0.008847359657287598, 0.008769536018371582, 0.008867072105407716, 0.008775487899780273, 0.008724639892578125, 0.008780575752258301, 0.00879798412322998, 0.00892848014831543, 0.008897472381591796, 0.008821855545043946, 0.008842368125915528, 0.008841055870056153, 0.008812543869018554, 0.008824992179870605, 0.00873356819152832, 0.008737248420715332, 0.008867584228515624, 0.008801152229309082, 0.008851327896118165, 0.00886070442199707, 0.008749631881713868, 0.008811008453369141, 0.008753055572509766, 0.008822400093078614, 0.009271424293518066, 0.008763296127319336, 0.008755552291870116, 0.008720576286315918, 0.008716095924377441, 0.008766752243041992, 0.008706720352172851, 0.008763456344604492, 0.008740415573120117, 0.008831551551818848, 0.008763263702392578, 0.0089268798828125, 0.00890294361114502, 0.008881504058837891, 0.008790271759033203, 0.008802623748779296, 0.008810175895690917, 0.008839391708374023, 0.008922495841979981, 0.008874496459960938, 0.008927359580993652, 0.009014880180358887, 0.008992768287658692, 0.00888259220123291, 0.00889241600036621, 0.008988415718078614, 0.008900863647460937, 0.008935711860656738, 0.008903679847717285, 0.008944095611572266, 0.008917247772216797, 0.00887564754486084, 0.00885366439819336, 0.0089169921875, 0.009298144340515136, 0.008929408073425293, 0.00894105625152588, 0.009101056098937988, 0.008953472137451172, 0.008964991569519043, 0.00886188793182373, 0.008853440284729003, 0.008873215675354004, 0.008929056167602539, 0.008932543754577636, 0.008818079948425293, 0.008779935836791992, 0.008828831672668456, 0.00876576042175293, 0.00881161594390869, 0.008962207794189454, 0.008836864471435547, 0.008806528091430664, 0.008835647583007812, 0.008814911842346191, 0.00887388801574707, 0.009119423866271972, 0.008935968399047851, 0.008994976043701171, 0.008853280067443847, 0.008887807846069335, 0.008871711730957032, 0.008861536026000977, 0.008941887855529785, 0.00882265567779541, 0.00888092803955078, 0.008855392456054687, 0.008881376266479493, 0.009087008476257324, 0.008851936340332031, 0.008677663803100586, 0.008845312118530273, 0.00882688045501709, 0.008765631675720215, 0.008779583930969238, 0.008820735931396484, 0.008701888084411621, 0.008805600166320801, 0.0086429443359375, 0.009085247993469239, 0.008942912101745606, 0.008839008331298828, 0.008887295722961425, 0.008878080368041993, 0.00897433567047119, 0.008927231788635253, 0.008976672172546387, 0.008922271728515625, 0.00903657627105713, 0.00888748836517334, 0.008917695999145507, 
0.008869471549987793, 0.00883456039428711, 0.008868672370910645, 0.008853504180908203, 0.008814240455627441, 0.008808799743652344, 0.008803584098815919, 0.008796319961547852, 0.009092960357666015, 0.008826687812805175, 0.00879097557067871, 0.008845120429992677, 0.008851167678833007, 0.008796640396118164, 0.008857631683349609, 0.008797504425048829, 0.00885750389099121, 0.008862719535827637, 0.00884006404876709, 0.008847264289855957, 0.008904800415039063, 0.008769696235656738, 0.00872316837310791, 0.00874112033843994, 0.008740799903869629, 0.008746815681457519, 0.00880844783782959, 0.008773823738098145, 0.008798015594482423, 0.0088307523727417, 0.009076959609985351, 0.008936448097229004, 0.008911104202270507, 0.008775648117065429, 0.008778047561645509, 0.008884256362915038, 0.008748671531677247, 0.008784704208374024, 0.008749055862426757, 0.008783424377441406, 0.00888649559020996, 0.008798720359802246, 0.008785408020019531, 0.00899289608001709, 0.008872032165527344, 0.008894559860229492, 0.00884931182861328, 0.008880288124084472, 0.008885215759277344, 0.008857407569885255, 0.008843263626098634, 0.008853407859802246, 0.008953951835632324, 0.008878080368041993, 0.008951680183410644, 0.008919136047363281, 0.008888256072998047, 0.008890560150146484, 0.008905632019042969, 0.008897791862487793, 0.008887519836425781, 0.008897055625915527, 0.008881888389587402, 0.008922783851623536, 0.008944255828857422, 0.008841440200805665, 0.008829024314880371, 0.008830656051635742, 0.008818752288818359, 0.008861632347106933, 0.008840767860412598, 0.008804800033569336, 0.008832320213317871, 0.008784576416015625, 0.008891488075256348, 0.008954784393310548, 0.008812543869018554, 0.008897919654846192, 0.008856191635131835, 0.008812543869018554, 0.00871769618988037, 0.008664735794067383, 0.008758560180664062, 0.008734399795532227, 0.008798272132873534, 0.008742848396301269, 0.00884761619567871, 0.008866720199584961, 0.008807264328002929, 0.009191424369812011, 0.008908127784729004, 0.008839839935302735, 0.00867302417755127, 0.0087740478515625, 0.008781503677368165, 0.008816800117492675, 0.008916576385498047, 0.00878223991394043, 0.008978272438049316, 0.008872096061706542, 0.008835391998291016, 0.008863391876220704, 0.008885919570922851, 0.008889920234680175, 0.008854016304016114, 0.008890399932861328, 0.008928576469421386, 0.00887497615814209, 0.008853504180908203, 0.008912896156311035, 0.008874239921569825, 0.00887388801574707, 0.008922207832336425, 0.008913984298706054, 0.008959839820861817, 0.0089552001953125, 0.008922016143798828, 0.008912896156311035, 0.008883999824523927, 0.009147680282592774, 0.008904671669006348, 0.008911520004272462, 0.008894559860229492, 0.008937472343444825, 0.008937536239624023, 0.008902848243713379, 0.008881279945373535, 0.008815232276916505, 0.008761343955993652, 0.008816639900207519, 0.008951199531555177, 0.008784159660339356, 0.00882310390472412, 0.008828927993774414, 0.008763392448425293, 0.008720383644104004, 0.008760736465454102, 0.008804832458496094, 0.00876147174835205, 0.008851263999938965, 0.008941791534423827, 0.008755167961120606, 0.008859328269958496, 0.008759615898132324, 0.008779264450073243, 0.008715904235839844, 0.008708703994750976, 0.008765343666076661, 0.008648736000061035, 0.008740511894226075, 0.008795999526977539, 0.008815168380737304, 0.008880288124084472, 0.008869664192199707, 0.00879036808013916, 0.008775263786315919, 0.008773632049560547, 0.00882703971862793, 0.008923392295837402, 0.008920639991760253, 0.00894438362121582, 0.009024864196777343, 
0.008912351608276367, 0.008941632270812988, 0.00913923168182373, 0.009004128456115723, 0.0090033597946167, 0.009023776054382324, 0.008986687660217286, 0.008923135757446288, 0.008966143608093263, 0.00893238353729248, 0.00897532844543457, 0.009011103630065917]",tokens/s,112.45921519891753,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2133.262336,4618.911744,0.0,4223.664128,4030.321664,s,1,12.9877333984375,12.9877333984375,0.0,12.9877333984375,12.9877333984375,12.9877333984375,12.9877333984375,[12.9877333984375],,kWh,0.00016406305547914143,1.8089961443284726e-05,6.06139373799941e-05,0.00024276695430242023,,MB,1618.501632,5021.564928,0.0,4613.7344,4385.21344,s,10,4.788660247802735,0.4788660247802735,0.0014680126505054807,0.47900039672851563,0.4798681488037109,0.4809925308227539,0.4818920364379883,"[0.48211691284179686, 0.47681765747070315, 0.47686187744140623, 0.4785756530761719, 0.47771749877929687, 0.4790259094238281, 0.4793768615722656, 0.47957470703125, 0.4796182861328125, 0.47897488403320315]",tokens/s,534.5962894683643,kWh,1.4391370430555777e-05,1.5864802138101464e-06,9.506211308665405e-06,2.5484061953031327e-05,tokens/kWh,10045494.335707689,MB,1622.540288,5023.66208,0.0,4615.831552,4385.216,s,10,42.01978515625,4.201978515625,0.015899671911357487,4.197839599609375,4.223855419921875,4.224817602539063,4.225587348632813,"[4.17725537109375, 4.2219921875, 4.19841455078125, 4.18525537109375, 4.20359521484375, 4.1972646484375, 4.19577880859375, 4.22577978515625, 4.1908076171875, 4.2236416015625]",tokens/s,14.992937199877476,kWh,0.00012341991380652849,1.3614399468116394e-05,7.641430187213566e-05,0.00021344861514678058,tokens/kWh,295153.00418640464,,s,630,42.01672320556641,0.066693211437407,0.0008805868155106545,0.06655326461791992,0.0671962188720703,0.06759352226257324,0.07020866516113282,"[0.06733004760742188, 0.06611942291259766, 0.0661690902709961, 0.0663934097290039, 0.0663410873413086, 0.06653343963623047, 0.06625958251953125, 0.06640412902832031, 0.06625484466552735, 0.06584832000732421, 0.06630409240722657, 0.06601119995117187, 0.06608367919921875, 0.06639974212646485, 0.06603228759765625, 0.065834716796875, 0.06608089447021484, 0.06592511749267578, 0.06672383880615235, 0.06621091461181641, 0.06607929229736328, 0.06596012878417969, 0.06619888305664062, 0.0659648666381836, 0.06614755249023438, 0.06618192291259765, 0.06619545745849609, 0.0663796157836914, 0.06706393432617187, 0.06595587158203126, 0.0661277084350586, 0.06624066925048828, 0.06604560089111328, 0.0663280029296875, 0.0664073257446289, 0.06620569610595703, 0.0660664291381836, 0.06615039825439453, 0.065775390625, 0.06687766265869141, 0.06601455688476562, 0.06581878662109375, 0.06645171356201172, 0.0665459213256836, 0.06593360137939454, 0.06618911743164063, 0.06636124420166016, 0.06616063690185547, 0.06656368255615235, 0.06636176300048828, 
0.06643465423583984, 0.06622249603271485, 0.06918348693847656, 0.06618521881103516, 0.0662194595336914, 0.06625321960449218, 0.06620700836181641, 0.06630668640136719, 0.06695756530761719, 0.06610944366455078, 0.0663552017211914, 0.06627875518798829, 0.06621849822998047, 0.06753257751464843, 0.06839542388916016, 0.06723123168945312, 0.0672542724609375, 0.07038169860839844, 0.06735715484619141, 0.06663954925537109, 0.06627350616455079, 0.06669014739990234, 0.06631053161621094, 0.06696355438232422, 0.06654966735839844, 0.06651302337646485, 0.0664970245361328, 0.0768532485961914, 0.06685078430175781, 0.06645558166503907, 0.06687744140625, 0.0668958740234375, 0.06624578857421876, 0.06671587371826172, 0.06711974334716797, 0.06640249633789062, 0.0668424301147461, 0.0667026596069336, 0.06663238525390625, 0.06668841552734375, 0.06676105499267578, 0.06669471740722656, 0.06637987518310547, 0.0668157730102539, 0.066787841796875, 0.06673235321044922, 0.0675041275024414, 0.0664430694580078, 0.06668860626220703, 0.06672239685058594, 0.06642588806152344, 0.06692758178710938, 0.0668569564819336, 0.06670134735107422, 0.06716851043701172, 0.06685052490234375, 0.06660707092285156, 0.06681398773193359, 0.06736640167236328, 0.06654412841796875, 0.06646927642822266, 0.06733001708984375, 0.06653151702880859, 0.0668635482788086, 0.06682828521728515, 0.0672314224243164, 0.06733971405029297, 0.06665510559082032, 0.06655101013183594, 0.06662252807617187, 0.0667174072265625, 0.06668643188476563, 0.06645609283447265, 0.06638082885742187, 0.06667385864257812, 0.06667801666259765, 0.06722560119628906, 0.06661865234375, 0.06682915496826172, 0.06643891143798829, 0.0664659194946289, 0.06668486022949219, 0.06678739166259766, 0.06655522918701172, 0.06686377716064452, 0.06635724639892578, 0.0663345947265625, 0.06628546905517578, 0.0662489242553711, 0.06715142059326172, 0.06626143646240235, 0.06667046356201171, 0.06646182250976562, 0.07020543670654297, 0.06638947296142578, 0.06610793304443359, 0.06632653045654296, 0.06658662414550781, 0.06656409454345703, 0.06705971527099609, 0.06667855834960937, 0.06655203247070313, 0.06631401824951172, 0.06661968231201172, 0.06661933135986328, 0.06635926055908203, 0.06634089660644531, 0.0665759048461914, 0.06662560272216797, 0.06685942077636718, 0.06672959899902343, 0.06644915008544922, 0.06636608123779297, 0.0664039077758789, 0.06704959869384766, 0.06648831939697265, 0.06688371276855469, 0.06626255798339843, 0.06608739471435547, 0.06725651550292969, 0.06648992156982422, 0.06633078765869141, 0.06660710144042968, 0.06652726745605468, 0.06621324920654297, 0.06659123229980468, 0.06628108978271484, 0.06635552215576172, 0.06612361907958984, 0.06667324829101562, 0.06641049957275391, 0.06652518463134766, 0.06631014251708985, 0.06643411254882813, 0.06617798614501953, 0.06619292449951172, 0.06615001678466798, 0.06606114959716797, 0.07067135620117188, 0.06689826965332031, 0.06632582092285157, 0.06675481414794922, 0.06619967651367188, 0.06609747314453125, 0.06597427368164062, 0.06591283416748046, 0.06622322845458985, 0.0662938232421875, 0.06596281433105469, 0.06640435028076172, 0.06732511901855469, 0.06633350372314453, 0.06681517028808594, 0.06761759948730468, 0.06689791870117187, 0.06607667541503906, 0.06650675201416016, 0.06632569885253906, 0.06619782257080079, 0.06668544006347656, 0.06620703887939453, 0.06647599792480469, 0.06831382751464844, 0.06745702362060547, 0.0664801254272461, 0.06637347412109375, 0.06633618927001952, 0.0663436508178711, 0.06669312286376954, 0.06627327728271484, 
0.06643004608154297, 0.0661817626953125, 0.06599324798583985, 0.06625401306152344, 0.06646841430664062, 0.06642278289794921, 0.06614351654052734, 0.06598729705810547, 0.0661294403076172, 0.06613785552978516, 0.06588470458984375, 0.06616697692871094, 0.0665716781616211, 0.0663288345336914, 0.06655625915527344, 0.06589234924316406, 0.0662282257080078, 0.06630963134765624, 0.06619171142578124, 0.06615261077880859, 0.0662507553100586, 0.06652252960205078, 0.06618086242675782, 0.06591983795166016, 0.06589234924316406, 0.06635475158691406, 0.06626143646240235, 0.06656803131103516, 0.06664790344238282, 0.06714604949951172, 0.06677932739257812, 0.06719602966308594, 0.06748585510253906, 0.06638480377197266, 0.06644217681884766, 0.06745292663574219, 0.06677053070068359, 0.06787318420410156, 0.06834598541259766, 0.06727458953857422, 0.06661634826660157, 0.06671052551269531, 0.06659481811523438, 0.0665128936767578, 0.06643302154541016, 0.06685430145263672, 0.06648226928710937, 0.06660704040527343, 0.06692256164550782, 0.06646015930175782, 0.06654064178466797, 0.06684496307373047, 0.06665660858154297, 0.06645283508300781, 0.06698079681396485, 0.06656819152832032, 0.06718195343017579, 0.06704144287109375, 0.06684239959716796, 0.0667286376953125, 0.06673817443847656, 0.0664815673828125, 0.06674082946777343, 0.06651904296875, 0.06661491394042969, 0.06694745635986328, 0.06670336151123046, 0.0669839324951172, 0.06656409454345703, 0.06662457275390625, 0.06651996612548829, 0.06616067504882812, 0.066733154296875, 0.06675267028808594, 0.06650752258300781, 0.06673612976074218, 0.06636339569091797, 0.06635724639892578, 0.06658787536621094, 0.06633757019042968, 0.06657843017578124, 0.06672179412841797, 0.06698150634765625, 0.06648051452636719, 0.06676403045654297, 0.06668326568603515, 0.066542236328125, 0.06637948608398438, 0.06630809783935547, 0.06655705261230468, 0.06663462066650391, 0.06669020843505859, 0.06637654113769531, 0.06723696136474609, 0.06631903839111328, 0.0671297607421875, 0.0664653091430664, 0.07095123291015625, 0.06671507263183593, 0.0661279067993164, 0.0661974105834961, 0.06616143798828125, 0.06615225219726563, 0.0663695068359375, 0.06632406616210937, 0.06655449676513672, 0.06595276641845703, 0.06652339172363281, 0.06598326110839844, 0.06603142547607421, 0.06605840301513671, 0.06616473388671874, 0.0662610855102539, 0.0662034912109375, 0.06640054321289063, 0.06589561462402344, 0.06790000152587891, 0.06614169311523438, 0.06722598266601562, 0.0677623062133789, 0.06659891510009766, 0.06705766296386718, 0.06639513397216797, 0.06652825927734375, 0.06670320129394532, 0.0667772445678711, 0.06937190246582031, 0.06704742431640626, 0.06931660461425782, 0.06665328216552735, 0.06643599700927734, 0.06610739135742187, 0.06614412689208984, 0.06666214752197265, 0.06638972473144532, 0.06672764587402344, 0.06641718292236329, 0.0666096954345703, 0.06619532775878906, 0.06649839782714843, 0.06624272155761719, 0.06661529541015625, 0.06683238220214843, 0.06639584350585938, 0.06629622650146484, 0.06624195098876953, 0.06627734375, 0.0665031967163086, 0.066393310546875, 0.06630271911621094, 0.06611766052246094, 0.06646073913574219, 0.06637593841552734, 0.06618182373046876, 0.06674636840820312, 0.06603705596923828, 0.06645830535888672, 0.06620518493652344, 0.06717404937744141, 0.06664473724365234, 0.06690144348144532, 0.06643910217285157, 0.06618978881835938, 0.06619766235351562, 0.06645760345458984, 0.06641254425048829, 0.0663115234375, 0.06660099029541015, 0.06643942260742187, 0.0666382064819336, 0.06642412567138672, 
0.06675475311279297, 0.06643309020996094, 0.0702099838256836, 0.06666620635986328, 0.06649446105957031, 0.06631858825683594, 0.06656412506103515, 0.06646736145019531, 0.06672227478027344, 0.06646784210205078, 0.06632243347167968, 0.06645286560058594, 0.06648076629638672, 0.06665625762939453, 0.066510498046875, 0.06700681304931641, 0.06662348937988281, 0.06675027465820313, 0.06652301025390625, 0.06751683044433594, 0.06639606475830079, 0.06688358306884766, 0.066698974609375, 0.0666770248413086, 0.06654771423339843, 0.06644070434570312, 0.0664060821533203, 0.06660163116455078, 0.0664557113647461, 0.06661145782470704, 0.06657833862304688, 0.06656156921386719, 0.06673244476318359, 0.06662518310546875, 0.06668278503417968, 0.0662911376953125, 0.06693366241455079, 0.06683853149414062, 0.06632582092285157, 0.06616291046142578, 0.06614473724365234, 0.06632991790771485, 0.0661118392944336, 0.06598220825195313, 0.06634556579589844, 0.06643711853027344, 0.06629376220703125, 0.06631231689453125, 0.06658182525634766, 0.06671366119384765, 0.0670928955078125, 0.06659081268310547, 0.06877110290527344, 0.06694156646728516, 0.06655503845214844, 0.0667779541015625, 0.06686685180664062, 0.06693513488769531, 0.06683033752441406, 0.06678083038330078, 0.06665660858154297, 0.06674022674560547, 0.07725260925292969, 0.06658662414550781, 0.06842499542236329, 0.06648291015625, 0.06623353576660156, 0.06672035217285156, 0.06703231811523437, 0.06670025634765625, 0.06642221069335938, 0.06660550689697266, 0.06680178833007812, 0.06632185363769531, 0.06621855926513671, 0.06671132659912109, 0.06695343780517578, 0.06725993347167969, 0.06676441955566406, 0.06635395050048828, 0.06737686157226562, 0.06673772430419922, 0.06628797149658203, 0.07008419036865235, 0.0666509780883789, 0.06672930908203124, 0.0666751708984375, 0.06679798126220703, 0.06634268951416016, 0.06651074981689453, 0.06670140838623047, 0.06700614166259766, 0.06787923431396484, 0.06829222106933594, 0.06702092742919921, 0.06635340881347657, 0.06668287658691406, 0.0667658233642578, 0.06675353240966797, 0.0666767349243164, 0.06658428955078124, 0.06659001922607422, 0.06719792175292968, 0.06655999755859375, 0.06642195129394532, 0.06645843505859375, 0.07015328216552734, 0.06653794860839844, 0.06620207977294922, 0.06676882934570312, 0.06677510070800781, 0.0665921630859375, 0.06691913604736328, 0.06732316589355469, 0.06670006561279297, 0.06652127838134765, 0.06654137420654296, 0.06794854736328125, 0.06684671783447266, 0.06664601898193359, 0.06657164764404297, 0.06644322967529297, 0.06636752319335938, 0.06614080047607422, 0.06656966400146484, 0.06756409454345703, 0.06613766479492188, 0.06658092498779297, 0.06622000122070312, 0.06616886138916016, 0.06634825897216796, 0.06604029083251953, 0.06658617401123047, 0.06616483306884766, 0.06731843566894531, 0.06625193786621093, 0.06622073364257812, 0.0662734375, 0.06815277099609375, 0.06752108764648437, 0.06617407989501953, 0.06634284973144532, 0.06620256042480469, 0.06600704193115234, 0.06638579559326171, 0.06624422454833985, 0.06604032135009766, 0.06645942687988281, 0.066287841796875, 0.06603119659423828, 0.06630032348632812, 0.06629785919189453, 0.06577152252197266, 0.06593724822998047, 0.06586585235595703, 0.066070556640625, 0.06599622344970703, 0.06648786926269531, 0.06589542388916016, 0.06815455627441407, 0.06633760070800782, 0.0658545913696289, 0.06611443328857422, 0.06640748596191406, 0.06603052520751954, 0.0667290267944336, 0.06658544158935546, 0.06625846099853516, 0.06627708435058594, 0.06664482879638672, 
0.06816553497314454, 0.06685084533691406, 0.06669292449951172, 0.06652339172363281, 0.06692848205566407, 0.06697795104980468, 0.06765436553955079, 0.06690995025634766, 0.06685337829589844, 0.06687923431396485, 0.06650777435302735, 0.06700685119628906, 0.06674495697021485, 0.06719078063964844, 0.06664601898193359, 0.06731314849853516, 0.06703689575195312, 0.06650704193115234, 0.06681622314453126, 0.06702518463134766, 0.06692179107666016, 0.06696006774902344, 0.06703529357910157, 0.06653683471679687, 0.0667959976196289, 0.06658866882324219, 0.0672911376953125, 0.06659846496582031, 0.07059625244140624, 0.06653564453125, 0.06681862640380859, 0.06678294372558594, 0.06655744171142577, 0.06686185455322266, 0.06687334442138672, 0.06668492889404297, 0.06662067413330078, 0.06710892486572266, 0.06662624359130859, 0.0667688980102539, 0.0666269760131836, 0.06691622161865235, 0.06674230194091797, 0.06709232330322265, 0.06657724761962891, 0.06631353759765625, 0.06729593658447265, 0.06652864074707031, 0.06990911865234375, 0.06687872314453125, 0.0665771484375, 0.06677417755126953, 0.0667957763671875, 0.066943359375, 0.06725865936279297, 0.06653228759765625, 0.0667658233642578, 0.06934323120117188, 0.06936528015136718, 0.06684127807617188, 0.06689469146728516, 0.06712592315673828, 0.06661286163330078, 0.06711363220214844, 0.066779296875, 0.066864990234375, 0.06685689544677734, 0.0671744613647461, 0.06716329956054687]",tokens/s,14.994029803745788,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1034.113024,896.466944,0.0,501.219328,495.906816,s,1,8.51478515625,8.51478515625,0.0,8.51478515625,8.51478515625,8.51478515625,8.51478515625,[8.51478515625],,kWh,3.898841823337685e-05,4.293543847427892e-06,1.479251183399144e-05,5.807447391479619e-05,,MB,1263.230976,1060.0448,0.0,652.214272,602.748928,s,10,0.5001668472290038,0.050016684722900384,0.0006455890622503593,0.05000513458251953,0.05044609451293945,0.05104636001586914,0.05152657241821289,"[0.051646625518798825, 0.0493304328918457, 0.05031270217895508, 0.04941177749633789, 0.04983046340942383, 0.05011782455444336, 0.050098175048828124, 0.04931433486938477, 0.05019241714477539, 0.04991209411621094]",tokens/s,5118.292054307013,kWh,1.6843767835250786e-06,1.8575746518989712e-07,1.1239153308045111e-06,2.9940495795194866e-06,tokens/kWh,85502926.12091123,MB,1274.0608,1072.627712,0.0,664.797184,611.073536,s,10,19.49507995605469,1.9495079956054688,0.008052663444680344,1.9501111450195312,1.95942490234375,1.9622285034179687,1.9644713842773438,"[1.943477783203125, 1.950402099609375, 1.9455321044921876, 1.94429345703125, 1.9650321044921875, 1.9343731689453125, 1.9498201904296875, 1.952763916015625, 1.950583251953125, 
1.9588018798828124]",tokens/s,32.31584591702778,kWh,5.653039424814239e-05,6.235005340874565e-06,2.4545577426994328e-05,8.73109770160113e-05,tokens/kWh,721558.7564487671,,s,630,19.488627748489375,0.03093432975950695,0.00047099883001466136,0.030900239944458008,0.03128377838134765,0.03153155012130737,0.03251617794036866,"[0.030343807220458985, 0.030756256103515626, 0.030570560455322266, 0.03055436706542969, 0.030871328353881837, 0.030700031280517577, 0.0306376953125, 0.030679264068603516, 0.03081222343444824, 0.03073606491088867, 0.031139360427856446, 0.03132620811462403, 0.031193216323852538, 0.031150335311889647, 0.03108710479736328, 0.031033599853515625, 0.03110268783569336, 0.031127071380615233, 0.03115804862976074, 0.031146432876586912, 0.030974239349365235, 0.031096895217895507, 0.03115001678466797, 0.030936864852905272, 0.031117408752441407, 0.03095680046081543, 0.030896703720092775, 0.03082271957397461, 0.031285247802734374, 0.03117670440673828, 0.030920703887939452, 0.03126095962524414, 0.03137212753295898, 0.030980928421020508, 0.031188512802124025, 0.031017536163330077, 0.031174495697021486, 0.030943199157714842, 0.030628223419189454, 0.03064950370788574, 0.030726783752441405, 0.031016960144042968, 0.030899295806884764, 0.030702495574951173, 0.030852127075195312, 0.030476383209228516, 0.030619808197021484, 0.030427423477172852, 0.03063974380493164, 0.03093996810913086, 0.03077292823791504, 0.030635871887207032, 0.030361408233642577, 0.03044528007507324, 0.030431648254394532, 0.030335519790649416, 0.03032899284362793, 0.030398591995239258, 0.03070742416381836, 0.03062182426452637, 0.030913471221923828, 0.03064841651916504, 0.03034339141845703, 0.029878335952758787, 0.03026527976989746, 0.030281728744506835, 0.030460895538330077, 0.03057472038269043, 0.03077884864807129, 0.030730079650878907, 0.030908384323120118, 0.031005279541015625, 0.03101487922668457, 0.030797248840332032, 0.031169055938720703, 0.03091872024536133, 0.03077324867248535, 0.030705919265747072, 0.030643903732299804, 0.030431455612182617, 0.03070876884460449, 0.030572479248046874, 0.03080691146850586, 0.03053379249572754, 0.030657440185546874, 0.03084998321533203, 0.030838783264160157, 0.030922752380371094, 0.03095043182373047, 0.030592992782592775, 0.030911487579345705, 0.030836063385009764, 0.030855199813842774, 0.03075369644165039, 0.030694751739501952, 0.030964096069335936, 0.030838783264160157, 0.03086966323852539, 0.030853023529052736, 0.030711872100830078, 0.030502784729003907, 0.030709600448608397, 0.032264350891113285, 0.03134259223937988, 0.03126275253295899, 0.03153033638000488, 0.03171622467041016, 0.03134623908996582, 0.03129567909240723, 0.03105699157714844, 0.031114143371582033, 0.031226144790649415, 0.031403743743896484, 0.031104671478271485, 0.031265312194824216, 0.031493951797485355, 0.031135744094848632, 0.03127055931091308, 0.031166816711425783, 0.03132204818725586, 0.031174720764160155, 0.03120947265625, 0.031145280838012695, 0.031247039794921876, 0.03126681518554687, 0.03108803176879883, 0.030557695388793944, 0.03103753662109375, 0.030960031509399414, 0.030689504623413084, 0.030522239685058593, 0.030468576431274413, 0.030736127853393556, 0.030843711853027343, 0.031109216690063477, 0.031102752685546874, 0.031061599731445313, 0.031260608673095706, 0.030815935134887694, 0.030866336822509766, 0.030729728698730467, 0.030974464416503908, 0.030778303146362304, 0.030837696075439455, 0.030838016510009766, 0.0310296630859375, 0.031043071746826172, 0.031097696304321288, 0.030936800003051757, 
0.03071151924133301, 0.030609983444213867, 0.030631935119628906, 0.03077292823791504, 0.030665023803710938, 0.030878944396972655, 0.03068582344055176, 0.03072630310058594, 0.030885887145996094, 0.030814016342163086, 0.030638208389282228, 0.030535743713378905, 0.030629728317260744, 0.03091676712036133, 0.030863359451293947, 0.030859039306640624, 0.030869375228881835, 0.030660415649414064, 0.03094748878479004, 0.031091167449951173, 0.031217439651489258, 0.031006816864013673, 0.031031200408935547, 0.030834144592285156, 0.030691999435424805, 0.03069856071472168, 0.03181216049194336, 0.030906688690185546, 0.030663839340209963, 0.030935136795043946, 0.030845760345458984, 0.03077324867248535, 0.03074470329284668, 0.03104140853881836, 0.031016191482543944, 0.031127456665039063, 0.03132912063598633, 0.031037311553955078, 0.03082048034667969, 0.030684511184692384, 0.030762239456176756, 0.03256291198730469, 0.030993919372558593, 0.03093494415283203, 0.030965375900268554, 0.031144927978515625, 0.03088108825683594, 0.030974655151367186, 0.030904319763183592, 0.031124639511108398, 0.0310994873046875, 0.031222143173217774, 0.03086310386657715, 0.030898303985595704, 0.03080188751220703, 0.030816287994384767, 0.03092889595031738, 0.030879232406616212, 0.030962175369262695, 0.03092403221130371, 0.030794496536254882, 0.030906368255615234, 0.030963712692260743, 0.03096780776977539, 0.03091059112548828, 0.030967775344848632, 0.030971839904785158, 0.03103331184387207, 0.030873407363891603, 0.030791711807250977, 0.030838111877441406, 0.03080054473876953, 0.030838752746582033, 0.030922239303588867, 0.030503423690795898, 0.030428672790527345, 0.0303438720703125, 0.030650367736816408, 0.03057663917541504, 0.030502239227294923, 0.030517919540405274, 0.030604576110839842, 0.030962207794189452, 0.030849023818969725, 0.03087311935424805, 0.03055580711364746, 0.030692447662353517, 0.030830495834350585, 0.030689279556274415, 0.030611135482788085, 0.03173587226867676, 0.03076531219482422, 0.03087366485595703, 0.030828447341918946, 0.03026323127746582, 0.030242912292480467, 0.030574304580688477, 0.030661151885986327, 0.03078118324279785, 0.030902271270751954, 0.03068262481689453, 0.03101136016845703, 0.030898399353027343, 0.030267967224121093, 0.030699520111083983, 0.030521568298339845, 0.030525503158569337, 0.0308121280670166, 0.030477439880371094, 0.030617536544799803, 0.030595232009887695, 0.030580543518066407, 0.030467071533203126, 0.030760608673095702, 0.03096384048461914, 0.030754751205444335, 0.030699520111083983, 0.03192831993103027, 0.030992191314697267, 0.03100492858886719, 0.03101308822631836, 0.030852832794189454, 0.030719263076782227, 0.030828351974487304, 0.030847808837890626, 0.03075872039794922, 0.030837087631225585, 0.031028671264648436, 0.03138227272033691, 0.03107200050354004, 0.03103878402709961, 0.03118707275390625, 0.031014463424682618, 0.031062303543090822, 0.03132086372375488, 0.03112259292602539, 0.031093536376953126, 0.031170656204223633, 0.03551132965087891, 0.03143564796447754, 0.03162112045288086, 0.03134623908996582, 0.03117251205444336, 0.0312073917388916, 0.031192928314208983, 0.03135094451904297, 0.031537120819091796, 0.03128361511230469, 0.031188447952270507, 0.031184640884399414, 0.03108348846435547, 0.031131807327270507, 0.03119705581665039, 0.031225791931152345, 0.03203702545166016, 0.0323680305480957, 0.031262624740600584, 0.03120185661315918, 0.03298099136352539, 0.03239116668701172, 0.03138764762878418, 0.031143936157226562, 0.030988447189331053, 0.03105366325378418, 
0.03102720069885254, 0.0308787841796875, 0.03059916877746582, 0.0306177921295166, 0.030410560607910156, 0.0302508487701416, 0.03045187187194824, 0.030685407638549805, 0.030621471405029296, 0.030703264236450194, 0.030853471755981444, 0.030887935638427736, 0.030896127700805662, 0.03119308853149414, 0.030693376541137695, 0.03059507179260254, 0.030447071075439452, 0.03043587112426758, 0.030570207595825197, 0.03083625602722168, 0.030611295700073243, 0.03100048065185547, 0.030671327590942384, 0.03047644805908203, 0.03050739288330078, 0.030447551727294922, 0.03060108757019043, 0.03079360008239746, 0.030814079284667967, 0.030913120269775392, 0.030797664642333984, 0.030961568832397462, 0.03092246437072754, 0.0306876163482666, 0.030600223541259765, 0.030958560943603514, 0.03052297592163086, 0.030402687072753905, 0.030517248153686522, 0.03038547134399414, 0.030519775390625, 0.03048899269104004, 0.03037808036804199, 0.030533344268798827, 0.030772607803344728, 0.031037984848022462, 0.03093507194519043, 0.030755456924438478, 0.030671680450439453, 0.030516128540039062, 0.030437376022338865, 0.03042323112487793, 0.030847999572753908, 0.030622528076171874, 0.030482528686523437, 0.03051852798461914, 0.030708192825317383, 0.030884031295776368, 0.031065088272094726, 0.031108095169067384, 0.030907712936401367, 0.03095577621459961, 0.031095232009887695, 0.030842912673950194, 0.03085001564025879, 0.030591840744018554, 0.031159679412841798, 0.031204256057739257, 0.031077280044555664, 0.031122400283813478, 0.031301631927490234, 0.031076351165771485, 0.031160192489624025, 0.031234176635742187, 0.03106435203552246, 0.0311080322265625, 0.03204995346069336, 0.031136768341064453, 0.031075328826904298, 0.03196703910827637, 0.031111040115356446, 0.031178432464599608, 0.03144355201721191, 0.03136828804016113, 0.03090118408203125, 0.03079987144470215, 0.030881792068481444, 0.0307957763671875, 0.03075200080871582, 0.031040512084960937, 0.03083852767944336, 0.03080601692199707, 0.03087068748474121, 0.03080303955078125, 0.03090127944946289, 0.03081113624572754, 0.03111292839050293, 0.03087343978881836, 0.030959775924682617, 0.03095756721496582, 0.030834688186645507, 0.030840896606445314, 0.030761056900024415, 0.030256383895874022, 0.031586912155151366, 0.03058585548400879, 0.03061862373352051, 0.030842880249023437, 0.031222911834716798, 0.03122617530822754, 0.03116499137878418, 0.030825471878051756, 0.03064499282836914, 0.030621631622314453, 0.030593536376953126, 0.030682592391967772, 0.03052169609069824, 0.03070899200439453, 0.030946271896362305, 0.03076803207397461, 0.03064678382873535, 0.03055449676513672, 0.030420223236083985, 0.03071183967590332, 0.030978559494018554, 0.030659040451049804, 0.030674688339233397, 0.030757055282592774, 0.03003798484802246, 0.030630048751831056, 0.03024185562133789, 0.0302807674407959, 0.03052649688720703, 0.03099679946899414, 0.030955615997314452, 0.03085139274597168, 0.030644224166870116, 0.030588512420654298, 0.030699392318725587, 0.03125283241271973, 0.03123628807067871, 0.03110028839111328, 0.03104217529296875, 0.03097395133972168, 0.030979103088378906, 0.03112063980102539, 0.03144470405578613, 0.0310435848236084, 0.031104927062988282, 0.031157535552978517, 0.03177350425720215, 0.031153535842895506, 0.031113279342651366, 0.031087167739868166, 0.031016992568969726, 0.031208703994750977, 0.031023712158203126, 0.031043935775756835, 0.031030303955078126, 0.030989055633544923, 0.031008159637451172, 0.031079008102416993, 0.031045663833618165, 0.030955360412597655, 0.03127631950378418, 
0.030954336166381834, 0.03108780860900879, 0.03110393524169922, 0.03095747184753418, 0.03102921676635742, 0.031035327911376955, 0.03162732887268067, 0.03153254318237304, 0.031141696929931642, 0.030866111755371094, 0.031221759796142577, 0.030912511825561522, 0.030543872833251953, 0.030849023818969725, 0.030760959625244142, 0.030880767822265624, 0.030852096557617188, 0.031545343399047854, 0.030594911575317383, 0.030519519805908203, 0.03088777542114258, 0.03121552085876465, 0.031160512924194337, 0.031139839172363282, 0.03112550354003906, 0.03083263969421387, 0.030429407119750975, 0.030572639465332032, 0.03619110488891602, 0.030766304016113282, 0.030474143981933592, 0.030830432891845703, 0.03031449508666992, 0.030306304931640625, 0.030527360916137697, 0.030515552520751953, 0.03051024055480957, 0.03146937561035156, 0.03156265640258789, 0.031070112228393554, 0.030832799911499023, 0.030318431854248047, 0.03037183952331543, 0.0301977596282959, 0.030676992416381835, 0.030547967910766603, 0.03065353584289551, 0.03044374465942383, 0.031105728149414064, 0.03022412872314453, 0.030394399642944336, 0.030298208236694334, 0.03053683280944824, 0.03091100883483887, 0.030886112213134767, 0.030788896560668945, 0.030641120910644533, 0.03072204780578613, 0.031092384338378905, 0.03106163215637207, 0.03129743957519531, 0.03112384033203125, 0.030975872039794922, 0.031170560836791993, 0.031104864120483397, 0.03082080078125, 0.03085955238342285, 0.03102908706665039, 0.031067935943603516, 0.03157439994812012, 0.030930240631103514, 0.031584768295288085, 0.03108268737792969, 0.03116851234436035, 0.031143936157226562, 0.031094783782958983, 0.031117536544799804, 0.03095523262023926, 0.031010496139526368, 0.03090880012512207, 0.031396127700805666, 0.030920192718505858, 0.0309967041015625, 0.031064064025878906, 0.03084492874145508, 0.030948928833007813, 0.031477792739868164, 0.031057600021362305, 0.030931808471679687, 0.030653440475463867, 0.0310732479095459, 0.031069599151611327, 0.03086409568786621, 0.03112739181518555, 0.030605600357055663, 0.03050268745422363, 0.03033238410949707, 0.030616352081298828, 0.03094259262084961, 0.03093328094482422, 0.031737951278686526, 0.031236127853393556, 0.03133225631713867, 0.03258988952636719, 0.031107072830200196, 0.03100611114501953, 0.0307042236328125, 0.031083776473999025, 0.0304289608001709, 0.030333343505859374, 0.030214719772338867, 0.031070207595825194, 0.03002979278564453, 0.03034217643737793, 0.030706687927246092, 0.03097599983215332, 0.030836896896362306, 0.031197023391723634, 0.030991840362548827, 0.03093097686767578, 0.030781919479370118, 0.03053059196472168, 0.03081113624572754, 0.03117990493774414, 0.03342015838623047, 0.030807327270507813, 0.03071049690246582, 0.03142201614379883, 0.030933439254760744, 0.03108016014099121, 0.031184288024902345, 0.0310743350982666, 0.03141087913513184, 0.031107295989990236, 0.03108803176879883, 0.03153775978088379, 0.03171526336669922, 0.03143440055847168, 0.03103984069824219, 0.031096288681030274, 0.030763776779174804, 0.030729248046875, 0.030730016708374025, 0.03132889556884766, 0.03240176010131836, 0.03319004821777344, 0.03123740768432617, 0.031318559646606445, 0.03179427146911621, 0.031036352157592772, 0.03088559913635254, 0.030777631759643556]",tokens/s,32.32654490251799,,,True 
4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1033.510912,1613.692928,0.0,1218.445312,1206.173696,s,1,9.1831015625,9.1831015625,0.0,9.1831015625,9.1831015625,9.1831015625,9.1831015625,[9.1831015625],,kWh,6.382324057082845e-05,7.032595791535425e-06,2.3776685687981747e-05,9.463252205034562e-05,,MB,1275.94496,1911.488512,0.0,1503.657984,1463.228416,s,10,1.9248706817626953,0.19248706817626954,0.0011163075534855215,0.19229170989990235,0.19386943054199218,0.19389991760253908,0.19392430725097656,"[0.19239820861816406, 0.19070640563964844, 0.1913957061767578, 0.1932269744873047, 0.19386265563964844, 0.19218521118164061, 0.1920773468017578, 0.1912587127685547, 0.19382905578613283, 0.19393040466308595]",tokens/s,1329.9594742934557,kWh,5.888236950166705e-06,6.493722790158906e-07,3.927919809000313e-06,1.0465529038182906e-05,tokens/kWh,24461257.43533825,MB,1293.959168,1911.488512,0.0,1503.657984,1463.230976,s,10,19.02480383300781,1.9024803833007808,0.010420759895332518,1.9060280151367186,1.91608125,1.916356787109375,1.9165772167968749,"[1.91602001953125, 1.907142822265625, 1.90618115234375, 1.8814990234375, 1.91663232421875, 1.894983154296875, 1.9058748779296875, 1.8914891357421875, 1.8981241455078126, 1.906857177734375]",tokens/s,33.114664704555715,kWh,5.504454917149943e-05,6.071256374725795e-06,3.197691447039852e-05,9.309272001662375e-05,tokens/kWh,676744.6475809276,,s,630,19.021390398025527,0.030192683171469073,0.0006366162060178063,0.030160079956054686,0.030726429557800295,0.03112152633666992,0.03256156150817872,"[0.03147190475463867, 0.030670848846435547, 0.03013222312927246, 0.03253247833251953, 0.031373311996459964, 0.030482080459594725, 0.030052703857421877, 0.03110403251647949, 0.03020899200439453, 0.030287872314453124, 0.030420799255371094, 0.030612960815429687, 0.03062652778625488, 0.030502912521362304, 0.030594335556030274, 0.030865280151367188, 0.030816703796386718, 0.0306911678314209, 0.03205366516113281, 0.03079311943054199, 0.030898944854736328, 0.0305860481262207, 0.030468927383422852, 0.030503999710083007, 0.03061199951171875, 0.03057411193847656, 0.030427520751953124, 0.030842527389526368, 0.03051532745361328, 0.03050569534301758, 0.030340576171875, 0.030360095977783202, 0.03041279983520508, 0.030324735641479493, 0.030393632888793945, 0.030544416427612305, 0.030306495666503907, 0.03021824073791504, 0.029959552764892577, 0.02966387176513672, 0.030558048248291017, 0.030363487243652343, 0.030124256134033203, 0.030185567855834962, 0.03015872001647949, 0.030052480697631837, 0.03001046371459961, 0.02985580825805664, 0.029909343719482423, 0.03005900764465332, 0.03001747131347656, 0.02985580825805664, 0.02969183921813965, 0.02971564865112305, 0.029700479507446288, 0.03053366470336914, 0.029576992034912108, 0.029780288696289063, 0.03010345649719238, 0.030109535217285155, 0.03006224060058594, 0.03016783905029297, 0.03027168083190918, 0.03059030342102051, 
0.029733535766601562, 0.029560287475585936, 0.02962486457824707, 0.02972585678100586, 0.029810592651367186, 0.029748416900634764, 0.030097152709960936, 0.03027078437805176, 0.03016160011291504, 0.029750463485717773, 0.03001545524597168, 0.029565759658813476, 0.029846656799316407, 0.030018367767333985, 0.030181024551391603, 0.030147008895874024, 0.030040063858032227, 0.029724672317504884, 0.029503488540649415, 0.02959974479675293, 0.029951391220092775, 0.030121919631958007, 0.029962656021118163, 0.030168544769287108, 0.030272287368774416, 0.030215648651123046, 0.030045824050903322, 0.030067615509033203, 0.03006185531616211, 0.030034656524658202, 0.0300214729309082, 0.030553375244140625, 0.03001638412475586, 0.030294015884399415, 0.0302608642578125, 0.03039244842529297, 0.030893951416015624, 0.03100454330444336, 0.03044937515258789, 0.03053443145751953, 0.030562271118164064, 0.030666784286499025, 0.031053823471069338, 0.030410463333129883, 0.030341279983520507, 0.030695552825927733, 0.03063155174255371, 0.030656896591186523, 0.030756864547729492, 0.03167027282714844, 0.030574464797973634, 0.03072012710571289, 0.03052463912963867, 0.030728992462158204, 0.031059455871582032, 0.030797536849975587, 0.030519872665405273, 0.0305828800201416, 0.030630016326904298, 0.030220287322998047, 0.030007295608520508, 0.030038015365600586, 0.0328135986328125, 0.03022233581542969, 0.02997983932495117, 0.030430015563964845, 0.03312844848632813, 0.03140950393676758, 0.030319263458251953, 0.030099456787109374, 0.029825023651123047, 0.02958950424194336, 0.029716480255126954, 0.03003523254394531, 0.02948579216003418, 0.02978553581237793, 0.029833408355712892, 0.029782400131225586, 0.029634559631347656, 0.029586559295654298, 0.02984659194946289, 0.03013612747192383, 0.03012531280517578, 0.030110176086425782, 0.03011612892150879, 0.03003392028808594, 0.029754623413085938, 0.02961859130859375, 0.031641120910644534, 0.03035219192504883, 0.029744384765625, 0.0300053768157959, 0.029936384201049805, 0.02981670379638672, 0.029741056442260744, 0.029849311828613282, 0.030029727935791017, 0.029796415328979493, 0.029763391494750976, 0.029919647216796876, 0.030354816436767577, 0.030196319580078124, 0.02982310485839844, 0.02981260871887207, 0.029968095779418946, 0.03043951988220215, 0.029894975662231444, 0.02999622344970703, 0.029813568115234376, 0.030148672103881835, 0.030187456130981446, 0.030225887298583984, 0.03003878402709961, 0.03015452766418457, 0.03017932891845703, 0.030459552764892577, 0.030379648208618163, 0.030444255828857424, 0.030461759567260743, 0.030400608062744142, 0.03146275138854981, 0.032117694854736326, 0.030817888259887696, 0.03131619262695313, 0.03060918426513672, 0.031135839462280275, 0.03052947235107422, 0.030322751998901366, 0.030492671966552733, 0.030754175186157227, 0.03046272087097168, 0.033539104461669925, 0.030597984313964845, 0.03172761535644531, 0.03074662399291992, 0.031956703186035156, 0.031353120803833005, 0.030352928161621093, 0.0298951358795166, 0.02976153564453125, 0.031053823471069338, 0.029845279693603517, 0.02982464027404785, 0.029753952026367186, 0.029446144104003907, 0.029144512176513673, 0.029117151260375975, 0.029109407424926757, 0.02908844757080078, 0.029038591384887694, 0.029066911697387697, 0.029027999877929686, 0.02897737693786621, 0.02913942337036133, 0.029352960586547853, 0.029248287200927734, 0.03015497589111328, 0.030650367736816408, 0.030121984481811522, 0.02994611167907715, 0.02981452751159668, 0.029454336166381836, 0.029177087783813477, 0.029346559524536135, 
0.029274112701416017, 0.029747200012207032, 0.029337600708007814, 0.02939673614501953, 0.029191936492919922, 0.02941798400878906, 0.029132352828979493, 0.029137344360351564, 0.029148160934448244, 0.029645471572875975, 0.030146848678588866, 0.03027078437805176, 0.0303721923828125, 0.03037161636352539, 0.029995647430419922, 0.030144512176513674, 0.029380607604980468, 0.02953990364074707, 0.02925312042236328, 0.029225536346435547, 0.02908812713623047, 0.029171072006225585, 0.02915977668762207, 0.029172000885009767, 0.03607721710205078, 0.031161344528198243, 0.030402559280395508, 0.030726144790649414, 0.030373888015747072, 0.03018547248840332, 0.031059839248657226, 0.0296646728515625, 0.030405344009399413, 0.030150432586669922, 0.030270944595336913, 0.03034601593017578, 0.03042505645751953, 0.03062918472290039, 0.03074892807006836, 0.030724544525146485, 0.030691551208496093, 0.030666528701782228, 0.030697471618652345, 0.030633983612060548, 0.030451711654663087, 0.030605024337768554, 0.030593311309814453, 0.03044175910949707, 0.030468864440917967, 0.03056662368774414, 0.030532352447509764, 0.03060940742492676, 0.03096985626220703, 0.03058483123779297, 0.03120742416381836, 0.030673952102661134, 0.030624736785888673, 0.030513151168823242, 0.0304167366027832, 0.0302675838470459, 0.029951423645019532, 0.030043872833251953, 0.0298536319732666, 0.029813631057739258, 0.029740991592407225, 0.030238143920898436, 0.029778560638427733, 0.029693952560424806, 0.030902271270751954, 0.030255104064941408, 0.030251007080078125, 0.03014633560180664, 0.030308223724365233, 0.029878591537475584, 0.029650976181030273, 0.029572479248046873, 0.02969254493713379, 0.02955580711364746, 0.030058496475219725, 0.029608991622924803, 0.030372831344604494, 0.031525152206420895, 0.030296607971191405, 0.029773920059204102, 0.030176799774169923, 0.02967190361022949, 0.029900800704956054, 0.030562559127807618, 0.02984934425354004, 0.029457887649536132, 0.029769535064697265, 0.029928159713745118, 0.030232288360595702, 0.029323551177978517, 0.029793664932250975, 0.02940787124633789, 0.029423519134521483, 0.029452384948730467, 0.030261247634887696, 0.0305930233001709, 0.030457855224609375, 0.03030966377258301, 0.03032143974304199, 0.029996992111206055, 0.0295731201171875, 0.029466623306274413, 0.029237152099609375, 0.0294421443939209, 0.02977791976928711, 0.029549760818481444, 0.029331584930419922, 0.029870784759521485, 0.030295391082763672, 0.029911712646484376, 0.03063596725463867, 0.03024492835998535, 0.03029769515991211, 0.03010806465148926, 0.030234624862670898, 0.03023244857788086, 0.03024028778076172, 0.03069615936279297, 0.030556032180786133, 0.03025833511352539, 0.030364511489868164, 0.03032268714904785, 0.030289920806884765, 0.0300579833984375, 0.03014713668823242, 0.030193599700927734, 0.030340896606445313, 0.030306528091430664, 0.030437376022338865, 0.030220287322998047, 0.030422336578369142, 0.030309055328369142, 0.030322656631469727, 0.030199840545654298, 0.03033420753479004, 0.03025814437866211, 0.030395296096801756, 0.030511615753173828, 0.030287456512451173, 0.02955753517150879, 0.030259199142456054, 0.03011337661743164, 0.030341535568237304, 0.029748319625854492, 0.03011062431335449, 0.02978201675415039, 0.030707231521606447, 0.02985603141784668, 0.030085823059082032, 0.029706239700317383, 0.029693952560424806, 0.02991923141479492, 0.02981180763244629, 0.029794912338256836, 0.029935039520263673, 0.030290815353393556, 0.029920448303222658, 0.029885248184204103, 0.030122112274169922, 0.029937088012695314, 
0.02961862373352051, 0.02953625679016113, 0.029621984481811522, 0.029270303726196288, 0.029302783966064453, 0.02951372718811035, 0.029501279830932616, 0.029347999572753906, 0.030097183227539064, 0.02998089599609375, 0.030412607192993164, 0.03051247978210449, 0.030390911102294922, 0.030271455764770507, 0.030892288208007813, 0.030330495834350588, 0.030062976837158202, 0.030293088912963867, 0.030241695404052735, 0.03014588737487793, 0.030069408416748048, 0.030640127182006836, 0.030138368606567382, 0.02999091148376465, 0.030154367446899415, 0.030277055740356447, 0.029973440170288086, 0.02996544075012207, 0.03018227195739746, 0.03034316825866699, 0.030504543304443358, 0.030414848327636718, 0.030081600189208985, 0.029887712478637696, 0.029780607223510742, 0.029892608642578124, 0.03032678413391113, 0.030405792236328125, 0.03063075256347656, 0.03067193603515625, 0.0313306884765625, 0.03149676895141602, 0.030713855743408205, 0.03196723175048828, 0.030928415298461916, 0.030880224227905272, 0.03334944152832031, 0.030935232162475585, 0.030488576889038086, 0.031073888778686522, 0.030202272415161133, 0.03042508888244629, 0.030476287841796876, 0.030324575424194335, 0.03048464012145996, 0.031204479217529297, 0.030380287170410157, 0.030325376510620117, 0.030095392227172852, 0.030054527282714842, 0.03214102554321289, 0.030268831253051756, 0.029962976455688475, 0.02958470344543457, 0.02954924774169922, 0.029471872329711914, 0.029723007202148436, 0.02965318489074707, 0.029503807067871094, 0.029632511138916014, 0.02959974479675293, 0.0297042236328125, 0.029775840759277344, 0.029822975158691405, 0.02982707214355469, 0.02958745574951172, 0.02966067123413086, 0.029731327056884766, 0.029639904022216796, 0.03023072052001953, 0.02969455909729004, 0.029650943756103516, 0.029388799667358398, 0.029288448333740235, 0.029300159454345703, 0.02973548889160156, 0.030031871795654298, 0.029841407775878907, 0.030092832565307617, 0.030171104431152344, 0.03017900848388672, 0.030012224197387697, 0.03137273597717285, 0.03243244934082031, 0.029967744827270507, 0.02961417579650879, 0.02957391929626465, 0.029663232803344725, 0.02960588836669922, 0.029711456298828126, 0.03009222412109375, 0.029931488037109374, 0.029850976943969727, 0.030073503494262695, 0.030000768661499023, 0.03001350402832031, 0.02994550323486328, 0.029940383911132813, 0.029755392074584962, 0.029646848678588866, 0.03025263977050781, 0.030159263610839843, 0.030766304016113282, 0.030427520751953124, 0.03034169578552246, 0.030438655853271483, 0.03043609619140625, 0.030310144424438478, 0.030457151412963866, 0.03025948715209961, 0.03032534408569336, 0.030351295471191406, 0.030457984924316405, 0.030357183456420897, 0.030318912506103517, 0.030263296127319338, 0.030308351516723633, 0.03027939224243164, 0.030592607498168944, 0.030286495208740234, 0.03029609680175781, 0.03030191993713379, 0.030442848205566406, 0.03040287971496582, 0.030372480392456054, 0.03034854316711426, 0.030376096725463868, 0.0326907844543457, 0.030641216278076172, 0.029911231994628907, 0.029751903533935548, 0.0295118408203125, 0.029531776428222658, 0.029945663452148438, 0.029612607955932617, 0.029659135818481445, 0.030253055572509766, 0.02998681640625, 0.029995008468627928, 0.030125280380249024, 0.030415647506713866, 0.02988595199584961, 0.03000275230407715, 0.029937728881835938, 0.030157247543334962, 0.029712831497192383, 0.029604032516479493, 0.02938230323791504, 0.02953353691101074, 0.02948080062866211, 0.03003443145751953, 0.029743263244628906, 0.029941856384277345, 0.03022870445251465, 
0.030332927703857423, 0.030121984481811522, 0.0301496639251709, 0.029737951278686524, 0.029608991622924803, 0.029509695053100585, 0.029567903518676757, 0.02953625679016113, 0.029878047943115233, 0.030072256088256834, 0.030046367645263673, 0.03093286323547363, 0.030395584106445314, 0.030153663635253906, 0.03016089630126953, 0.030114015579223632, 0.029929248809814454, 0.029468704223632812, 0.029554176330566406, 0.029436447143554687, 0.029460416793823243, 0.029993120193481444, 0.030185312271118165, 0.030063711166381835, 0.030452543258666993, 0.030347360610961913, 0.030359424591064454, 0.030214271545410155, 0.030494527816772463, 0.03057254409790039, 0.03004640007019043, 0.030040063858032227, 0.030523359298706056, 0.030780736923217773, 0.030468671798706055, 0.030453439712524413, 0.030441951751708985, 0.030451711654663087, 0.030410751342773438, 0.03057459259033203, 0.030395967483520508, 0.030413248062133788, 0.030379167556762697, 0.030392768859863282, 0.030478559494018554, 0.030447391510009764, 0.030369407653808595, 0.030289983749389647, 0.03035580825805664, 0.030615936279296874, 0.030416576385498047, 0.03034876823425293, 0.031877983093261716, 0.032573440551757815, 0.030557439804077147, 0.03061222457885742, 0.031016960144042968, 0.030611455917358397, 0.030390207290649413, 0.03079583930969238, 0.030473312377929686, 0.030247840881347656, 0.030003200531005858, 0.029865983963012696, 0.029671424865722655, 0.029715904235839842, 0.02972060775756836, 0.029571647644042968, 0.02958460807800293, 0.029520639419555662, 0.029822399139404297, 0.02959008026123047, 0.029546432495117188, 0.029445375442504883]",tokens/s,33.120607212046714,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1085.898752,8106.47552,0.0,7711.227904,7603.953664,s,1,18.641384765625,18.641384765625,0.0,18.641384765625,18.641384765625,18.641384765625,18.641384765625,[18.641384765625],,kWh,0.00033724906177082703,3.719273954371149e-05,0.0001315656608080057,0.0005060074621225442,,MB,1238.069248,9870.180352,0.0,9462.349824,8756.504576,s,10,16.571733886718746,1.657173388671875,0.005695760134307078,1.6594805297851563,1.6617247680664062,1.6619040100097655,1.662047403564453,"[1.642461181640625, 1.65444482421875, 1.6555794677734375, 1.6585115966796875, 1.6541756591796875, 1.6610345458984375, 1.660449462890625, 1.662083251953125, 1.6616849365234374, 1.6613089599609374]",tokens/s,154.4799124521114,kWh,4.8202024260833316e-05,5.316259229355053e-06,3.2103109015799156e-05,8.562139250598752e-05,tokens/kWh,2989906.990616835,MB,1256.022016,9870.180352,0.0,9462.349824,8756.507136,s,10,82.34577734374999,8.234577734375,0.016255357341610887,8.241509277343749,8.25075615234375,8.251183740234374,8.251525810546875,"[8.2009775390625, 8.2143671875, 8.223310546875, 8.228544921875, 8.2426123046875, 8.24040625, 8.2453994140625, 8.24788671875, 8.251611328125, 
8.2506611328125]",tokens/s,7.650665526783283,kWh,0.0002409144947095827,2.657342281018311e-05,0.0001601282392136,0.0004276161567333656,tokens/kWh,147328.39021160468,,s,630,82.34075679016118,0.13069961395263674,0.0019530654863721322,0.13057984161376954,0.13235903625488282,0.1331479217529297,0.1400793441772461,"[0.13893551635742188, 0.12739881896972657, 0.1281716766357422, 0.1281764831542969, 0.12808396911621095, 0.12900732421875, 0.12900901794433595, 0.13369232177734375, 0.13002336120605468, 0.12795699310302736, 0.1282478790283203, 0.12825497436523436, 0.129184326171875, 0.12880531311035157, 0.13193011474609376, 0.13184819030761719, 0.13043276977539062, 0.12846925354003907, 0.12811241149902344, 0.1283865966796875, 0.1292335968017578, 0.13049856567382812, 0.13147955322265625, 0.13099417114257814, 0.12988134765625, 0.1286023406982422, 0.12846339416503907, 0.12916841125488282, 0.1301554870605469, 0.13091839599609376, 0.13138729858398437, 0.1306288604736328, 0.12922966003417968, 0.12963623046875, 0.12919171142578126, 0.1294476776123047, 0.13091845703125, 0.1312957763671875, 0.13156556701660158, 0.13001231384277342, 0.12954217529296874, 0.12889170837402344, 0.13035519409179688, 0.12991693115234376, 0.13176422119140624, 0.13158108520507814, 0.1312407989501953, 0.1301724853515625, 0.12928025817871094, 0.13052044677734376, 0.13092332458496095, 0.13003366088867188, 0.13144985961914063, 0.13050064086914062, 0.1318668212890625, 0.12899346923828126, 0.13061897277832032, 0.13022227478027343, 0.13028025817871094, 0.13159014892578125, 0.132210693359375, 0.13058805847167967, 0.13121315002441405, 0.13991322326660155, 0.1275998077392578, 0.1280213165283203, 0.12786447906494142, 0.12774230194091796, 0.1277675552368164, 0.13016575622558593, 0.13545875549316405, 0.1306542663574219, 0.1288970184326172, 0.12824986267089844, 0.12756537628173828, 0.12829945373535157, 0.12905062866210937, 0.13305145263671875, 0.13252703857421874, 0.1309327392578125, 0.12860826110839843, 0.12896617126464843, 0.1283973388671875, 0.1281212158203125, 0.13095698547363283, 0.132129150390625, 0.13204249572753907, 0.12965846252441407, 0.12837135314941406, 0.1293758087158203, 0.12868858337402345, 0.1301436767578125, 0.13119340515136718, 0.13201773071289064, 0.13140013122558594, 0.12929776000976562, 0.12912503051757812, 0.12901376342773438, 0.13061695861816405, 0.1302408905029297, 0.13229055786132812, 0.13236972045898437, 0.13076141357421875, 0.12902787780761718, 0.1294844512939453, 0.12975765991210939, 0.13076693725585936, 0.1304470977783203, 0.13214288330078125, 0.13153660583496093, 0.12992530822753906, 0.13021650695800782, 0.13024575805664063, 0.13055442810058593, 0.131830078125, 0.13075421142578125, 0.13242594909667968, 0.13120729064941405, 0.1299488983154297, 0.13001808166503906, 0.1299571228027344, 0.1305771484375, 0.13168412780761718, 0.1315616912841797, 0.13172735595703125, 0.13051235961914062, 0.14113037109375, 0.12770079803466797, 0.1281025848388672, 0.12817768859863282, 0.12807420349121093, 0.12819046020507813, 0.13012991333007812, 0.13605679321289063, 0.1302139129638672, 0.12912435913085937, 0.12855705261230468, 0.1283108215332031, 0.12955081176757813, 0.12886416625976563, 0.1329439697265625, 0.13226393127441408, 0.13094912719726562, 0.12882534790039063, 0.12912640380859375, 0.12847718811035155, 0.12873274230957032, 0.13159059143066407, 0.13233561706542968, 0.13138893127441406, 0.12994309997558592, 0.1294775695800781, 0.12870761108398437, 0.1293076171875, 0.13037132263183593, 0.13155743408203124, 0.13206716918945313, 
0.13080812072753906, 0.12963145446777344, 0.12936402893066407, 0.12912669372558594, 0.1300955810546875, 0.13120716857910156, 0.13225091552734375, 0.13093244934082032, 0.13088870239257813, 0.12988211059570312, 0.12900146484375, 0.12990821838378908, 0.1315527648925781, 0.13109075927734376, 0.1322421112060547, 0.13127430725097655, 0.1305072021484375, 0.12969334411621095, 0.12982716369628905, 0.13123583984375, 0.1306480712890625, 0.1312522277832031, 0.1315635223388672, 0.13078732299804688, 0.13080986022949218, 0.12949913024902343, 0.13116578674316406, 0.13121168518066406, 0.13136607360839844, 0.13177305603027345, 0.13169273376464843, 0.13022207641601563, 0.14015536499023437, 0.12814131164550782, 0.12783926391601563, 0.12821160888671876, 0.12814118957519532, 0.12914837646484376, 0.13038485717773438, 0.13575932312011718, 0.13007516479492187, 0.12905673217773436, 0.12838899230957032, 0.12839543151855468, 0.12823904418945312, 0.1293679656982422, 0.13350775146484375, 0.13243382263183595, 0.12989244079589843, 0.1287118377685547, 0.1287560272216797, 0.12811318969726562, 0.12890521240234376, 0.13169218444824218, 0.13325045776367186, 0.13136370849609375, 0.1295311737060547, 0.12929922485351564, 0.128901123046875, 0.1284931182861328, 0.13039231872558593, 0.13224159240722655, 0.1323702392578125, 0.1308605499267578, 0.13014291381835938, 0.1295064392089844, 0.1300693817138672, 0.12980224609375, 0.1313804473876953, 0.13241629028320312, 0.13214720153808593, 0.13099203491210937, 0.1301973114013672, 0.1295870361328125, 0.13003411865234374, 0.13066444396972657, 0.13156556701660158, 0.13164544677734374, 0.132347900390625, 0.13004595947265624, 0.13087129211425783, 0.12948069763183595, 0.13137510681152345, 0.13122122192382812, 0.13173788452148438, 0.13170384216308595, 0.1305425567626953, 0.13078486633300782, 0.1300413818359375, 0.1302959747314453, 0.13059756469726563, 0.1317069091796875, 0.13183180236816405, 0.13174374389648438, 0.13154917907714844, 0.14049481201171876, 0.1283507843017578, 0.12919430541992188, 0.1283706817626953, 0.12837673950195314, 0.12830915832519532, 0.13027468872070314, 0.13690963745117188, 0.13102284240722656, 0.1295626220703125, 0.12902400207519532, 0.12853363037109375, 0.12835311889648438, 0.13054556274414061, 0.13333721923828126, 0.13229055786132812, 0.13028457641601562, 0.13049728393554688, 0.1287006378173828, 0.12937152099609375, 0.12869007873535157, 0.131152099609375, 0.13243843078613282, 0.131874755859375, 0.1308625030517578, 0.13033139038085936, 0.12958924865722657, 0.13054566955566407, 0.12919923400878905, 0.13162342834472657, 0.13164991760253905, 0.13192752075195313, 0.13001373291015625, 0.13120863342285155, 0.13006402587890625, 0.1314231414794922, 0.12982272338867187, 0.13268569946289063, 0.13119850158691407, 0.13076130676269532, 0.13126783752441407, 0.13021852111816407, 0.13056431579589844, 0.13036338806152345, 0.13152870178222656, 0.13144009399414064, 0.13100086975097655, 0.13126246643066405, 0.130616455078125, 0.13119541931152343, 0.13120547485351564, 0.13072515869140625, 0.13122396850585938, 0.1311808624267578, 0.13150822448730468, 0.1304289245605469, 0.13058253479003906, 0.1306409912109375, 0.13108901977539061, 0.13035871887207032, 0.131402587890625, 0.13177037048339843, 0.13162413024902345, 0.13996461486816406, 0.12824986267089844, 0.12899690246582032, 0.12820938110351562, 0.1282027587890625, 0.12900338745117187, 0.13072543334960937, 0.135719482421875, 0.13090815734863281, 0.12924844360351562, 0.12854150390625, 0.12843008422851562, 0.12855705261230468, 0.13024870300292968, 
0.13279641723632812, 0.1326755828857422, 0.13027122497558594, 0.12992889404296876, 0.1286961212158203, 0.1288605194091797, 0.12978378295898438, 0.13254060363769532, 0.1318338623046875, 0.13180928039550782, 0.1308112030029297, 0.1294015350341797, 0.12876153564453124, 0.12974432373046876, 0.13192076110839843, 0.1318666229248047, 0.13188710021972655, 0.13182917785644532, 0.13013241577148438, 0.1293429718017578, 0.1296328887939453, 0.13080166625976564, 0.13147048950195311, 0.13216444396972657, 0.1321697235107422, 0.13107814025878906, 0.12951513671875, 0.12986105346679688, 0.1301329345703125, 0.13050221252441407, 0.13167864990234374, 0.13249932861328126, 0.131217529296875, 0.1313846435546875, 0.12980653381347657, 0.1308134765625, 0.12995606994628905, 0.13157452392578126, 0.13173146057128907, 0.13190943908691405, 0.13118278503417968, 0.13089727783203126, 0.1306956787109375, 0.13019354248046874, 0.13020159912109375, 0.13052291870117189, 0.13164361572265626, 0.1316532440185547, 0.13308735656738283, 0.14012620544433593, 0.12829696655273437, 0.12820457458496093, 0.12826751708984374, 0.12816073608398437, 0.12899737548828125, 0.13085081481933594, 0.13553567504882813, 0.1309697265625, 0.12925340270996094, 0.12850051879882812, 0.12850994873046875, 0.12881846618652343, 0.13050726318359376, 0.13317747497558594, 0.13320101928710937, 0.13059170532226563, 0.130110595703125, 0.12919692993164061, 0.1286264953613281, 0.12942970275878907, 0.13166160583496095, 0.1332943115234375, 0.131778564453125, 0.13052674865722655, 0.1299471435546875, 0.12895443725585937, 0.12917648315429686, 0.13096754455566406, 0.1318968963623047, 0.13240777587890626, 0.13143650817871094, 0.13108837890625, 0.129544189453125, 0.1289581756591797, 0.13014653015136718, 0.13153286743164064, 0.13238636779785157, 0.13170938110351563, 0.13111488342285157, 0.13149606323242188, 0.12948419189453125, 0.13031024169921876, 0.13073049926757813, 0.132927490234375, 0.13182879638671874, 0.13113235473632812, 0.13153689575195313, 0.13080342102050782, 0.12925570678710938, 0.1304289245605469, 0.13183193969726562, 0.13213848876953124, 0.13127308654785155, 0.13165568542480469, 0.13163427734375, 0.13061827087402345, 0.13018464660644533, 0.13065887451171876, 0.131842041015625, 0.13173554992675782, 0.13281607055664063, 0.13068780517578124, 0.14088627624511718, 0.12847657775878907, 0.12828726196289061, 0.12817864990234376, 0.1282721252441406, 0.13027101135253907, 0.13066493225097656, 0.13762098693847657, 0.13073458862304688, 0.12884378051757814, 0.12850143432617187, 0.1285246124267578, 0.12947251892089845, 0.13043302917480468, 0.1336376953125, 0.13255520629882814, 0.13028483581542968, 0.1293585205078125, 0.1285509490966797, 0.13010943603515626, 0.1297097930908203, 0.13242396545410157, 0.13242274475097657, 0.13157635498046874, 0.12990882873535156, 0.12923695373535157, 0.12992953491210937, 0.1295474548339844, 0.13100099182128908, 0.13325328063964845, 0.13212608337402343, 0.13156005859375, 0.12980429077148437, 0.12916940307617186, 0.12946636962890626, 0.13043096923828126, 0.13245216369628907, 0.13192965698242187, 0.13177468872070314, 0.13058908081054688, 0.13014630126953125, 0.1291673583984375, 0.1298303680419922, 0.13090255737304687, 0.13227801513671875, 0.13193855285644532, 0.13237452697753907, 0.13075833129882813, 0.13074464416503906, 0.12919375610351563, 0.1314959411621094, 0.13160418701171875, 0.13327615356445313, 0.13225779724121095, 0.13110067749023438, 0.13025074768066405, 0.13086309814453126, 0.13005413818359374, 0.13144677734375, 0.131693603515625, 
0.13102998352050782, 0.1325137939453125, 0.1304718780517578, 0.1415303955078125, 0.1285926055908203, 0.12825190734863282, 0.1283656005859375, 0.12828361511230468, 0.1293097229003906, 0.13108460998535157, 0.13580560302734376, 0.13170396423339845, 0.12993417358398437, 0.12859187316894533, 0.12940083312988282, 0.12825907897949218, 0.12942437744140625, 0.13311180114746093, 0.1330708465576172, 0.13054937744140624, 0.12996237182617187, 0.12910797119140624, 0.12851405334472657, 0.12912828063964843, 0.1317111358642578, 0.1323970489501953, 0.13160652160644531, 0.13123487854003907, 0.12992813110351562, 0.12900965881347656, 0.12895436096191407, 0.13127679443359375, 0.13221612548828124, 0.131932861328125, 0.13253631591796874, 0.13065414428710936, 0.13003372192382812, 0.12951962280273438, 0.130702392578125, 0.13098080444335938, 0.13345587158203126, 0.13215335083007812, 0.13170687866210937, 0.1302650909423828, 0.13012991333007812, 0.12970188903808594, 0.13027122497558594, 0.1325972442626953, 0.13206723022460937, 0.1317255096435547, 0.1314349060058594, 0.13033401489257812, 0.13015315246582032, 0.13065362548828124, 0.13179058837890625, 0.13291807556152344, 0.1315450897216797, 0.1315691223144531, 0.13074858093261718, 0.13038812255859375, 0.12999293518066407, 0.13138534545898437, 0.13204071044921875, 0.1320028533935547, 0.1316414031982422, 0.13171580505371094, 0.14180744934082032, 0.12851321411132813, 0.12808647155761718, 0.12903887939453124, 0.12826214599609376, 0.12901951599121095, 0.13128533935546874, 0.13619609069824218, 0.13124610900878905, 0.1289318389892578, 0.129225830078125, 0.12940176391601563, 0.12878224182128906, 0.1296315155029297, 0.1337425537109375, 0.1322852783203125, 0.13041664123535157, 0.12938380432128907, 0.12946905517578125, 0.12852345275878907, 0.1294569549560547, 0.13196493530273437, 0.13230047607421874, 0.1317952575683594, 0.13015449523925782, 0.12972393798828125, 0.12955859375, 0.12914073181152344, 0.13074269104003905, 0.1320939483642578, 0.1321553955078125, 0.13203660583496094, 0.13028099060058593, 0.12952943420410157, 0.13014309692382814, 0.13009642028808593, 0.13128572082519532, 0.132115966796875, 0.13234768676757813, 0.1316829833984375, 0.12997843933105468, 0.13084262084960938, 0.13017219543457031, 0.13082083129882813, 0.13194444274902345, 0.1323534698486328, 0.13208018493652343, 0.13037158203125, 0.13044940185546874, 0.13119488525390624, 0.1305432891845703, 0.13164166259765625, 0.13235784912109375, 0.13192218017578125, 0.13110176086425782, 0.13091119384765626, 0.13022547912597657, 0.13100898742675782, 0.13138665771484376, 0.1316790466308594, 0.13319180297851563, 0.1308078155517578, 0.1314009246826172]",tokens/s,7.651132009941381,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) 
File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, 
**kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1281.196032,13202.55488,0.0,12807.307264,12661.927936,s,1,26.6992890625,26.6992890625,0.0,26.6992890625,26.6992890625,26.6992890625,26.6992890625,[26.6992890625],,kWh,0.0005687548774541673,6.273068353996872e-05,0.00021457433832600126,0.0008460598993201372,,MB,1247.15008,15727.525888,0.0,15319.69536,14319.896576,s,10,30.010053466796876,3.0010053466796878,0.008951754834617047,3.003243286132813,3.0097442138671875,3.0109288696289065,3.0118765942382812,"[2.980326416015625, 2.99000244140625, 2.999275146484375, 3.00148193359375, 3.001940673828125, 3.00460400390625, 3.00948095703125, 3.006282470703125, 3.0045458984375, 3.012113525390625]",tokens/s,85.30474638548658,kWh,8.732547033874993e-05,9.630788921964672e-06,5.8185046547999666e-05,0.00015514130580871426,tokens/kWh,1650108.581112771,MB,1264.951296,15727.525888,0.0,15319.69536,14319.899136,s,10,141.4810751953125,14.148107519531251,0.023793453007881927,14.15676513671875,14.1686619140625,14.1693158203125,14.1698389453125,"[14.0949990234375, 14.1190595703125, 14.1331025390625, 14.147650390625, 14.1553505859375, 14.1581796875, 14.167037109375, 14.1685166015625, 14.1672099609375, 14.1699697265625]",tokens/s,4.452892368327668,kWh,0.00041360347847291745,4.562475451827259e-05,0.0002747863309400009,0.0007340145639311909,tokens/kWh,85829.3596554657,,s,630,141.47523474121104,0.22456386466858883,0.001889570810858079,0.2246788330078125,0.22589389953613281,0.22682214126586914,0.2319333972167969,"[0.2324471435546875, 0.2196405792236328, 0.219787353515625, 0.22076824951171875, 0.2283184356689453, 0.2221222381591797, 0.21976272583007814, 0.22031964111328126, 0.22681251525878907, 0.22372528076171874, 0.22182499694824218, 0.2200743103027344, 0.22421708679199218, 0.22470416259765624, 0.22300502014160156, 0.22120387268066405, 0.22276754760742187, 0.2246222686767578, 0.22377897644042968, 0.2221419219970703, 0.22340489196777344, 0.22354124450683593, 0.224395263671875, 0.22332826232910155, 0.22270565795898437, 0.22311322021484375, 0.22372880554199218, 0.22398037719726563, 0.2229917755126953, 0.2231711730957031, 0.2239036865234375, 0.22449360656738282, 0.2237662353515625, 0.22275065612792969, 0.2237783966064453, 0.22423324584960938, 
0.22391871643066405, 0.22284042358398437, 0.22395094299316406, 0.22423782348632812, 0.224579833984375, 0.22318304443359374, 0.22387245178222656, 0.22389340209960937, 0.22454135131835937, 0.2239051513671875, 0.22337782287597657, 0.22498463439941407, 0.22399020385742188, 0.22456137084960937, 0.22343270874023438, 0.22483148193359376, 0.2247200927734375, 0.22466026306152342, 0.22391119384765626, 0.22427926635742187, 0.22459759521484374, 0.22507562255859376, 0.22501986694335938, 0.22428880310058594, 0.22444032287597657, 0.22502957153320313, 0.22499526977539064, 0.23144540405273437, 0.22001802062988282, 0.22074435424804686, 0.22213558959960938, 0.2287418518066406, 0.22117094421386718, 0.22103526306152343, 0.22161203002929689, 0.22724607849121095, 0.2250198974609375, 0.220727294921875, 0.22151980590820314, 0.22427244567871094, 0.22556057739257812, 0.22254182434082032, 0.21992988586425782, 0.22342886352539063, 0.22557743835449218, 0.22429490661621093, 0.22251686096191406, 0.22267462158203125, 0.22498374938964844, 0.22477133178710937, 0.22271160888671876, 0.22261241149902344, 0.22356378173828126, 0.22483558654785157, 0.22447514343261718, 0.22282444763183593, 0.22378445434570313, 0.22469068908691406, 0.22461427307128906, 0.22334994506835937, 0.2233189697265625, 0.22513253784179688, 0.22497894287109374, 0.22437274169921875, 0.2232340545654297, 0.224247802734375, 0.22429901123046875, 0.22535107421875, 0.22401699829101562, 0.2240491485595703, 0.22434144592285157, 0.2248709716796875, 0.2241903076171875, 0.22448541259765625, 0.2247467803955078, 0.2253173828125, 0.2234916534423828, 0.2248239288330078, 0.22452444458007811, 0.22557081604003906, 0.2240482177734375, 0.22503208923339843, 0.22499385070800781, 0.22450743103027343, 0.22586256408691407, 0.2245847625732422, 0.22550828552246094, 0.22479872131347656, 0.22526156616210938, 0.2250891876220703, 0.23152024841308594, 0.22093414306640624, 0.22102732849121093, 0.222308349609375, 0.22938111877441406, 0.22157926940917969, 0.22147891235351563, 0.2225145263671875, 0.22683001708984374, 0.22384739685058594, 0.22228582763671875, 0.2220789794921875, 0.225003173828125, 0.22556092834472657, 0.2225048370361328, 0.22222019958496095, 0.22343907165527344, 0.2248417205810547, 0.2246775665283203, 0.22250233459472657, 0.2222244873046875, 0.22530712890625, 0.22444876098632813, 0.22271142578125, 0.22270115661621093, 0.22557151794433594, 0.22450189208984375, 0.22414707946777343, 0.22291903686523437, 0.224310302734375, 0.22435939025878907, 0.22449151611328125, 0.22320742797851562, 0.22443008422851562, 0.2249337615966797, 0.2246165771484375, 0.2238525390625, 0.22365129089355468, 0.2252799072265625, 0.22477180480957032, 0.2246090850830078, 0.2243230438232422, 0.22420069885253907, 0.2259539794921875, 0.22430764770507813, 0.22465536499023436, 0.22428671264648437, 0.2249947204589844, 0.2245629119873047, 0.22524957275390625, 0.22414796447753907, 0.22529852294921876, 0.22443212890625, 0.2251376953125, 0.22569879150390626, 0.2255319061279297, 0.22521766662597656, 0.22504031372070313, 0.22498912048339845, 0.22451478576660155, 0.2256796417236328, 0.2253844451904297, 0.22534553527832032, 0.23324053955078125, 0.22112393188476562, 0.22178431701660156, 0.22286787414550782, 0.22900531005859376, 0.22270565795898437, 0.22107673645019532, 0.22109599304199218, 0.22687405395507812, 0.2239930877685547, 0.22275149536132813, 0.22196421813964845, 0.2246636199951172, 0.22564659118652344, 0.22376573181152343, 0.22240130615234374, 0.22340336608886718, 0.22565341186523438, 0.22392547607421875, 
0.2230955810546875, 0.22323587036132814, 0.22419683837890625, 0.22529638671875, 0.2239705352783203, 0.2235502471923828, 0.22418576049804687, 0.22466006469726563, 0.22449491882324218, 0.22421734619140626, 0.22416224670410156, 0.224810302734375, 0.22477008056640624, 0.22437510681152345, 0.2239184265136719, 0.2244071350097656, 0.2251976318359375, 0.22501589965820312, 0.22507798767089843, 0.22413725280761718, 0.22437408447265625, 0.22485801696777344, 0.22532115173339845, 0.2242525177001953, 0.224606201171875, 0.22522880554199218, 0.22497894287109374, 0.22443128967285156, 0.22531491088867187, 0.22527049255371093, 0.22495423889160157, 0.2251367645263672, 0.224257568359375, 0.2255385284423828, 0.22508543395996095, 0.22536175537109376, 0.22429302978515625, 0.2258534393310547, 0.22610099792480468, 0.22515327453613282, 0.2250260467529297, 0.22550909423828125, 0.22528598022460938, 0.22618489074707032, 0.23196646118164063, 0.22116860961914062, 0.2212575378417969, 0.22324652099609374, 0.23077069091796876, 0.22223052978515626, 0.22230221557617189, 0.2232580871582031, 0.22763165283203124, 0.22413107299804688, 0.2222344970703125, 0.2201719970703125, 0.22632899475097656, 0.22543331909179687, 0.22350624084472656, 0.22253334045410156, 0.22382601928710938, 0.22630262756347655, 0.22449130249023438, 0.22288978576660157, 0.22351907348632813, 0.22539884948730468, 0.22474137878417969, 0.2236600341796875, 0.22312675476074217, 0.22466435241699217, 0.22442095947265625, 0.22513346862792968, 0.22387303161621094, 0.22377186584472655, 0.2247196502685547, 0.22536601257324218, 0.22417543029785156, 0.22421119689941407, 0.22469635009765626, 0.22488278198242187, 0.2249566650390625, 0.2244524841308594, 0.22430943298339845, 0.2253367004394531, 0.2250916748046875, 0.2246614990234375, 0.2246661376953125, 0.22466061401367188, 0.22552790832519531, 0.22486819458007812, 0.2250467834472656, 0.22515939331054688, 0.22504701232910157, 0.22589768981933595, 0.22428953552246095, 0.2255462646484375, 0.22518783569335937, 0.22553919982910156, 0.22512728881835936, 0.22558889770507812, 0.22512879943847655, 0.22526771545410157, 0.22544178771972656, 0.22535935974121094, 0.22549264526367188, 0.22577853393554687, 0.2253020477294922, 0.23185244750976564, 0.22079283142089845, 0.22234072875976563, 0.22271629333496093, 0.23001863098144532, 0.22175379943847656, 0.22215475463867188, 0.2226175994873047, 0.2271285400390625, 0.22461247253417968, 0.22202024841308593, 0.22269541931152342, 0.22451814270019532, 0.22654348754882814, 0.2229757080078125, 0.22352528381347656, 0.22396214294433595, 0.22581961059570313, 0.22453811645507812, 0.22352716064453124, 0.22333465576171874, 0.22426966857910155, 0.22529910278320311, 0.22449151611328125, 0.2228632049560547, 0.2246800994873047, 0.22535562133789064, 0.2245771484375, 0.22423193359375, 0.22447068786621094, 0.22503794860839843, 0.224827392578125, 0.22387583923339843, 0.2243170623779297, 0.22539651489257811, 0.2250020751953125, 0.22440304565429686, 0.2241084442138672, 0.22481765747070312, 0.22545753479003905, 0.22515699768066405, 0.2248383026123047, 0.2249276123046875, 0.22499069213867187, 0.2252991943359375, 0.2254043884277344, 0.2246661376953125, 0.2248308410644531, 0.22503631591796874, 0.22677471923828124, 0.2239510040283203, 0.22541798400878907, 0.2252554168701172, 0.22547817993164063, 0.2250379180908203, 0.22535983276367189, 0.2256290588378906, 0.22555445861816406, 0.22521446228027345, 0.2260061798095703, 0.22530706787109375, 0.22559490966796875, 0.22492620849609374, 0.23259120178222656, 0.21991014099121095, 
0.2217697296142578, 0.22271772766113282, 0.23089993286132812, 0.2222469177246094, 0.22277638244628906, 0.2224055633544922, 0.2287840576171875, 0.22407994079589844, 0.22254591369628907, 0.22246604919433594, 0.22514688110351563, 0.22747750854492188, 0.2235146179199219, 0.22214451599121093, 0.22410182189941405, 0.22688825988769531, 0.22470831298828126, 0.22267727661132813, 0.22270976257324218, 0.22525132751464844, 0.2258934783935547, 0.22407267761230468, 0.22231033325195312, 0.224046142578125, 0.22664697265625, 0.22509085083007813, 0.22290505981445313, 0.22389144897460939, 0.22545730590820312, 0.2262327423095703, 0.2248667449951172, 0.2239180145263672, 0.22436582946777345, 0.2259443817138672, 0.22500265502929687, 0.22504124450683594, 0.22402662658691405, 0.2259578857421875, 0.22542279052734376, 0.22511782836914063, 0.22495309448242187, 0.22493775939941407, 0.22561782836914063, 0.22517768859863282, 0.22462506103515625, 0.225112060546875, 0.22542335510253905, 0.225291748046875, 0.2255235137939453, 0.22490390014648437, 0.22562611389160156, 0.2251138916015625, 0.22549478149414062, 0.2252518005371094, 0.22536582946777345, 0.22638584899902345, 0.22524134826660155, 0.22494569396972655, 0.22560963439941406, 0.22565330505371095, 0.226197509765625, 0.23421942138671875, 0.22039059448242188, 0.22210858154296875, 0.22327909851074218, 0.23018060302734375, 0.22202297973632812, 0.22196524047851562, 0.22236114501953125, 0.22815557861328126, 0.22447712707519532, 0.22192568969726562, 0.2222786865234375, 0.22530966186523438, 0.2262056884765625, 0.22341017150878906, 0.22300262451171876, 0.22377676391601561, 0.2258841552734375, 0.2245069122314453, 0.2237019805908203, 0.2238279724121094, 0.2247740173339844, 0.2249381103515625, 0.22417613220214844, 0.2231904296875, 0.2251260223388672, 0.22578070068359374, 0.22423545837402345, 0.2238239288330078, 0.22438835144042968, 0.22562185668945312, 0.22579405212402343, 0.22478839111328125, 0.2242827453613281, 0.2244259796142578, 0.22594239807128907, 0.22472291564941407, 0.22470863342285155, 0.22458323669433594, 0.22588665771484376, 0.22543974304199219, 0.22422518920898438, 0.2252857666015625, 0.2250695343017578, 0.22567695617675781, 0.224872802734375, 0.22512136840820313, 0.22593408203125, 0.22515728759765624, 0.22534941101074218, 0.2251589813232422, 0.2246475830078125, 0.225693603515625, 0.22559123229980468, 0.22547433471679687, 0.22512258911132813, 0.2254148864746094, 0.22606681823730468, 0.2258590393066406, 0.2257904968261719, 0.22487030029296876, 0.22608079528808595, 0.225880126953125, 0.23330960083007812, 0.22114154052734375, 0.2202235565185547, 0.22274252319335938, 0.2309836730957031, 0.22202557373046874, 0.22145858764648438, 0.22221142578125, 0.22857356262207032, 0.22456349182128907, 0.22302105712890624, 0.22230213928222656, 0.22545155334472655, 0.2260731201171875, 0.22359654235839843, 0.22198655700683595, 0.223652099609375, 0.22699186706542968, 0.2241938934326172, 0.22370191955566407, 0.22386892700195313, 0.2251643829345703, 0.22475663757324219, 0.22426953125, 0.22363011169433594, 0.2242150421142578, 0.2255134735107422, 0.22458770751953125, 0.2243665008544922, 0.224655517578125, 0.2250198974609375, 0.22512435913085938, 0.22465922546386718, 0.2242798767089844, 0.22480169677734374, 0.22549655151367187, 0.2248934783935547, 0.22468569946289063, 0.22468031311035155, 0.22482330322265626, 0.2264915771484375, 0.22448416137695312, 0.22480870056152344, 0.2247088623046875, 0.22511529541015626, 0.2257293701171875, 0.22532293701171874, 0.22539065551757811, 0.2253834228515625, 
0.22548329162597655, 0.225876220703125, 0.22532118225097655, 0.22504646301269532, 0.22627468872070314, 0.2252045440673828, 0.22542095947265625, 0.22495304870605468, 0.22564454650878907, 0.22573056030273436, 0.22612582397460937, 0.22438502502441407, 0.22556466674804687, 0.22642425537109376, 0.2348450927734375, 0.22040489196777344, 0.22131497192382812, 0.22313055419921876, 0.23079525756835936, 0.2227439727783203, 0.22160035705566405, 0.22258265686035156, 0.22779405212402343, 0.2258665008544922, 0.22231680297851564, 0.2226114501953125, 0.2258145294189453, 0.2262994842529297, 0.22331155395507812, 0.22269346618652344, 0.22327743530273436, 0.22597193908691407, 0.22574284362792968, 0.22327760314941406, 0.22271705627441407, 0.22527679443359375, 0.22582208251953126, 0.22337394714355469, 0.22437887573242188, 0.2248272705078125, 0.22492991638183593, 0.224719970703125, 0.22393670654296874, 0.2241616973876953, 0.2256468505859375, 0.2248294677734375, 0.22456101989746094, 0.2239698486328125, 0.22459193420410156, 0.22604393005371093, 0.22530458068847656, 0.22428671264648437, 0.2244850311279297, 0.22497520446777344, 0.2259661102294922, 0.22471267700195313, 0.22479872131347656, 0.2248371124267578, 0.2251146240234375, 0.225887451171875, 0.22505552673339843, 0.22526771545410157, 0.22529638671875, 0.22544998168945313, 0.22574867248535158, 0.22487196350097657, 0.2253168029785156, 0.22602383422851563, 0.22550706481933594, 0.22555308532714843, 0.22542745971679687, 0.22568048095703125, 0.22569804382324218, 0.22560015869140626, 0.22498240661621094, 0.22570457458496093, 0.22554566955566407]",tokens/s,4.45307619494258,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,807.604224,4683.923456,0.0,4288.67584,4213.842432,s,1,13.5787783203125,13.5787783203125,0.0,13.5787783203125,13.5787783203125,13.5787783203125,13.5787783203125,[13.5787783203125],,kWh,0.00018497083590416515,2.03963994005206e-05,6.22283831159991e-05,0.00026759561842068486,,MB,1192.230912,5107.54816,0.0,4699.717632,4535.11424,s,10,8.553767395019532,0.855376739501953,0.007683236508205055,0.8569793090820312,0.8610069885253906,0.8619868316650391,0.8627707061767578,"[0.8337443237304687, 0.852953857421875, 0.85672607421875, 0.8551661376953125, 0.8570130615234375, 0.8589288330078125, 0.8607892456054688, 0.8629666748046875, 0.856945556640625, 0.8585336303710938]",tokens/s,299.28333116593416,kWh,2.48531106444444e-05,2.7390895892942575e-06,1.6417744615666776e-05,4.400994484940543e-05,tokens/kWh,5816867.093925897,MB,1232.44544,5115.936768,0.0,4708.10624,4535.1168,s,10,40.23405615234375,4.023405615234375,0.0061507504575199655,4.025970825195312,4.029557641601563,4.030256481933594,4.030815554199219,"[4.015683837890625, 4.010197021484375, 4.019560546875, 4.02631640625, 4.025761962890625, 4.022799560546875, 4.0261796875, 4.027199462890625, 4.030955322265625, 
4.02940234375]",tokens/s,15.658376516017778,kWh,0.0001180998546972224,1.3028841168202256e-05,7.851052577133302e-05,0.00020963922163675768,tokens/kWh,300516.2846347533,,s,630,40.23095680236817,0.06385866159106059,0.0015793124110772148,0.06369526290893554,0.06435266647338868,0.06462068290710449,0.07443461128234863,"[0.0795832290649414, 0.06496665954589843, 0.06382553482055664, 0.0633240966796875, 0.06302150344848632, 0.06267267227172851, 0.06260348892211914, 0.06262319946289062, 0.06255990219116211, 0.06251110458374023, 0.0625406723022461, 0.0626558723449707, 0.0636995849609375, 0.06333849716186524, 0.06316032028198242, 0.0640646743774414, 0.06386947250366211, 0.06376176071166992, 0.06457443237304687, 0.06376364898681641, 0.06333523178100586, 0.06309888076782226, 0.0625459213256836, 0.0625172462463379, 0.06259507369995117, 0.06265753555297851, 0.062499679565429685, 0.06303321456909179, 0.06385036849975587, 0.06337561416625977, 0.0632762565612793, 0.06387731170654297, 0.06397161483764649, 0.06438889312744141, 0.06388556671142578, 0.06335715103149414, 0.06384867095947265, 0.06398745727539062, 0.06370655822753907, 0.0633639030456543, 0.06284284973144531, 0.06338313674926757, 0.06284649658203124, 0.06317763137817382, 0.06418812561035156, 0.06393593597412109, 0.06363631820678711, 0.0637050895690918, 0.06414070129394531, 0.06332067108154296, 0.0645630111694336, 0.06414915466308593, 0.06368515014648438, 0.063246337890625, 0.0632828483581543, 0.06490048217773438, 0.06420988464355469, 0.06366207885742188, 0.06340329742431641, 0.06301123046875, 0.06356784057617187, 0.06435033416748047, 0.06386483383178711, 0.07312252807617188, 0.06415724945068359, 0.0634764175415039, 0.06313958358764649, 0.06327705764770508, 0.06235340881347656, 0.06369859313964844, 0.06333190536499024, 0.06303577423095703, 0.06275542449951171, 0.062441280364990234, 0.06250700759887695, 0.062476287841796874, 0.0642231674194336, 0.0637768325805664, 0.06471459197998047, 0.06404045104980469, 0.06379996871948242, 0.06357401657104492, 0.06384569549560547, 0.06351119995117188, 0.06330972671508789, 0.0628040657043457, 0.06274051284790039, 0.06330368041992188, 0.06374560165405273, 0.06343718338012695, 0.06299619293212891, 0.06276287841796875, 0.06390422439575195, 0.06333440017700195, 0.0637248649597168, 0.0641173095703125, 0.06391158294677735, 0.06370352172851562, 0.06410854339599609, 0.0637393913269043, 0.06341856002807617, 0.0632111358642578, 0.06312825775146484, 0.06380915069580079, 0.0633081283569336, 0.06303696060180664, 0.0627143669128418, 0.06400780487060546, 0.06390412902832031, 0.0633733139038086, 0.06342009735107422, 0.06392863845825195, 0.0635431022644043, 0.06429920196533204, 0.06409830474853516, 0.06335279846191406, 0.06341020965576172, 0.06396627044677734, 0.06368966293334961, 0.06398102569580078, 0.06364543914794922, 0.06342086410522461, 0.06347398376464844, 0.06394473648071289, 0.06361280059814453, 0.06330364990234374, 0.07437910461425781, 0.06458464050292968, 0.06365695953369141, 0.06322102355957031, 0.06286921691894531, 0.0636409912109375, 0.06352137756347656, 0.06316646575927734, 0.06252665710449219, 0.06272697448730469, 0.06378086471557617, 0.0632845115661621, 0.06389014434814454, 0.06371680068969726, 0.06330182266235351, 0.06348393630981446, 0.06390819168090821, 0.06345523071289062, 0.06521616363525391, 0.06425122833251953, 0.06332115173339843, 0.06318073654174805, 0.06293289566040039, 0.06377251052856445, 0.0633182716369629, 0.06302505493164062, 0.06321734237670898, 0.06329385757446289, 0.06403616333007812, 
0.0636110725402832, 0.06324684906005859, 0.06309478378295899, 0.06383206558227539, 0.0639360008239746, 0.06431385803222656, 0.06393446350097656, 0.0636467514038086, 0.06354838562011719, 0.06402047729492187, 0.06362931060791016, 0.06343270492553711, 0.0631596794128418, 0.06395967864990235, 0.063825439453125, 0.06346185684204102, 0.06345913696289063, 0.06400364685058593, 0.06367295837402344, 0.06465948486328126, 0.06430003356933593, 0.06346441650390625, 0.06420480346679687, 0.0636701774597168, 0.06337340927124023, 0.0633507843017578, 0.06420025634765625, 0.06356832122802734, 0.0633481903076172, 0.06321763229370117, 0.06413497924804687, 0.06366396713256836, 0.0634090576171875, 0.06420233917236329, 0.07695600128173828, 0.06464921569824218, 0.06372963333129883, 0.06340790557861328, 0.06316249465942383, 0.06255628967285157, 0.06267497634887695, 0.06326800155639649, 0.06375507354736328, 0.06320876693725586, 0.06297875213623047, 0.06274383926391601, 0.06377510452270507, 0.06346550369262695, 0.06382009506225586, 0.06616886138916016, 0.06432262420654297, 0.06374697494506835, 0.06332412719726563, 0.06311939239501953, 0.0638914566040039, 0.06340403366088868, 0.06329958343505859, 0.0628936653137207, 0.06392464065551758, 0.06333030319213867, 0.0631621437072754, 0.06397974395751953, 0.0635590705871582, 0.06332851028442382, 0.06389139175415039, 0.06386633682250976, 0.0639119987487793, 0.06394355010986329, 0.06377459335327149, 0.06328537750244141, 0.06301283264160157, 0.06409219360351562, 0.06359775924682617, 0.06438790130615234, 0.06381568145751954, 0.0634511375427246, 0.06315766525268554, 0.06394326400756836, 0.06369894409179687, 0.06428444671630859, 0.06386825561523438, 0.06373260879516601, 0.06383580780029297, 0.06431308746337891, 0.06373430252075195, 0.06334265518188477, 0.06407782745361328, 0.0639815673828125, 0.06369865417480469, 0.06437328338623047, 0.0643663330078125, 0.06412249755859376, 0.06380992126464843, 0.06351033782958984, 0.06418450927734375, 0.06368438339233398, 0.06358780670166016, 0.07429385375976562, 0.06462054443359375, 0.06384022521972656, 0.06342838287353515, 0.06279193496704101, 0.06415468597412109, 0.06344595336914062, 0.06334156799316407, 0.06272905731201171, 0.06370470428466797, 0.06331856155395507, 0.06311731338500977, 0.0627341423034668, 0.06273763275146485, 0.06359139251708984, 0.06443993377685547, 0.06490528106689453, 0.06442208099365235, 0.06388953781127929, 0.06334627151489258, 0.0631624641418457, 0.06402285003662109, 0.06332956695556641, 0.0637993278503418, 0.06358291244506836, 0.0633133773803711, 0.06330217742919922, 0.0638579216003418, 0.06359116744995118, 0.06416588592529297, 0.06381510543823242, 0.06354585647583008, 0.06401958465576171, 0.06356268692016602, 0.06339142227172852, 0.06431161499023437, 0.06392012786865234, 0.0636701774597168, 0.063587646484375, 0.06413507080078125, 0.06363958358764649, 0.06346428680419922, 0.06417203521728515, 0.06385868835449218, 0.06366412734985352, 0.06375804901123047, 0.06428294372558593, 0.06409552001953125, 0.06460867309570313, 0.0641416015625, 0.06375116729736328, 0.06361804962158203, 0.06415071868896484, 0.06364652633666992, 0.06332387161254883, 0.06406172943115235, 0.06379520034790039, 0.06346108627319336, 0.06395318222045898, 0.06386198425292969, 0.06359939193725586, 0.06357932662963867, 0.06406358337402343, 0.07445728302001953, 0.06475315093994141, 0.06353561782836914, 0.06343024063110352, 0.06279529571533203, 0.06280928039550782, 0.06386246490478516, 0.06349619293212891, 0.06315145492553711, 0.06297868728637696, 
0.06260678482055664, 0.06386134338378906, 0.06337068939208984, 0.06317091369628906, 0.06400994873046875, 0.06505027008056641, 0.06404787445068359, 0.06393836975097657, 0.06415302276611329, 0.06372438430786133, 0.06333440017700195, 0.06293724822998047, 0.06370902252197265, 0.0632176628112793, 0.06301907348632813, 0.0629554557800293, 0.06402566528320312, 0.06337631988525391, 0.06329344177246093, 0.06374115371704102, 0.06409072113037109, 0.06431254577636719, 0.06407881927490235, 0.06416793823242188, 0.06366207885742188, 0.06313478469848632, 0.06464752197265625, 0.06399814224243164, 0.06348144149780273, 0.06342851257324218, 0.06331836700439453, 0.06376710510253907, 0.06335283279418945, 0.06342860794067383, 0.06381523132324218, 0.0637014389038086, 0.06395302581787109, 0.06361619186401367, 0.06457414245605468, 0.06394684982299804, 0.06339369583129882, 0.06410610961914062, 0.06351248168945313, 0.0634150733947754, 0.06509331512451172, 0.0639870719909668, 0.06342105484008789, 0.06325411224365235, 0.06406934356689453, 0.06366073608398437, 0.06332527923583985, 0.0642159652709961, 0.0636948471069336, 0.07531222534179688, 0.06462079620361329, 0.06379996871948242, 0.06337696075439453, 0.06303801727294922, 0.0636102409362793, 0.06324070358276367, 0.06300991821289062, 0.06269583892822266, 0.06371737670898438, 0.06325500869750976, 0.06313926315307618, 0.06258652877807618, 0.06288272094726563, 0.06478438568115234, 0.06436438751220704, 0.06378307342529296, 0.06399107360839844, 0.06361072158813477, 0.06359132766723632, 0.06384409713745118, 0.06353737640380859, 0.06320115280151367, 0.06280121612548828, 0.06375711822509765, 0.06329139328002929, 0.06425775909423828, 0.06379312133789063, 0.06327436828613281, 0.06305385589599609, 0.0638554573059082, 0.06383967971801757, 0.06440409851074219, 0.06389990234375, 0.06346867370605469, 0.06398425674438477, 0.06354102325439454, 0.0631764793395996, 0.06457929229736328, 0.06410518646240235, 0.0635945930480957, 0.06369612884521485, 0.06408448028564454, 0.06363561630249023, 0.06340563201904297, 0.06410886383056641, 0.06390182495117187, 0.06461974334716797, 0.06414415740966797, 0.06425395202636719, 0.06407373046875, 0.06354534530639648, 0.06346342468261719, 0.06413311767578125, 0.06381158447265625, 0.06408956909179687, 0.0638100814819336, 0.06348185729980468, 0.06382976150512695, 0.0641170883178711, 0.06343670272827148, 0.0639447021484375, 0.06460633850097657, 0.07571673583984374, 0.06435225677490235, 0.06355267333984375, 0.06304240036010743, 0.06290633773803711, 0.06353475189208985, 0.06309747314453125, 0.06297369766235351, 0.06274867248535156, 0.06370950317382812, 0.06312419128417969, 0.06291292953491211, 0.0626324462890625, 0.06366758346557617, 0.06419321441650391, 0.06503833770751953, 0.06456114959716797, 0.06379296112060547, 0.06356150436401367, 0.06330995178222656, 0.06420451354980469, 0.0638039665222168, 0.06333030319213867, 0.06318080139160157, 0.0638230094909668, 0.06338032150268555, 0.06447305297851562, 0.06386415863037109, 0.06330780792236328, 0.06332828903198243, 0.06437542724609376, 0.0640552978515625, 0.06458163452148437, 0.064052734375, 0.06363977432250977, 0.06350467300415039, 0.06399385452270508, 0.06348121643066407, 0.06449625396728516, 0.06398361587524413, 0.06338355255126953, 0.06317670440673828, 0.06400972747802734, 0.06347574234008789, 0.06323247909545898, 0.0639283218383789, 0.06360883331298828, 0.06386191940307617, 0.06383190536499024, 0.06400415802001953, 0.06360547256469727, 0.06441558074951172, 0.06400991821289062, 0.0636844482421875, 
0.0638902702331543, 0.06449533081054687, 0.06392214584350586, 0.06343702316284179, 0.06347558212280273, 0.0640857925415039, 0.06349427032470703, 0.06471900939941407, 0.06391334533691406, 0.07597491455078124, 0.0642171859741211, 0.06341791915893555, 0.06361747360229492, 0.06329296112060546, 0.06308911895751954, 0.06257247924804688, 0.06369635009765626, 0.06357852935791015, 0.06301417541503906, 0.06384118270874023, 0.06354742431640625, 0.06318025588989258, 0.06273247909545898, 0.06388518524169921, 0.06417996978759766, 0.06533542633056641, 0.06374662399291992, 0.0634511375427246, 0.06325417709350586, 0.0636399040222168, 0.06313129425048829, 0.06416121673583984, 0.06388214492797852, 0.06344294357299805, 0.06315795135498047, 0.06286515045166016, 0.06378515243530274, 0.06464374542236329, 0.06358537673950196, 0.06363590240478516, 0.06431145477294922, 0.06379868698120117, 0.06421977233886719, 0.064036865234375, 0.06374604797363281, 0.06342361450195312, 0.06404390716552734, 0.06351052856445312, 0.06409830474853516, 0.06376652908325195, 0.06351052856445312, 0.0638416976928711, 0.06405795288085937, 0.06359827041625976, 0.06482793426513672, 0.06409756469726563, 0.06391609573364258, 0.06394265747070313, 0.06387302398681641, 0.06441004943847656, 0.0641370849609375, 0.06354940795898438, 0.06423772430419922, 0.0638914566040039, 0.06347980880737304, 0.0645406723022461, 0.0639733772277832, 0.0636129264831543, 0.06380710220336915, 0.06404061126708985, 0.06463970947265625, 0.06415360260009766, 0.07515340423583984, 0.06424278259277344, 0.06361385726928712, 0.06319862365722656, 0.0627677116394043, 0.06386073684692382, 0.06337075042724609, 0.06318540954589844, 0.06317232131958007, 0.06365008163452149, 0.06380748748779297, 0.06350848007202148, 0.0631541748046875, 0.06285619354248047, 0.06392863845825195, 0.06398396682739257, 0.06446115112304687, 0.06371532821655274, 0.06376144027709961, 0.06311040115356445, 0.06364499282836913, 0.06317712020874024, 0.06383606338500977, 0.06353724670410156, 0.06316851043701172, 0.06351804733276367, 0.0635338897705078, 0.06326051330566407, 0.06312691116333008, 0.06395967864990235, 0.06397132873535157, 0.06395296096801757, 0.0638463363647461, 0.0638683853149414, 0.06458188629150391, 0.06406294250488281, 0.0636956787109375, 0.06351811218261719, 0.06378147125244141, 0.06360678482055664, 0.06482125091552735, 0.06421094512939453, 0.06389980697631836, 0.06352646255493163, 0.06325385665893554, 0.06406444549560547, 0.06366617584228515, 0.06444457244873047, 0.06416681671142578, 0.06398867034912109, 0.06407513427734375, 0.06455270385742187, 0.06422335815429688, 0.06396355056762695, 0.06360303878784179, 0.0640401611328125, 0.06465740966796875, 0.06404521942138672, 0.0636646728515625, 0.06338713455200196, 0.0640722885131836, 0.06435635375976563, 0.0642718734741211]",tokens/s,15.659582820633176,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2191.224832,7355.695104,0.0,6960.447488,6722.822144,s,1,16.2267978515625,16.2267978515625,0.0,16.2267978515625,16.2267978515625,16.2267978515625,16.2267978515625,[16.2267978515625],,kWh,0.0002513102884541619,2.7712902526142117e-05,9.369896384800458e-05,0.0003727221548283086,,MB,1736.82688,7921.926144,0.0,7514.095616,7161.403392,s,10,10.635327636718749,1.063532763671875,0.0065886517062641575,1.065108154296875,1.0696147094726562,1.0700051086425781,1.0703174279785157,"[1.0485753173828125, 1.0566400146484376, 1.0595382080078124, 1.06260791015625, 1.06538623046875, 1.0703955078125, 1.064830078125, 1.068911865234375, 1.0695279541015625, 1.06891455078125]",tokens/s,240.70720596905096,kWh,3.092942336749653e-05,3.410400895626253e-06,2.0545266436200472e-05,5.488509069932325e-05,tokens/kWh,4664290.369901066,MB,1740.906496,7924.023296,0.0,7516.192768,7161.405952,s,10,51.21002734375,5.121002734375,0.015476669790559302,5.1233251953125,5.136107666015625,5.138426879882812,5.140282250976562,"[5.08609130859375, 5.1046337890625, 5.11413916015625, 5.12413330078125, 5.1189990234375, 5.12251708984375, 5.134634765625, 5.12854052734375, 5.14074609375, 5.13559228515625]",tokens/s,12.302278141175202,kWh,0.00015038089679375162,1.6588737902042466e-05,9.990805214859788e-05,0.000266877686844392,tokens/kWh,236063.19713319952,,s,630,51.20619602966308,0.08127967623756045,0.0018860073454918297,0.08094657516479492,0.0823894271850586,0.08315366783142089,0.09381792877197266,"[0.0912479019165039, 0.07957987213134765, 0.07925555419921874, 0.07937433624267579, 0.08023040008544922, 0.08124620819091796, 0.08011135864257812, 0.08026547241210938, 0.08010137939453126, 0.08052531433105468, 0.08065638732910156, 0.08385330963134766, 0.0819240951538086, 0.08077295684814453, 0.08018905639648438, 0.07938921356201172, 0.0794227523803711, 0.079968994140625, 0.0804659194946289, 0.08001945495605468, 0.08020582580566406, 0.08038195037841797, 0.08086246490478516, 0.08194124603271484, 0.08153459167480469, 0.08074073791503907, 0.08018892669677734, 0.08021814727783202, 0.08018582153320312, 0.07973654174804687, 0.08006275177001954, 0.08021622467041016, 0.07965270233154297, 0.08005587005615235, 0.08111353302001953, 0.08077516937255859, 0.08155340576171875, 0.08121699523925781, 0.08037814331054688, 0.08100032043457031, 0.08027993774414062, 0.07969996643066406, 0.08012799835205078, 0.08027945709228515, 0.08017314910888672, 0.0802877426147461, 0.08030003356933593, 0.08144300842285156, 0.08111027526855469, 0.08107577514648437, 0.08076185607910157, 0.08099430084228515, 0.08072988891601562, 0.0804268798828125, 0.08029833221435546, 0.08038377380371094, 0.08031254577636719, 0.08053043365478516, 0.08080623626708984, 0.08154940795898437, 0.08113414764404298, 0.08108441925048829, 0.08129945373535157, 0.09435372924804687, 0.08006655883789063, 0.08025702667236329, 0.0800030746459961, 0.07945011138916015, 0.07933542633056641, 0.08026521301269532, 0.07999897766113281, 0.07947673797607421, 0.07996415710449219, 0.08011711883544922, 0.08263539123535156, 0.08238899230957031, 0.08151411437988282, 0.08077327728271484, 0.08043087768554688, 0.08017078399658203, 0.08007091522216797, 0.07976150512695312, 0.08062159729003907, 0.07995625305175781, 0.07945970916748046, 0.08088585662841796, 0.08200204467773438, 0.08191836547851562, 0.08275138854980468, 0.082753662109375, 0.08134393310546875, 0.08036822509765625, 0.08074233245849609, 0.08087347412109375, 0.08098127746582032, 0.08084963226318359, 
0.08083251190185547, 0.08084889221191406, 0.08141619110107422, 0.08219033813476563, 0.08148786926269531, 0.08098611450195313, 0.08090764617919922, 0.08050342559814454, 0.08007065582275391, 0.08074797058105469, 0.08034499359130859, 0.07982147216796875, 0.08024269104003906, 0.08149833679199218, 0.08098918151855469, 0.08154000091552735, 0.08147545623779297, 0.08153702545166015, 0.08103695678710937, 0.08090876770019531, 0.08038326263427735, 0.08059555053710937, 0.07993344116210938, 0.08026525115966797, 0.08102051544189454, 0.08106768035888672, 0.08053628540039062, 0.08138883209228516, 0.08159715270996094, 0.0815308837890625, 0.09604431915283203, 0.07986659240722656, 0.07929776000976563, 0.0799813461303711, 0.07997235107421875, 0.08001741027832031, 0.08000511932373047, 0.08023859405517578, 0.0799477767944336, 0.08005244445800781, 0.07999830627441407, 0.08309961700439453, 0.08236489868164062, 0.08140185546875, 0.08074034881591798, 0.08070931243896484, 0.08064851379394532, 0.080048095703125, 0.08004547119140625, 0.08055052947998047, 0.07995174407958984, 0.0802939224243164, 0.08087948608398438, 0.08234210968017579, 0.08290303802490234, 0.08256454467773437, 0.08187142181396484, 0.08080384063720703, 0.08158003234863281, 0.08092876434326172, 0.08075263977050781, 0.08085913848876954, 0.08078540802001953, 0.08086118316650391, 0.08097574615478516, 0.08206563568115234, 0.08169171142578124, 0.0811448974609375, 0.08075856018066406, 0.08094432067871093, 0.0808845443725586, 0.08079154968261719, 0.08033894348144531, 0.08024054718017579, 0.08033084869384766, 0.08025833892822265, 0.0807943344116211, 0.08167529296875, 0.08167318725585937, 0.0811599349975586, 0.0811087646484375, 0.08142281341552735, 0.08120524597167969, 0.08064205169677735, 0.08113094329833985, 0.0803702392578125, 0.08083455657958985, 0.08059903717041016, 0.081438720703125, 0.08112742614746093, 0.08179917144775391, 0.08218390655517578, 0.08176863861083984, 0.09386358642578126, 0.08034320068359375, 0.08071186828613282, 0.08060313415527344, 0.08064614105224609, 0.0806789093017578, 0.08063382720947265, 0.08141388702392578, 0.08049282836914062, 0.08069900512695312, 0.08066464233398438, 0.08392729949951172, 0.08181561279296876, 0.08127648162841797, 0.0806732177734375, 0.08010502624511719, 0.08056204986572266, 0.08040096282958985, 0.0801413116455078, 0.08054022216796874, 0.08021759796142579, 0.0800777587890625, 0.08030207824707031, 0.08222329711914063, 0.08170041656494141, 0.08155366516113281, 0.08152678680419922, 0.08099839782714843, 0.08121676635742188, 0.0801239013671875, 0.08035814666748047, 0.0809298553466797, 0.08049900817871093, 0.08088435363769532, 0.08118393707275391, 0.08090214538574218, 0.08159318542480469, 0.08166159820556641, 0.08193785858154297, 0.08154112243652344, 0.08229071807861328, 0.0824349136352539, 0.08188630676269532, 0.08117750549316406, 0.08098989105224609, 0.08193446350097656, 0.08127625274658203, 0.08158284759521485, 0.08118486022949219, 0.08104473876953125, 0.0820921630859375, 0.08099724578857422, 0.08199689483642578, 0.08092534637451172, 0.08036351776123046, 0.08081798553466797, 0.08098941040039062, 0.08083145904541016, 0.08105891418457031, 0.08137731170654297, 0.08109053039550781, 0.08191887664794922, 0.08187699127197266, 0.09404640197753907, 0.07928697967529297, 0.07999833679199218, 0.0803662109375, 0.08040447998046875, 0.0807383041381836, 0.08071939086914062, 0.08073574066162109, 0.08061846160888672, 0.0807356185913086, 0.08130214691162109, 0.0839527359008789, 0.08259196472167969, 0.08088355255126953, 
0.08041353607177734, 0.08017839813232422, 0.08009315490722656, 0.0800889892578125, 0.0802514877319336, 0.08008889770507813, 0.07961446380615235, 0.07999839782714843, 0.08139817810058594, 0.08286796569824219, 0.08185488128662109, 0.08163766479492188, 0.08093052673339844, 0.08148947143554687, 0.0803741455078125, 0.08015641784667969, 0.08052345275878907, 0.08005830383300781, 0.0802220458984375, 0.0802360610961914, 0.08145798492431641, 0.08279158020019531, 0.08207154846191406, 0.0821277084350586, 0.0809697265625, 0.08168361663818359, 0.08302012634277343, 0.08106400299072265, 0.0809265899658203, 0.08097843170166015, 0.08104761505126953, 0.08058060455322266, 0.08158108520507812, 0.08114454650878906, 0.08198783874511718, 0.08113561248779297, 0.08085298919677734, 0.08212207794189454, 0.08103734588623047, 0.08138921356201172, 0.08069529724121094, 0.08045053100585937, 0.08029698944091797, 0.08029827117919922, 0.08156671905517578, 0.08183702087402343, 0.08178556823730469, 0.08175122833251953, 0.08110572814941407, 0.09370614624023438, 0.0801632308959961, 0.07929241943359375, 0.07926579284667969, 0.08002355194091797, 0.08009449768066407, 0.07998111724853516, 0.07998876953125, 0.0799642562866211, 0.07994985961914063, 0.08021794891357421, 0.0842542724609375, 0.08363887786865234, 0.08105779266357421, 0.08039218902587891, 0.08009891510009766, 0.0801468505859375, 0.07946854400634766, 0.0804290542602539, 0.08014848327636719, 0.08015666961669922, 0.0804714584350586, 0.08099696350097656, 0.08280815887451172, 0.08282720184326171, 0.08182653045654296, 0.08161920166015625, 0.08140185546875, 0.0816394271850586, 0.08248079681396485, 0.08147138977050782, 0.08087776184082031, 0.08143462371826173, 0.08060253143310547, 0.08158035278320312, 0.08095568084716796, 0.08224944305419922, 0.08157129669189453, 0.08103404998779297, 0.08117190551757812, 0.08105769348144531, 0.08107433319091797, 0.080880126953125, 0.08102674865722656, 0.08062188720703126, 0.08083411407470703, 0.08096812438964844, 0.0809717788696289, 0.08224153900146484, 0.08102291107177734, 0.08228460693359375, 0.08112860870361328, 0.08231993865966797, 0.08285820770263672, 0.08117852783203125, 0.08146550750732422, 0.08096959686279297, 0.0809488296508789, 0.0810642547607422, 0.08105699157714844, 0.08156400299072265, 0.081572509765625, 0.08156489562988281, 0.0953056640625, 0.08049788665771485, 0.08119574737548828, 0.08082371520996094, 0.08141686248779297, 0.0819402847290039, 0.08168057250976563, 0.08127072143554688, 0.08120735931396485, 0.08073590087890625, 0.08126499176025391, 0.08307872009277344, 0.08157574462890625, 0.08087206268310547, 0.08043049621582031, 0.08100489807128906, 0.08065814208984375, 0.0807265625, 0.08051292419433594, 0.08069363403320312, 0.08039584350585938, 0.08049884796142578, 0.08065350341796874, 0.08160348510742188, 0.08153282928466797, 0.08117967987060547, 0.08124924468994141, 0.08089395141601563, 0.08121548461914062, 0.08085443115234375, 0.08053311920166016, 0.08105878448486328, 0.08021571350097656, 0.08121711730957032, 0.08159859466552734, 0.08120326232910156, 0.08275971221923828, 0.08342940521240234, 0.08084031677246094, 0.08166899108886719, 0.08103731536865234, 0.08099635314941406, 0.08088780975341797, 0.08082147216796876, 0.08049913787841798, 0.08091088104248047, 0.08100761413574219, 0.0810948486328125, 0.0820552978515625, 0.08169113922119141, 0.0815059814453125, 0.08112092590332032, 0.0813062744140625, 0.08131171417236328, 0.08157545471191406, 0.0815313949584961, 0.0807567367553711, 0.08158767700195313, 0.08153520202636719, 
0.08273273468017578, 0.08381094360351563, 0.08147334289550781, 0.08150444793701171, 0.09472480010986328, 0.08027340698242187, 0.08063590240478516, 0.08000921630859376, 0.08041887664794922, 0.08012179565429688, 0.08010546875, 0.08000224304199219, 0.08015750122070313, 0.08090825653076172, 0.08039356994628906, 0.0837083511352539, 0.08537522888183594, 0.08152268981933594, 0.08050892639160157, 0.08006034851074219, 0.08004409790039063, 0.08005427551269531, 0.08009318542480469, 0.08002355194091797, 0.08007868957519532, 0.08067292785644531, 0.08107405090332032, 0.0823235855102539, 0.08257331085205079, 0.08160160064697265, 0.08126528167724609, 0.08100281524658202, 0.08048025512695313, 0.08116425323486329, 0.08036271667480469, 0.08108934020996093, 0.08029727935791016, 0.0808414077758789, 0.08177641296386719, 0.08232895660400391, 0.08279926300048829, 0.08319789123535157, 0.08264320373535156, 0.08063919830322265, 0.08031263732910156, 0.0806503677368164, 0.08034544372558594, 0.08067481231689454, 0.0803470687866211, 0.08063699340820313, 0.08168141174316407, 0.08181894683837891, 0.08228463745117187, 0.08218889617919922, 0.08184207916259766, 0.08105101013183594, 0.08111135864257812, 0.08075100708007812, 0.08096518707275391, 0.08033932495117188, 0.08104557037353516, 0.08110899353027344, 0.08192733001708985, 0.08211952209472656, 0.08325325012207031, 0.0829676513671875, 0.08136704254150391, 0.09417731475830078, 0.08028665924072266, 0.08058486175537109, 0.08067056274414063, 0.0805580825805664, 0.08056217956542969, 0.08008649444580078, 0.08005241394042968, 0.08062806701660157, 0.08008294677734375, 0.08078931427001954, 0.08509664154052735, 0.08450374603271485, 0.08171196746826172, 0.08073382568359375, 0.080176513671875, 0.08070777893066407, 0.0805296630859375, 0.0800445098876953, 0.0800474853515625, 0.08010310363769531, 0.08099244689941407, 0.0815006103515625, 0.08292390441894532, 0.08347647857666016, 0.0823377914428711, 0.08121753692626953, 0.08085068511962891, 0.08082867431640625, 0.0803594207763672, 0.08116223907470703, 0.08022589111328125, 0.08075714874267578, 0.08137718200683594, 0.08126268768310548, 0.0844432601928711, 0.08291110229492188, 0.08164505767822265, 0.081244384765625, 0.0814551010131836, 0.08079718780517578, 0.08087123107910156, 0.08142332458496093, 0.0809079360961914, 0.08085858917236328, 0.08068390655517578, 0.08192819213867188, 0.08243405151367188, 0.08217705535888672, 0.0828712615966797, 0.08110489654541016, 0.08248870086669922, 0.08357129669189453, 0.0809653091430664, 0.08110115051269531, 0.08138925170898438, 0.08097148895263671, 0.08095750427246094, 0.08167068481445312, 0.08190156555175782, 0.08227779388427735, 0.08179180908203125, 0.08210819244384765, 0.0936655044555664, 0.07980662536621094, 0.07998982238769531, 0.08019039916992188, 0.08042086029052735, 0.08073414611816407, 0.08066259002685547, 0.08005203247070312, 0.08071183776855469, 0.08068099212646485, 0.08024269104003906, 0.0856657943725586, 0.08338835144042969, 0.0814817886352539, 0.0808058853149414, 0.08017919921875, 0.08106591796875, 0.08009939575195313, 0.08021401977539062, 0.08000511932373047, 0.0805125732421875, 0.0805370864868164, 0.0817142105102539, 0.08375868988037109, 0.08344179534912109, 0.08239334106445312, 0.08334553527832031, 0.08073811340332031, 0.08076649475097657, 0.08080230712890625, 0.08095702362060547, 0.08019347381591797, 0.08060972595214844, 0.08091970825195313, 0.08152690887451172, 0.08217215728759765, 0.08233769226074218, 0.08235897827148438, 0.08151248168945313, 0.08113136291503906, 0.08064205169677735, 
0.0808526382446289, 0.08045807647705078, 0.08096669006347657, 0.08089289855957031, 0.08090009307861327, 0.08097586822509766, 0.08228431701660156, 0.0820547866821289, 0.0828946533203125, 0.08331539154052735, 0.08122601318359375, 0.08091958618164062, 0.08034384155273437, 0.08099366760253907, 0.0804010238647461, 0.08151449584960938, 0.08121497344970703, 0.08170492553710937, 0.08102543640136718, 0.08291248321533203, 0.08256336212158204, 0.08235395050048828]",tokens/s,12.30319861360233,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,881.729536,657.391616,0.0,262.144,258.163712,s,1,8.0430068359375,8.0430068359375,0.0,8.0430068359375,8.0430068359375,8.0430068359375,8.0430068359375,[8.0430068359375],,kWh,2.6612145262496746e-05,2.9283357066335143e-06,9.433063102012862e-06,3.897354407114312e-05,,MB,1172.221952,755.95776,0.0,348.127232,317.820928,s,10,0.20838342666625978,0.02083834266662598,0.0002567364207006522,0.020853376388549803,0.02121261806488037,0.021219717121124266,0.021225396366119385,"[0.021226816177368164, 0.020714208602905272, 0.02121104049682617, 0.020811904907226564, 0.02096156883239746, 0.02099456024169922, 0.020591007232666016, 0.020537759780883787, 0.020894847869873046, 0.020439712524414063]",tokens/s,12285.046085262884,kWh,6.035742818009889e-07,6.65637668998693e-08,4.0238668554542483e-07,1.072524734246283e-06,tokens/kWh,238689133.9899066,MB,1183.608832,781.123584,0.0,373.293056,317.823488,s,10,10.020481994628907,1.0020481994628907,0.010652903509280085,1.0036236572265627,1.015684619140625,1.0165244689941406,1.017196348876953,"[1.004661865234375, 0.9886724853515625, 1.0154979858398439, 0.9943750610351563, 1.00258544921875, 1.0109461669921875, 0.9888856201171875, 1.0173643188476562, 1.008752197265625, 0.9887408447265625]",tokens/s,62.871227186245854,kWh,2.9208369141114508e-05,3.221175133107925e-06,1.2214234518856399e-05,4.464377879307884e-05,tokens/kWh,1411170.8664269017,,s,630,10.01510766124725,0.015896996287694053,0.0004690713008814915,0.01582924795150757,0.016316352462768552,0.01651300172805786,0.017206590061187743,"[0.016197792053222658, 0.016232288360595704, 0.016127071380615234, 0.01612601661682129, 0.016044895172119142, 0.01597216033935547, 0.01581308841705322, 0.015744256019592284, 0.01567296028137207, 0.015479647636413574, 0.01549721622467041, 0.015454208374023438, 0.015538080215454102, 
0.015534175872802734, 0.015527935981750488, 0.01571401596069336, 0.015755776405334473, 0.015605536460876465, 0.015478591918945312, 0.015444160461425781, 0.015376640319824218, 0.015672896385192873, 0.015435551643371582, 0.01535427188873291, 0.01536240005493164, 0.015480223655700684, 0.015542528152465821, 0.01576959991455078, 0.01568073558807373, 0.015721216201782226, 0.016295967102050782, 0.017291231155395506, 0.01617513656616211, 0.016330751419067382, 0.01627510452270508, 0.016594944000244142, 0.022169952392578126, 0.018266111373901366, 0.01623798370361328, 0.01572719955444336, 0.015545984268188477, 0.01549459171295166, 0.01565321636199951, 0.015553119659423829, 0.015564703941345214, 0.01600726318359375, 0.015694080352783205, 0.01569375991821289, 0.016228160858154296, 0.015629983901977538, 0.01576316833496094, 0.01586678409576416, 0.015830592155456544, 0.01559158420562744, 0.015557727813720702, 0.016085792541503906, 0.015579263687133789, 0.016134016036987303, 0.015759231567382813, 0.01577014446258545, 0.015695743560791016, 0.0157609920501709, 0.01595971202850342, 0.015769439697265623, 0.015710176467895506, 0.015570816040039062, 0.015528191566467285, 0.01551103973388672, 0.015547072410583496, 0.015563808441162109, 0.015512255668640137, 0.01562435245513916, 0.01576857566833496, 0.015600000381469727, 0.015484576225280761, 0.015526880264282226, 0.015510592460632324, 0.015522751808166504, 0.015433728218078613, 0.015482560157775879, 0.015483200073242187, 0.015344799995422364, 0.015393631935119629, 0.015468607902526856, 0.015439807891845703, 0.015330400466918945, 0.015469632148742675, 0.015626079559326173, 0.015638527870178224, 0.015577088356018067, 0.01562758445739746, 0.015673855781555175, 0.015691967964172365, 0.015775103569030762, 0.01676483154296875, 0.016123647689819335, 0.01583612823486328, 0.01562764835357666, 0.01555519962310791, 0.015474399566650391, 0.015567328453063965, 0.015574848175048828, 0.01558732795715332, 0.015505056381225586, 0.015405407905578613, 0.015460224151611328, 0.015353919982910157, 0.015641920089721678, 0.015636672019958497, 0.015450688362121582, 0.015433728218078613, 0.015586943626403808, 0.015716799736022947, 0.015835071563720705, 0.015835295677185057, 0.015684639930725098, 0.015696479797363282, 0.01601353645324707, 0.016946624755859375, 0.015904576301574706, 0.016505151748657226, 0.01609769630432129, 0.016074560165405274, 0.016111839294433595, 0.015984064102172853, 0.015935744285583496, 0.015714240074157716, 0.015853311538696287, 0.015574848175048828, 0.015575488090515137, 0.0155217924118042, 0.015561727523803711, 0.015551360130310058, 0.015628416061401366, 0.0157327356338501, 0.015976384162902833, 0.01609347152709961, 0.016144256591796875, 0.016123807907104493, 0.016115007400512697, 0.016074623107910156, 0.01613417625427246, 0.016089887619018556, 0.016134143829345703, 0.016033504486083986, 0.016050464630126954, 0.016070432662963867, 0.016109216690063478, 0.01610767936706543, 0.016101919174194335, 0.01614569664001465, 0.016182912826538085, 0.01623958396911621, 0.016300031661987305, 0.016519424438476562, 0.016210720062255858, 0.016206815719604493, 0.016205087661743164, 0.016182016372680665, 0.016127840042114257, 0.01615679931640625, 0.01603296089172363, 0.016057151794433594, 0.016107072830200197, 0.01621881675720215, 0.01619049644470215, 0.01608518409729004, 0.016116031646728514, 0.016091360092163085, 0.016457183837890625, 0.01609782409667969, 0.01675811195373535, 0.016206272125244142, 0.016279199600219726, 0.01624140739440918, 0.016445247650146485, 
0.01621401596069336, 0.016193248748779296, 0.016328704833984374, 0.01625753593444824, 0.01621388816833496, 0.01656150436401367, 0.016468544006347657, 0.01620377540588379, 0.016130048751831053, 0.016087039947509766, 0.016111328125, 0.016127904891967772, 0.016111583709716798, 0.01550284767150879, 0.015867775917053222, 0.0157128963470459, 0.015638527870178224, 0.015560447692871094, 0.015511103630065918, 0.015439616203308106, 0.015505951881408691, 0.015372703552246094, 0.015350943565368652, 0.0153504638671875, 0.015372447967529297, 0.01553983974456787, 0.01574934387207031, 0.016052383422851563, 0.015711551666259767, 0.015710720062255858, 0.015636063575744628, 0.01569993591308594, 0.015796863555908203, 0.015882240295410157, 0.015943391799926758, 0.015868191719055177, 0.015879199981689452, 0.015754207611083985, 0.0172126407623291, 0.015982815742492676, 0.0162412166595459, 0.015811936378479004, 0.015744895935058595, 0.01585760021209717, 0.015811327934265137, 0.015599712371826172, 0.015437824249267578, 0.015368351936340332, 0.015379584312438965, 0.015366656303405762, 0.015548480033874513, 0.015524255752563476, 0.015432671546936035, 0.015462592124938964, 0.015673664093017577, 0.015900959968566896, 0.01597856044769287, 0.015868895530700682, 0.015535231590270995, 0.015603551864624024, 0.015540224075317383, 0.01562009620666504, 0.015560704231262207, 0.015793312072753907, 0.015712672233581543, 0.015601183891296387, 0.015676320075988768, 0.015931679725646974, 0.016498176574707032, 0.016636255264282227, 0.01637593650817871, 0.01633459281921387, 0.01627462387084961, 0.016259904861450195, 0.016158559799194335, 0.016071136474609376, 0.016126399993896486, 0.016113664627075194, 0.015970303535461427, 0.01599219226837158, 0.015743616104125977, 0.015550463676452637, 0.01551961612701416, 0.015409279823303222, 0.01545792007446289, 0.01566553592681885, 0.0163450870513916, 0.01633839988708496, 0.015810463905334474, 0.015671327590942384, 0.01563055992126465, 0.015986111640930174, 0.015471551895141602, 0.015453215599060058, 0.015413887977600097, 0.01546070384979248, 0.015495295524597169, 0.015781760215759277, 0.015892479896545412, 0.015769887924194335, 0.01573359966278076, 0.015844511985778808, 0.015875359535217286, 0.015847871780395508, 0.015773695945739748, 0.01571664047241211, 0.01557494354248047, 0.015646528244018555, 0.015523200035095214, 0.015546272277832032, 0.015528672218322754, 0.015564800262451172, 0.01557094383239746, 0.01560534381866455, 0.015481247901916503, 0.015482879638671876, 0.015648096084594727, 0.015672096252441405, 0.01572646427154541, 0.01572441577911377, 0.01576972770690918, 0.015779840469360353, 0.015834272384643553, 0.017191776275634764, 0.016029472351074218, 0.016107744216918945, 0.0165295352935791, 0.01626128005981445, 0.01657756805419922, 0.016689855575561522, 0.01661929512023926, 0.016582080841064453, 0.016177951812744142, 0.01617919921875, 0.016156095504760742, 0.017500415802001953, 0.01629420852661133, 0.016332319259643555, 0.016230016708374023, 0.015903039932250975, 0.01613884735107422, 0.016056352615356446, 0.016139392852783204, 0.0161102409362793, 0.015992863655090332, 0.016054464340209962, 0.016067840576171874, 0.016086816787719727, 0.016316640853881837, 0.0161779842376709, 0.016178495407104494, 0.01605081558227539, 0.01608687973022461, 0.01612406349182129, 0.016052223205566405, 0.016203647613525392, 0.016156543731689454, 0.016121280670166015, 0.01624982452392578, 0.016215904235839844, 0.016312320709228514, 0.016210208892822264, 0.016256736755371093, 0.016463872909545898, 
0.01680335998535156, 0.016239072799682615, 0.01611782455444336, 0.016043968200683593, 0.016054271697998047, 0.016076608657836913, 0.015976832389831542, 0.016024927139282226, 0.016058847427368163, 0.016052127838134766, 0.016048288345336913, 0.01607468795776367, 0.016046079635620117, 0.01613209533691406, 0.0160501766204834, 0.01616089630126953, 0.016268960952758788, 0.016022783279418945, 0.015864864349365234, 0.015789183616638183, 0.01561849594116211, 0.015644607543945314, 0.015495488166809082, 0.015509056091308595, 0.015398528099060059, 0.015693056106567384, 0.01567695999145508, 0.015669407844543456, 0.01648134422302246, 0.015991456031799318, 0.015999263763427734, 0.016107519149780272, 0.01613209533691406, 0.015945119857788084, 0.01611612892150879, 0.015853759765625, 0.015699968338012696, 0.015751168251037596, 0.015431584358215332, 0.01575945568084717, 0.016082944869995116, 0.015696127891540526, 0.01554201602935791, 0.016002559661865236, 0.015493632316589356, 0.015361632347106934, 0.015494720458984375, 0.015571807861328125, 0.015742783546447755, 0.01564896011352539, 0.015567071914672851, 0.015460127830505372, 0.01553206443786621, 0.015456319808959961, 0.015487968444824219, 0.015547327995300293, 0.01575539207458496, 0.015883423805236815, 0.015913120269775392, 0.01594425582885742, 0.01637785530090332, 0.015923456192016603, 0.01592416000366211, 0.015911168098449707, 0.01587990379333496, 0.01594480037689209, 0.015686495780944824, 0.015915936470031738, 0.015550463676452637, 0.015480992317199706, 0.015387807846069336, 0.01547539234161377, 0.015519840240478516, 0.01570191955566406, 0.015736096382141112, 0.015788384437561035, 0.015598176002502441, 0.015525535583496093, 0.01562764835357666, 0.015433600425720215, 0.015443039894104003, 0.015398880004882813, 0.015331135749816895, 0.015500703811645507, 0.01568230438232422, 0.0156212797164917, 0.015572863578796387, 0.015626239776611327, 0.015584223747253418, 0.01561193561553955, 0.01572364807128906, 0.016351423263549804, 0.015996895790100097, 0.01596649646759033, 0.01607526397705078, 0.0159366397857666, 0.015766143798828125, 0.01567948818206787, 0.01565286445617676, 0.015601471900939941, 0.015511167526245118, 0.015323328018188477, 0.015689855575561525, 0.01575376033782959, 0.015654080390930174, 0.015565279960632325, 0.015520064353942872, 0.015470784187316894, 0.015484416007995605, 0.01557759952545166, 0.016189023971557616, 0.01604240036010742, 0.01591267204284668, 0.015669568061828613, 0.015784000396728514, 0.01577494430541992, 0.01573344039916992, 0.01583513641357422, 0.015930463790893554, 0.016055423736572264, 0.016330528259277343, 0.01635686492919922, 0.016421375274658204, 0.016830656051635744, 0.016834367752075197, 0.01626316833496094, 0.01617286491394043, 0.016108863830566405, 0.016137088775634766, 0.01701888084411621, 0.01604732894897461, 0.016103519439697265, 0.016746816635131837, 0.016250463485717775, 0.019272480010986328, 0.016350784301757813, 0.016225791931152343, 0.01614329528808594, 0.015933600425720215, 0.015970239639282225, 0.015927167892456056, 0.015829024314880372, 0.015800448417663576, 0.015928319931030274, 0.015947839736938477, 0.01612451171875, 0.01616908836364746, 0.016269407272338866, 0.016115999221801756, 0.016153791427612304, 0.016200223922729493, 0.016216064453125, 0.016325855255126955, 0.01696175956726074, 0.016360000610351564, 0.016342975616455077, 0.016228607177734375, 0.016219968795776366, 0.016119808197021485, 0.016151552200317384, 0.016288768768310546, 0.016244607925415037, 0.01619161605834961, 0.01616092872619629, 
0.015939680099487305, 0.01618307113647461, 0.01627462387084961, 0.01623628807067871, 0.016142175674438475, 0.01609129524230957, 0.016668672561645507, 0.016300031661987305, 0.016144351959228517, 0.016108608245849608, 0.016573408126831054, 0.01649260711669922, 0.017542272567749023, 0.01627622413635254, 0.016404287338256836, 0.01631158447265625, 0.016356319427490235, 0.016436672210693358, 0.016765504837036132, 0.016373151779174804, 0.016316320419311522, 0.016441152572631835, 0.016270431518554687, 0.016558015823364258, 0.016205663681030272, 0.016707584381103514, 0.016186431884765626, 0.016008224487304688, 0.015841183662414552, 0.015755295753479004, 0.01589859199523926, 0.016106943130493163, 0.015665568351745606, 0.015847519874572755, 0.015515647888183593, 0.015511615753173828, 0.015900671958923338, 0.015736831665039062, 0.015726943969726563, 0.016909984588623046, 0.015659008026123047, 0.01586355209350586, 0.015569151878356934, 0.015558688163757325, 0.015701984405517577, 0.015644864082336427, 0.015527359962463379, 0.015829471588134764, 0.01553603172302246, 0.015590847969055176, 0.01605232048034668, 0.01570864009857178, 0.01570201587677002, 0.015609439849853515, 0.01555292797088623, 0.015617216110229492, 0.015584063529968262, 0.015604928016662597, 0.015508543968200684, 0.015490400314331054, 0.015578911781311036, 0.015522144317626952, 0.015537983894348144, 0.015133983612060547, 0.015426464080810547, 0.01535587215423584, 0.015413056373596192, 0.01544211196899414, 0.015388575553894043, 0.015451583862304687, 0.015343839645385743, 0.015425984382629394, 0.015396863937377929, 0.015405055999755859, 0.015581184387207032, 0.0155316801071167, 0.015560352325439453, 0.01546275234222412, 0.015447999954223633, 0.01552950382232666, 0.015532928466796875, 0.015828991889953615, 0.01552998447418213, 0.01603379249572754, 0.015588640213012695, 0.01561257553100586, 0.015893888473510743, 0.016078527450561524, 0.016076959609985352, 0.015809215545654298, 0.015685791969299315, 0.015544192314147949, 0.015488639831542968, 0.01561190414428711, 0.015532544136047363, 0.01570406436920166, 0.015871583938598634, 0.01566262435913086, 0.015754015922546386, 0.015904447555541993, 0.015888416290283203, 0.01576707172393799, 0.01565987205505371, 0.015638208389282225, 0.01574124813079834, 0.015830240249633788, 0.015864831924438477, 0.015916831970214845, 0.01591872024536133, 0.015804287910461425, 0.015849984169006348, 0.01592319965362549, 0.015917056083679198, 0.016377119064331053, 0.01608163261413574, 0.01599622440338135, 0.015835807800292968, 0.015830495834350585, 0.015768128395080565, 0.015977984428405763, 0.01577830410003662, 0.015624192237854004, 0.015602687835693359, 0.015530176162719726, 0.015534976005554198, 0.015529919624328613]",tokens/s,62.90496530933364,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1029.533696,1613.692928,0.0,1218.445312,1206.173696,s,1,9.286396484375,9.286396484375,0.0,9.286396484375,9.286396484375,9.286396484375,9.286396484375,[9.286396484375],,kWh,6.412064128747564e-05,7.065838385538754e-06,2.272446262401029e-05,9.391094229702468e-05,,MB,1264.78336,1911.488512,0.0,1503.657984,1463.228416,s,10,1.9235433197021485,0.19235433197021484,0.0011884075023120932,0.19304808044433594,0.19332053985595704,0.19348340530395508,0.1936136976623535,"[0.19133229064941407, 0.1917051544189453, 0.19364627075195312, 0.19322828674316406, 0.19320034790039062, 0.19289581298828126, 0.1932348175048828, 0.1898927001953125, 0.191123291015625, 0.1932843475341797]",tokens/s,1330.8772273433406,kWh,5.887339673249622e-06,6.492632882767605e-07,3.903458678319871e-06,1.0440061639846254e-05,tokens/kWh,24520928.02047575,MB,1282.797568,1911.488512,0.0,1503.657984,1463.230976,s,10,19.328538940429688,1.9328538940429687,0.014617236633712626,1.9286599731445313,1.950318566894531,1.9520762145996093,1.953482332763672,"[1.9538338623046876, 1.948485595703125, 1.949927978515625, 1.9435340576171876, 1.9235828857421875, 1.9234945068359375, 1.932079833984375, 1.90653173828125, 1.921828369140625, 1.9252401123046874]",tokens/s,32.59428981888657,kWh,5.5863144591333356e-05,6.161526894123156e-06,3.236473700287956e-05,9.438940848833609e-05,tokens/kWh,667447.767805273,,s,630,19.324281274795513,0.030673462340945288,0.0006182519787459518,0.03063547134399414,0.03119975719451904,0.03146232471466064,0.033014284744262705,"[0.03193935966491699, 0.031149696350097657, 0.031248767852783202, 0.031381376266479494, 0.031077791213989257, 0.030824480056762697, 0.030999040603637694, 0.03083692741394043, 0.031040639877319337, 0.030936031341552733, 0.031209375381469725, 0.03102505683898926, 0.0310784969329834, 0.03102083206176758, 0.03119740867614746, 0.031133184432983397, 0.0311234245300293, 0.03129769515991211, 0.03108697509765625, 0.03099465560913086, 0.031038528442382814, 0.031042272567749024, 0.030817792892456054, 0.030840959548950195, 0.03092313575744629, 0.030785535812377928, 0.03071753692626953, 0.031951263427734376, 0.03122380828857422, 0.03105295944213867, 0.030931808471679687, 0.030823711395263673, 0.031011552810668946, 0.03097577667236328, 0.030938623428344726, 0.030558015823364256, 0.030684032440185548, 0.03052864074707031, 0.03101081657409668, 0.03107904052734375, 0.030738719940185545, 0.030607551574707032, 0.030889791488647463, 0.030642175674438478, 0.030662656784057617, 0.03060326385498047, 0.03059868812561035, 0.030370271682739258, 0.03100054359436035, 0.03077903938293457, 0.03085919952392578, 0.03078764724731445, 0.03053401565551758, 0.030683135986328124, 0.030838783264160157, 0.030596416473388673, 0.03049235153198242, 0.03050547218322754, 0.030487039566040038, 0.030818304061889647, 0.03225395202636719, 0.03427072143554687, 0.03180390357971191, 0.03107766342163086, 0.03127964782714844, 0.03068671989440918, 0.030993375778198242, 0.03072991943359375, 0.031406080245971676, 0.030779424667358397, 0.030735519409179686, 0.030576864242553712, 0.030505664825439455, 0.03088755226135254, 0.03079315185546875, 0.030740608215332033, 0.031095136642456056, 0.030859487533569336, 0.031346752166748044, 0.03075209617614746, 0.030596992492675782, 0.030626783370971678, 0.030613439559936523, 0.03049251174926758, 0.030511135101318358, 0.030498912811279297, 0.031700544357299805, 0.030793792724609376, 0.03130204772949219, 0.031089920043945313, 0.030810432434082033, 
0.030942880630493164, 0.03152518463134766, 0.030717824935913084, 0.03071343994140625, 0.030714879989624022, 0.03059507179260254, 0.03078144073486328, 0.03062716865539551, 0.03125545692443848, 0.031219455718994142, 0.030937183380126954, 0.030879648208618164, 0.030899232864379882, 0.03108963203430176, 0.031246335983276367, 0.03103116798400879, 0.030793855667114258, 0.030855167388916017, 0.030772960662841797, 0.03127734375, 0.0312476806640625, 0.031133951187133788, 0.030928991317749024, 0.031011167526245116, 0.03131999969482422, 0.030591039657592772, 0.03068454360961914, 0.030521440505981445, 0.03163190460205078, 0.030510368347167968, 0.030865888595581054, 0.031190912246704103, 0.031409727096557614, 0.030780223846435546, 0.03100057601928711, 0.03170259284973145, 0.030836511611938476, 0.030798175811767577, 0.030865568161010742, 0.030814079284667967, 0.031085983276367187, 0.03545161437988281, 0.030899744033813476, 0.030393856048583984, 0.030634975433349608, 0.030341119766235353, 0.030765087127685546, 0.030860448837280275, 0.03066143989562988, 0.03079167938232422, 0.03061939239501953, 0.030599424362182617, 0.030365695953369142, 0.030588159561157225, 0.03169356727600098, 0.03065353584289551, 0.030489471435546874, 0.030730272293090822, 0.031198688507080078, 0.03110966491699219, 0.030736576080322264, 0.03064575958251953, 0.030663007736206054, 0.0305511360168457, 0.03038912010192871, 0.030332927703857423, 0.03041279983520508, 0.03041814422607422, 0.03163593673706055, 0.030933311462402344, 0.030840831756591795, 0.03213312149047851, 0.030825792312622072, 0.03113772773742676, 0.030685184478759765, 0.030700288772583007, 0.030644256591796874, 0.030558176040649414, 0.03090185546875, 0.0314619197845459, 0.031014432907104494, 0.030826751708984374, 0.03099068832397461, 0.030715648651123046, 0.03088380813598633, 0.030816064834594727, 0.032008480072021485, 0.030816192626953124, 0.03093231964111328, 0.030886240005493164, 0.031324480056762694, 0.03160857582092285, 0.03093120002746582, 0.030855167388916017, 0.030926847457885744, 0.030863359451293947, 0.030705663681030275, 0.030793727874755858, 0.031332319259643554, 0.031185792922973632, 0.030764671325683595, 0.030676767349243163, 0.030568479537963867, 0.030425664901733398, 0.030551679611206056, 0.03044940757751465, 0.03057491111755371, 0.03210886383056641, 0.03291340637207031, 0.03057663917541504, 0.03140518379211426, 0.030505056381225585, 0.03152060890197754, 0.030688095092773437, 0.03045315170288086, 0.03052409553527832, 0.03036345672607422, 0.030603456497192382, 0.031088479995727537, 0.030588960647583006, 0.030861440658569335, 0.030713855743408205, 0.03041004753112793, 0.030870208740234373, 0.030443584442138672, 0.030262975692749022, 0.03010380744934082, 0.030627840042114256, 0.030467584609985353, 0.030845024108886718, 0.030916351318359375, 0.031265663146972655, 0.03058870315551758, 0.030965375900268554, 0.030585056304931642, 0.03066080093383789, 0.03046806335449219, 0.030812192916870117, 0.03047216033935547, 0.030414848327636718, 0.030830591201782227, 0.03146265602111816, 0.033055488586425784, 0.030476287841796876, 0.030723487854003906, 0.03152144050598144, 0.030500799179077148, 0.030589088439941407, 0.03032249641418457, 0.03044937515258789, 0.030777664184570314, 0.030623424530029298, 0.030951744079589845, 0.03070358467102051, 0.030611488342285158, 0.030500864028930662, 0.030414848327636718, 0.030332191467285156, 0.034527713775634766, 0.03066320037841797, 0.030447328567504883, 0.03129855918884277, 0.030702592849731446, 0.030822336196899416, 
0.030776735305786132, 0.03064694404602051, 0.030756095886230468, 0.030618368148803712, 0.03074483108520508, 0.030727935791015626, 0.030702720642089842, 0.03173465538024902, 0.030705055236816405, 0.030895999908447266, 0.030634687423706054, 0.030855199813842774, 0.031153535842895506, 0.03101145553588867, 0.030814207077026368, 0.03095347213745117, 0.030904319763183592, 0.030494207382202147, 0.030544256210327147, 0.030516544342041017, 0.030260032653808593, 0.030406656265258788, 0.03037942314147949, 0.030209823608398436, 0.030255327224731444, 0.030333248138427735, 0.0306977596282959, 0.03033497619628906, 0.030494720458984374, 0.03036524772644043, 0.030357152938842773, 0.030101728439331055, 0.030089759826660158, 0.02990492820739746, 0.03043078422546387, 0.030320671081542967, 0.030384544372558595, 0.03012124824523926, 0.030194047927856446, 0.030441791534423827, 0.03126601600646973, 0.03012620735168457, 0.030314464569091797, 0.030141151428222657, 0.030060543060302734, 0.030625343322753906, 0.030757312774658204, 0.030480384826660156, 0.03041436767578125, 0.03031907272338867, 0.032696319580078126, 0.030203903198242187, 0.029980031967163086, 0.029765344619750975, 0.02974742317199707, 0.029896928787231446, 0.030038496017456055, 0.030400320053100584, 0.030422880172729493, 0.03044576072692871, 0.031105024337768555, 0.03103664016723633, 0.030429855346679687, 0.030050207138061523, 0.030120159149169923, 0.0303570556640625, 0.030480255126953126, 0.030326335906982423, 0.03037696075439453, 0.030736032485961913, 0.03075107192993164, 0.031100639343261717, 0.030785951614379883, 0.0306112003326416, 0.030468223571777343, 0.030449663162231445, 0.030232479095458984, 0.03036739158630371, 0.030532032012939452, 0.03058687973022461, 0.03107427215576172, 0.030882976531982423, 0.030563199996948242, 0.030829952239990233, 0.030816415786743163, 0.030998239517211913, 0.030880159378051757, 0.030681440353393555, 0.030646272659301758, 0.030676128387451172, 0.030503776550292967, 0.030681087493896485, 0.030472192764282226, 0.03054204750061035, 0.030822175979614258, 0.03057868766784668, 0.030535680770874023, 0.030627840042114256, 0.03066499137878418, 0.03187235260009766, 0.03351590347290039, 0.030826496124267577, 0.031231231689453125, 0.031242528915405274, 0.030554399490356446, 0.03014214324951172, 0.029775999069213868, 0.029433280944824218, 0.029615039825439452, 0.029750783920288085, 0.02986240005493164, 0.02972159957885742, 0.02958639907836914, 0.029600799560546873, 0.02950003242492676, 0.029663616180419922, 0.030043167114257814, 0.030362592697143555, 0.030826303482055666, 0.030253440856933593, 0.03036518478393555, 0.03053932762145996, 0.03043609619140625, 0.030784448623657226, 0.030197471618652345, 0.029794944763183593, 0.029920927047729494, 0.029939775466918946, 0.030177215576171874, 0.03015577507019043, 0.03039743995666504, 0.030629888534545898, 0.030117727279663085, 0.030763168334960938, 0.030037567138671874, 0.02983286476135254, 0.03019968032836914, 0.030153568267822266, 0.03004627227783203, 0.029945024490356444, 0.03035219192504883, 0.030412063598632813, 0.03049977684020996, 0.030524255752563477, 0.030616704940795898, 0.030946432113647462, 0.030443935394287108, 0.030280191421508788, 0.03020572853088379, 0.030048255920410157, 0.030500255584716796, 0.030558176040649414, 0.03028396797180176, 0.03013033676147461, 0.030271167755126952, 0.030124639511108397, 0.030676992416381835, 0.030352800369262696, 0.030300096511840822, 0.030536352157592775, 0.030543680191040038, 0.03068854331970215, 0.03095427131652832, 0.030979936599731445, 
0.03088934326171875, 0.031060895919799804, 0.030901344299316406, 0.03096259117126465, 0.03079987144470215, 0.030883520126342774, 0.030957887649536133, 0.030775039672851563, 0.03091996765136719, 0.03082748794555664, 0.030964960098266603, 0.03117136001586914, 0.030974143981933593, 0.03108768081665039, 0.032895679473876956, 0.03597654342651367, 0.03103379249572754, 0.03098624038696289, 0.03071824073791504, 0.030818304061889647, 0.030806272506713868, 0.03085081672668457, 0.03130844879150391, 0.030642208099365235, 0.030466047286987305, 0.030930944442749023, 0.030590400695800782, 0.03060793685913086, 0.030473472595214844, 0.030313056945800783, 0.029954208374023437, 0.029782175064086914, 0.029740896224975586, 0.029689855575561523, 0.029909023284912108, 0.02999497604370117, 0.029714431762695313, 0.029794111251831054, 0.030002880096435546, 0.030128416061401368, 0.03032713508605957, 0.030076799392700197, 0.03013222312927246, 0.030103551864624024, 0.030089216232299806, 0.029828895568847658, 0.0299399356842041, 0.030065887451171874, 0.030010143280029298, 0.030585983276367187, 0.030186368942260743, 0.030663007736206054, 0.03018502426147461, 0.030046304702758788, 0.029968351364135743, 0.02994588851928711, 0.030472095489501954, 0.030543487548828126, 0.03049295997619629, 0.030257343292236328, 0.03031449508666992, 0.030281728744506835, 0.030097408294677733, 0.03009231948852539, 0.030200864791870115, 0.029879648208618163, 0.030091840744018553, 0.030234207153320314, 0.030261695861816405, 0.030213727951049804, 0.029981088638305665, 0.030068960189819336, 0.03038800048828125, 0.030717248916625976, 0.030468799591064452, 0.030419231414794922, 0.030553823471069337, 0.030470144271850585, 0.030453760147094725, 0.030537952423095704, 0.03038800048828125, 0.030226272583007814, 0.03125811195373535, 0.030377952575683594, 0.0303351993560791, 0.03139225578308105, 0.030742624282836913, 0.030685087203979493, 0.030742528915405274, 0.030652671813964843, 0.030658208847045898, 0.03081020736694336, 0.030519296646118164, 0.030504959106445313, 0.03047395133972168, 0.030502239227294923, 0.03048748779296875, 0.031145984649658204, 0.03058687973022461, 0.03061667251586914, 0.03079020881652832, 0.03052275276184082, 0.030554719924926758, 0.03053545570373535, 0.030403167724609374, 0.030318592071533205, 0.030392416000366212, 0.030620607376098632, 0.03066364860534668, 0.03172147178649903, 0.032428192138671874, 0.030704896926879884, 0.030882495880126953, 0.03051532745361328, 0.030471967697143554, 0.03038003158569336, 0.030367935180664062, 0.0306658878326416, 0.030306976318359376, 0.03003392028808594, 0.029822975158691405, 0.029728191375732422, 0.029669631958007814, 0.029907264709472657, 0.02980659294128418, 0.030216192245483397, 0.030208000183105467, 0.03017103958129883, 0.03005459213256836, 0.030293888092041015, 0.030795808792114257, 0.03196272087097168, 0.031117727279663086, 0.030910463333129884, 0.03073843193054199, 0.030533632278442382, 0.030208000183105467, 0.030002239227294922, 0.02990995216369629, 0.03014860725402832, 0.029949119567871094, 0.02993849563598633, 0.02993382453918457, 0.03029737663269043, 0.030556415557861327, 0.03039254379272461, 0.03008438491821289, 0.030268096923828126, 0.030947359085083007, 0.030005088806152345, 0.029749568939208985, 0.029777055740356446, 0.02991584014892578, 0.029818336486816407, 0.03317334365844726, 0.030820512771606447, 0.030173728942871094, 0.030269439697265626, 0.030316287994384766, 0.03057651138305664, 0.030595455169677734, 0.030626943588256836, 0.03063596725463867, 0.030720064163208008, 
0.030573440551757813, 0.030288160324096678, 0.029955808639526366, 0.03073036766052246, 0.030771072387695313, 0.030856447219848634, 0.030895135879516603, 0.03068441581726074, 0.030853343963623048, 0.03089638328552246, 0.03083798408508301, 0.03072492790222168, 0.03081827163696289, 0.031329408645629886, 0.030720895767211914, 0.030898239135742186, 0.030657920837402344, 0.030904895782470704, 0.03077529525756836, 0.030906368255615234, 0.030883136749267577, 0.03075334358215332, 0.031346815109252926, 0.030863231658935546, 0.03088140869140625, 0.030924896240234374, 0.030691072463989257, 0.03130844879150391, 0.031180799484252928, 0.030699520111083983, 0.030435327529907227, 0.030918655395507814, 0.030045440673828125, 0.029753311157226563, 0.02957187271118164, 0.029708288192749024, 0.029659135818481445, 0.030083072662353515, 0.030528608322143554, 0.030221504211425783, 0.029923168182373047, 0.029823936462402344, 0.029893760681152345, 0.02994361686706543, 0.03020582389831543, 0.030463743209838866, 0.030859104156494142]",tokens/s,32.601471228930144,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1832.112128,2899.247104,0.0,2503.999488,2349.010944,s,1,10.43385546875,10.43385546875,0.0,10.43385546875,10.43385546875,10.43385546875,10.43385546875,[10.43385546875],,kWh,8.925259466669407e-05,9.8379611483825e-06,3.31683598679966e-05,0.00013225891568307316,,MB,1861.627904,3310.288896,0.0,2902.458368,2642.29888,s,10,2.0485160675048824,0.20485160675048827,0.0014639113538269938,0.2049086685180664,0.20690233612060546,0.2069673957824707,0.2070194435119629,"[0.20393798828125, 0.20473356628417969, 0.20267205810546876, 0.20703245544433593, 0.20374549865722658, 0.20508377075195314, 0.20688787841796874, 0.20291651916503906, 0.205378173828125, 0.20612815856933595]",tokens/s,1249.6850967432786,kWh,6.244455116046322e-06,6.886439501866311e-07,4.1591522634890845e-06,1.1092251329722038e-05,tokens/kWh,23079174.13609624,MB,1879.646208,3310.288896,0.0,2902.458368,2642.30144,s,10,26.158725097656248,2.615872509765625,0.02669456699365264,2.600468994140625,2.6611926513671875,2.6641700561523436,2.666551979980469,"[2.59824267578125, 2.632306640625, 2.6671474609375, 2.660531005859375, 2.597810546875, 2.6013447265625, 2.598972412109375, 2.59959326171875, 2.61577734375, 2.5869990234375]",tokens/s,24.08374252369227,kWh,7.543729570770369e-05,8.320425516245945e-06,4.1119217292509205e-05,0.00012487693851645883,tokens/kWh,504496.67287204176,,s,630,26.156115360260003,0.04151764342898414,0.0009361234186335529,0.04123801612854004,0.04229381065368652,0.04348384552001952,0.045189528236389165,"[0.04186492919921875, 0.04122652816772461, 0.04103760147094727, 0.04131657409667969, 0.04110784149169922, 0.041498241424560545, 0.041033313751220706, 0.04120412826538086, 0.04109910583496094, 0.0409623031616211, 0.040944671630859374, 0.041351966857910157, 0.04093552017211914, 0.04110927963256836, 0.04083919906616211, 0.04199852752685547, 
0.04134265518188476, 0.04110982513427734, 0.04149571228027344, 0.04102844619750977, 0.04103372955322265, 0.04091865539550781, 0.0406798095703125, 0.041180286407470706, 0.041122688293457034, 0.041474048614501956, 0.04111705780029297, 0.04172876739501953, 0.04131110382080078, 0.04123955154418945, 0.041213024139404295, 0.041243553161621094, 0.040982112884521485, 0.04104438400268555, 0.04104502487182617, 0.04149327850341797, 0.04101987075805664, 0.04109897613525391, 0.04203424072265625, 0.04169814300537109, 0.041987552642822265, 0.041237342834472654, 0.04123583984375, 0.0411426887512207, 0.041148414611816404, 0.04139606475830078, 0.04099430465698242, 0.04137027359008789, 0.04108697509765625, 0.04107030487060547, 0.04103952026367187, 0.04097846221923828, 0.04143929672241211, 0.04146435165405273, 0.04114022445678711, 0.04130985641479492, 0.041175071716308596, 0.04144678497314453, 0.04125913619995117, 0.041834976196289064, 0.041140575408935544, 0.04089984130859375, 0.041014015197753904, 0.041705406188964844, 0.04106006240844726, 0.04095475387573242, 0.04138940811157227, 0.04096387100219727, 0.04108969497680664, 0.04085318374633789, 0.04096441650390625, 0.04099440002441406, 0.04099507141113281, 0.04089023971557617, 0.041302303314208984, 0.041866912841796874, 0.043305023193359375, 0.04122652816772461, 0.041867263793945314, 0.04118492889404297, 0.04124502563476563, 0.04098867034912109, 0.04105420684814453, 0.04170751953125, 0.041266590118408206, 0.04099046325683594, 0.0410939826965332, 0.040855552673339846, 0.04120371246337891, 0.04120489501953125, 0.04112665557861328, 0.041168991088867186, 0.04114009475708008, 0.041119232177734374, 0.04122841644287109, 0.04109363174438477, 0.04149248123168945, 0.0411357421875, 0.04152336120605469, 0.041414878845214845, 0.04168646240234375, 0.04149919891357422, 0.04133679962158203, 0.04169116973876953, 0.04150076675415039, 0.041253887176513675, 0.04128969573974609, 0.04130297470092773, 0.0421580810546875, 0.04152012634277344, 0.04154995346069336, 0.04115446472167969, 0.04155491256713867, 0.04135251235961914, 0.04124697494506836, 0.04136131286621094, 0.043536895751953124, 0.049504287719726564, 0.04478524780273437, 0.04494742584228516, 0.044902847290039065, 0.044802047729492187, 0.044951553344726565, 0.041488254547119144, 0.041600704193115234, 0.041403839111328125, 0.046214622497558595, 0.04475344085693359, 0.044951553344726565, 0.044686622619628906, 0.044634624481201174, 0.04434524917602539, 0.044681537628173826, 0.04374844741821289, 0.04186531066894531, 0.042082656860351564, 0.04227324676513672, 0.04208025741577148, 0.042394687652587894, 0.04214470291137695, 0.04253900909423828, 0.042420223236083986, 0.042298881530761716, 0.042402305603027345, 0.04173209762573242, 0.04200243377685547, 0.04137321472167969, 0.041523681640625, 0.04141056060791016, 0.0413322868347168, 0.041912769317626955, 0.04142691040039063, 0.041406112670898436, 0.04125734329223633, 0.041430561065673825, 0.04178992080688477, 0.041506046295166014, 0.0418056640625, 0.04169206237792969, 0.04170751953125, 0.041430526733398435, 0.04147251129150391, 0.041656158447265626, 0.04128908920288086, 0.041396350860595704, 0.041212158203125, 0.04144579315185547, 0.04229324722290039, 0.04127849578857422, 0.04174947357177734, 0.041875457763671874, 0.041885696411132815, 0.0420711669921875, 0.04162035369873047, 0.04215193557739258, 0.04154150390625, 0.040986942291259765, 0.04100076675415039, 0.04088422393798828, 0.040978431701660156, 0.04090838241577149, 0.04109148788452149, 0.04213862228393555, 0.04388764953613281, 
0.044763103485107425, 0.04426137542724609, 0.04429385757446289, 0.04473680114746094, 0.044730369567871096, 0.04576448059082031, 0.04506547164916992, 0.04521567916870117, 0.04585744094848633, 0.04274310302734375, 0.04158355331420899, 0.04161663818359375, 0.04143795013427734, 0.04160921478271484, 0.04300614547729492, 0.04137532806396484, 0.0413199348449707, 0.041759456634521484, 0.04210483169555664, 0.04229939270019531, 0.04254719924926758, 0.044943359375, 0.04259775924682617, 0.04247615814208985, 0.04211711883544922, 0.04208380889892578, 0.04246716690063477, 0.042393566131591794, 0.04161814498901367, 0.0420882568359375, 0.04157846450805664, 0.04251670455932617, 0.042229759216308595, 0.042646656036376955, 0.046682430267333985, 0.04197548675537109, 0.04311872100830078, 0.04288486480712891, 0.04238643264770508, 0.04196966552734375, 0.04213145446777344, 0.04168716812133789, 0.041770881652832034, 0.04177695846557617, 0.04187919998168945, 0.04182479858398438, 0.04192873764038086, 0.04154774475097656, 0.041041439056396484, 0.04102396774291992, 0.041344863891601566, 0.041524383544921876, 0.04122281646728516, 0.041535518646240235, 0.04156447982788086, 0.0413260498046875, 0.04147776031494141, 0.042119937896728514, 0.04152131271362305, 0.04164217758178711, 0.04187936019897461, 0.041177055358886716, 0.04161539077758789, 0.041110591888427736, 0.041374622344970705, 0.04145475387573242, 0.04177091217041016, 0.04190307235717773, 0.04189091110229492, 0.04141120147705078, 0.041224193572998044, 0.041369888305664064, 0.0413034553527832, 0.041129600524902346, 0.041226688385009765, 0.0410588493347168, 0.04148857498168945, 0.04206572723388672, 0.04151910400390625, 0.04134905624389648, 0.041433151245117185, 0.0415579833984375, 0.04109286499023437, 0.04107497787475586, 0.04089606475830078, 0.041142719268798825, 0.0409804801940918, 0.04101020812988281, 0.04106444931030274, 0.04096713638305664, 0.040860992431640625, 0.04112044906616211, 0.041127296447753904, 0.041172897338867184, 0.04146623992919922, 0.04071664047241211, 0.04089641571044922, 0.04114761734008789, 0.04152204895019531, 0.04115990447998047, 0.04103247833251953, 0.041568321228027345, 0.041067550659179684, 0.04097468948364258, 0.04106092834472656, 0.04106444931030274, 0.04094976043701172, 0.04064230346679688, 0.040869152069091794, 0.04110230255126953, 0.040767105102539065, 0.04117747116088867, 0.04099686431884766, 0.04095296096801758, 0.040834049224853515, 0.04341900634765625, 0.041256832122802733, 0.041175647735595705, 0.04122000122070312, 0.04095139312744141, 0.04106089782714844, 0.04118732833862305, 0.04148223876953125, 0.04193689727783203, 0.041484001159667966, 0.041236766815185545, 0.04165836715698242, 0.04125395202636719, 0.0411385612487793, 0.041273952484130856, 0.041310081481933596, 0.041942337036132815, 0.04124947357177734, 0.041279167175292966, 0.04127097702026367, 0.041291553497314455, 0.0424332160949707, 0.0410805778503418, 0.04165603256225586, 0.04096684646606445, 0.041250656127929684, 0.04115785598754883, 0.04114527893066406, 0.0411822395324707, 0.041333919525146486, 0.04161075210571289, 0.04142931365966797, 0.04101939010620117, 0.041283550262451174, 0.04089859390258789, 0.041134078979492186, 0.04091222381591797, 0.04095657730102539, 0.04533606338500976, 0.041195102691650394, 0.04091791915893555, 0.041176353454589844, 0.04120383834838867, 0.04122889709472656, 0.04127878570556641, 0.041210784912109374, 0.04192233657836914, 0.04129792022705078, 0.04095795059204101, 0.04109113693237305, 0.04099020767211914, 0.04117513656616211, 
0.041369632720947264, 0.041174335479736326, 0.04108803176879883, 0.041861087799072265, 0.04140380859375, 0.041087776184082034, 0.04155152130126953, 0.040951969146728516, 0.04074812698364258, 0.040979007720947265, 0.0408436164855957, 0.04127961730957031, 0.040777599334716794, 0.04091904067993164, 0.0412421760559082, 0.04166086578369141, 0.04082601547241211, 0.04098748779296875, 0.04112319946289063, 0.04143942260742187, 0.04106428909301758, 0.04129622268676758, 0.04102783966064453, 0.041095169067382815, 0.041491840362548826, 0.0409865608215332, 0.04133958435058594, 0.04172851181030274, 0.04114659118652344, 0.04092006301879883, 0.04126512145996094, 0.04126188659667969, 0.04111715316772461, 0.0429901123046875, 0.041339969635009764, 0.041102272033691406, 0.041339935302734374, 0.04111663818359375, 0.041651294708251956, 0.04107561492919922, 0.04118732833862305, 0.041289726257324216, 0.04111273574829102, 0.041632606506347654, 0.04166598510742187, 0.04194566345214844, 0.04102143859863281, 0.04175468826293945, 0.041840576171875, 0.041618751525878905, 0.04106515121459961, 0.040940799713134766, 0.041055103302001954, 0.04094553756713867, 0.04111769485473633, 0.04095180892944336, 0.04101849746704102, 0.04078067016601562, 0.04105833435058594, 0.04107465744018555, 0.0409989128112793, 0.0409700813293457, 0.04135692977905273, 0.04106908798217773, 0.040938785552978516, 0.040723041534423826, 0.04084339141845703, 0.04093952178955078, 0.04125289535522461, 0.04106972885131836, 0.04096195220947266, 0.04256182479858398, 0.04120844650268555, 0.04115769577026367, 0.04203615951538086, 0.04082483291625977, 0.04097228622436523, 0.040861217498779294, 0.04109142303466797, 0.0410830078125, 0.04129587173461914, 0.042958782196044924, 0.04114633560180664, 0.0409150390625, 0.04096409606933594, 0.04097571182250977, 0.041263393402099606, 0.04096243286132813, 0.04108697509765625, 0.0410928955078125, 0.04179148864746094, 0.0441712646484375, 0.04512550354003906, 0.041660320281982424, 0.0407977294921875, 0.04086608123779297, 0.04079782485961914, 0.041075328826904296, 0.04106406402587891, 0.04103631973266601, 0.04088934326171875, 0.04115865707397461, 0.040847423553466794, 0.041011199951171876, 0.04094454574584961, 0.04112380981445313, 0.04128947067260742, 0.041301502227783206, 0.04093948745727539, 0.04091747283935547, 0.04090288162231445, 0.041369758605957034, 0.04127884674072266, 0.04242086410522461, 0.041104896545410156, 0.04233587265014648, 0.041232769012451174, 0.041320255279541016, 0.04106515121459961, 0.04089158248901367, 0.04105503845214844, 0.041199520111083986, 0.04141884613037109, 0.041017345428466793, 0.040844833374023434, 0.04116105651855469, 0.0409288330078125, 0.040872512817382814, 0.04083302307128906, 0.04102950286865235, 0.04126019287109375, 0.040970558166503905, 0.04075945663452148, 0.04126464080810547, 0.04089548873901367, 0.04111328125, 0.04134051132202148, 0.04119625473022461, 0.0409169921875, 0.040879390716552735, 0.0416426887512207, 0.04129385757446289, 0.04140236663818359, 0.0410398063659668, 0.04091910552978516, 0.04109827041625977, 0.04098553466796875, 0.041084190368652344, 0.04104793548583984, 0.04139302444458008, 0.041215999603271485, 0.04132352066040039, 0.04120383834838867, 0.04252467346191406, 0.04122943878173828, 0.04180217742919922, 0.041443264007568356, 0.04148688125610352, 0.04136460876464844, 0.04185993576049805, 0.04140031814575195, 0.04169676971435547, 0.04127596664428711, 0.04132447814941406, 0.041191425323486325, 0.041398273468017575, 0.041280960083007814, 0.04153401565551758, 
0.041455520629882815, 0.04149033737182617, 0.042753982543945315, 0.04289152145385742, 0.041240577697753904, 0.04202905654907227, 0.041442913055419923, 0.04129219055175781, 0.04107001495361328, 0.0415032958984375, 0.04118732833862305, 0.04215155029296875, 0.04116313552856445, 0.041635841369628904, 0.04145142364501953, 0.04132812881469727, 0.04152105712890625, 0.041366207122802735, 0.04138393783569336, 0.04106230545043945, 0.04131375885009766, 0.04118387222290039, 0.04123648071289063, 0.04260790252685547, 0.041158878326416015, 0.04130031967163086, 0.04123868942260742, 0.041192958831787106, 0.041361919403076174, 0.041137535095214846, 0.041378337860107424, 0.04118947219848633, 0.04129974365234375, 0.04113225555419922, 0.04157196807861328, 0.041650558471679686, 0.04118707275390625, 0.04461081695556641, 0.04124160003662109, 0.04099814224243164, 0.04104268646240234, 0.04108515167236328, 0.04110313415527344, 0.040936511993408205, 0.04110784149169922, 0.04118316650390625, 0.043580127716064454, 0.04125788879394531, 0.04158035278320313, 0.04111529541015625, 0.0409106559753418, 0.041021984100341795, 0.04106428909301758, 0.041070751190185543, 0.041051200866699215, 0.04112879943847656, 0.04107049560546875, 0.04124179077148438, 0.04163481521606445, 0.04106444931030274, 0.04117913436889648, 0.04093244934082031, 0.04105104064941406, 0.04113564682006836, 0.04088265609741211, 0.04082198333740234, 0.04106729507446289, 0.041139518737792966, 0.04099862289428711, 0.041231327056884766, 0.04072447967529297, 0.040974273681640624, 0.041054271697998045, 0.041510814666748046, 0.04095347213745117, 0.041030303955078125, 0.040918464660644534, 0.04081919860839844, 0.04092502212524414, 0.04121807861328125, 0.04074905776977539, 0.04116889572143555, 0.04118022537231445, 0.04102169418334961, 0.04095657730102539, 0.040796192169189456, 0.040869056701660154, 0.04114924621582031, 0.0414453125, 0.04088966369628906, 0.04101599884033203, 0.04098668670654297, 0.040804351806640625, 0.04108489608764648, 0.04080028915405273, 0.04081459045410156, 0.04117647933959961, 0.04230335998535156, 0.0407861442565918, 0.04096803283691406, 0.04073257446289062, 0.04086819076538086, 0.04083248138427734, 0.04114118576049805, 0.041371646881103515, 0.040827999114990236, 0.04099728012084961, 0.04092979049682617, 0.04165740966796875, 0.04099760055541992, 0.04089263916015625]",tokens/s,24.086145489218296,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent 
call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3792, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1531, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1697, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, 
requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1051.111424,5046.730752,0.0,4651.483136,4638.22848,s,1,14.167484375,14.167484375,0.0,14.167484375,14.167484375,14.167484375,14.167484375,[14.167484375],,kWh,0.00020650797157087102,2.2772040955236278e-05,7.706478387400906e-05,0.0003063447964001164,,MB,1251.110912,6172.901376,0.0,5765.070848,5418.661888,s,10,10.221631225585938,1.0221631225585939,0.007044564666882446,1.0222241821289062,1.0298222412109377,1.0307274047851562,1.0314515356445313,"[1.0081405029296875, 1.0177132568359375, 1.017791259765625, 1.0163858642578125, 1.019808837890625, 1.0292550048828124, 1.0246395263671875, 1.02962109375, 1.026643310546875, 1.031632568359375]",tokens/s,250.44926230482866,kWh,2.973801881458409e-05,3.2795504039942414e-06,1.9803376953803075e-05,5.282094617238141e-05,tokens/kWh,4846562.179415393,MB,1274.10176,6172.901376,0.0,5765.070848,5418.664448,s,10,52.74841650390624,5.274841650390624,0.017688636116568155,5.276070068359376,5.29464609375,5.295215625,5.29567125,"[5.24542919921875, 5.252671875, 5.26422802734375, 5.2634521484375, 5.2657001953125, 5.28643994140625, 5.2933076171875, 5.2868828125, 5.29578515625, 5.29451953125]",tokens/s,11.943486492212442,kWh,0.0001548616572941675,1.708246161493997e-05,0.00010273677663379388,0.00027468089554290136,tokens/kWh,229357.05038925895,,s,630,52.74474177551269,0.08372181234208365,0.0016123843966272529,0.08350317001342773,0.08451623992919921,0.08499251632690429,0.09415560745239258,"[0.09373286437988282, 0.08188425445556641, 0.08277494049072266, 0.08255078125, 0.08146883392333984, 0.08222502136230468, 0.08254438018798828, 0.08226850891113281, 0.08222566223144531, 0.08239523315429688, 0.08211869049072265, 0.08367922973632813, 0.0836648941040039, 0.08332864379882812, 0.083159423828125, 0.08293990325927734, 0.08295219421386718, 0.08243199920654297, 0.08239513397216797, 0.08242572784423828, 0.0826103057861328, 0.08262973022460937, 0.08279535675048828, 0.08468281555175781, 0.08369971466064453, 0.08407360076904297, 0.08286707305908203, 0.08282931518554687, 0.08255030059814453, 0.08345600128173829, 0.08336022186279297, 0.08388960266113281, 0.08249606323242188, 0.08229865264892579, 0.0836171875, 0.08336380767822266, 0.08358521270751954, 0.08457417297363282, 0.0827992935180664, 0.08306278228759766, 0.0833062744140625, 0.08291964721679687, 0.08293888092041016, 0.08280323028564453, 0.08288508605957032, 
0.083378173828125, 0.08305628967285156, 0.08392124938964844, 0.08400272369384766, 0.08334524536132812, 0.08345420837402344, 0.0834326400756836, 0.08384288024902344, 0.08277693176269531, 0.08317938995361328, 0.08288591766357421, 0.08290406036376953, 0.08331257629394531, 0.08328195190429688, 0.08376937866210937, 0.0842260513305664, 0.08376028442382813, 0.08324585723876952, 0.09474403381347657, 0.08245712280273437, 0.0822838363647461, 0.08258834838867188, 0.08222675323486328, 0.08281951904296875, 0.08237184143066406, 0.08274406433105469, 0.08244838714599609, 0.08250326538085938, 0.08283123016357422, 0.0852750701904297, 0.08408803558349609, 0.08409897613525391, 0.0830248031616211, 0.08250588989257812, 0.08279235076904297, 0.08234700775146485, 0.0827585906982422, 0.08258268737792969, 0.08262448120117187, 0.08277024078369141, 0.08378841400146485, 0.0840244140625, 0.08388819122314453, 0.08336653137207031, 0.08340838623046876, 0.08259248352050781, 0.08261734771728516, 0.08312934112548828, 0.08275113677978516, 0.08275593566894532, 0.0827146224975586, 0.0833628158569336, 0.0834420166015625, 0.0834464340209961, 0.08394927978515625, 0.08349314880371093, 0.08389990234375, 0.08259839630126953, 0.08308121490478515, 0.08254838562011718, 0.08253215789794922, 0.08416515350341797, 0.08279039764404297, 0.08401878356933594, 0.0833089599609375, 0.08357039642333984, 0.08373481750488282, 0.08353791809082031, 0.08342118072509766, 0.08315081787109375, 0.0827449951171875, 0.08274278259277344, 0.08333958435058594, 0.08254534149169922, 0.08296636962890624, 0.08361094665527344, 0.08418946838378906, 0.08419987487792968, 0.08418656158447266, 0.08418319702148437, 0.08361615753173827, 0.09531391906738282, 0.08266342163085938, 0.08249932861328126, 0.08223881530761719, 0.08286239624023438, 0.08204723358154296, 0.08270883178710937, 0.0821860122680664, 0.08229199981689453, 0.082430908203125, 0.08282521820068359, 0.08570230102539063, 0.08478755187988281, 0.08360108947753907, 0.08312223815917968, 0.08263814544677735, 0.0827742691040039, 0.08268051147460938, 0.0826060791015625, 0.08261631774902344, 0.08297779083251954, 0.08302285003662109, 0.08396364593505859, 0.08368339538574218, 0.08384121704101563, 0.08333516693115234, 0.08339250946044922, 0.0830750732421875, 0.08344985961914063, 0.08257855987548827, 0.08258854675292969, 0.0828006362915039, 0.08269209289550782, 0.08280604553222656, 0.08355289459228515, 0.08343116760253906, 0.08400931549072266, 0.0841195526123047, 0.0833597412109375, 0.08349612426757813, 0.08328479766845703, 0.08298601531982422, 0.08394786834716797, 0.08350131225585937, 0.08236444854736329, 0.08387824249267578, 0.08419020843505859, 0.08362432098388672, 0.08404188537597657, 0.08434121704101563, 0.08422156524658203, 0.08327926635742187, 0.08301372528076172, 0.08393119812011719, 0.08326172637939454, 0.08341078186035156, 0.0833148193359375, 0.08365318298339844, 0.08426290893554687, 0.08564326477050781, 0.08353177642822265, 0.08437904357910156, 0.0850370864868164, 0.093502685546875, 0.08262319946289062, 0.08272697448730469, 0.08185855865478515, 0.08269596862792969, 0.08230524444580078, 0.08299314880371093, 0.08247062683105469, 0.0826370849609375, 0.08252620697021484, 0.08317282867431641, 0.08695657348632813, 0.08456396484375, 0.0847093734741211, 0.0830525131225586, 0.08315702056884766, 0.08279596710205078, 0.08251853179931641, 0.08263276672363282, 0.08300112152099609, 0.08184435272216797, 0.08306288146972657, 0.0831448974609375, 0.08414803314208984, 0.08451622772216796, 0.08310198211669922, 0.08334166717529297, 
0.08253440093994141, 0.08271459197998046, 0.08262659454345703, 0.08343551635742187, 0.08301158142089844, 0.08328524780273437, 0.08290787506103516, 0.08414825439453125, 0.08377497863769531, 0.08368998718261719, 0.08449219512939453, 0.0836278076171875, 0.08283168029785157, 0.08412764739990235, 0.08323193359375, 0.0832437744140625, 0.08290525054931641, 0.08354608154296875, 0.08372140502929687, 0.08433545684814453, 0.08415805053710937, 0.08418345642089843, 0.0841928939819336, 0.08401548767089843, 0.08317132568359376, 0.08376866912841797, 0.08383554840087891, 0.08307917022705077, 0.08286822509765625, 0.08324294281005859, 0.08347241973876954, 0.0833656997680664, 0.08403782653808593, 0.08403097534179688, 0.08350323486328125, 0.08395174407958984, 0.09424813079833984, 0.08263107299804688, 0.08235049438476562, 0.08203469085693359, 0.08314662170410156, 0.08288182067871094, 0.08285065460205078, 0.0826429443359375, 0.08223232269287109, 0.0826497573852539, 0.08272489929199218, 0.08669580841064453, 0.08457430267333985, 0.08375299072265625, 0.0823967056274414, 0.08287484741210938, 0.0827886734008789, 0.08164940643310546, 0.0830047378540039, 0.0823447036743164, 0.0831219482421875, 0.08300790405273438, 0.08436736297607422, 0.08464383697509766, 0.08426496124267578, 0.08397209930419922, 0.08342733001708984, 0.08272064208984375, 0.08317324829101562, 0.08349065399169922, 0.08281298828125, 0.08227996826171875, 0.08366073608398437, 0.08352243041992187, 0.08410050964355469, 0.08394608306884765, 0.08352687835693359, 0.08399542236328125, 0.08322361755371094, 0.08305350494384765, 0.08349081420898438, 0.08285903930664062, 0.08314115142822266, 0.0830665283203125, 0.08406095886230469, 0.08399052429199219, 0.08406416320800782, 0.08346633911132813, 0.08402329254150391, 0.08387789154052734, 0.08379801940917969, 0.08333055877685547, 0.08345241546630859, 0.08350310516357422, 0.083019775390625, 0.08354611206054688, 0.08403148651123046, 0.0842072296142578, 0.08413426971435548, 0.08406832122802735, 0.08382208251953124, 0.08403408050537109, 0.08356249237060547, 0.09464048004150391, 0.08275360107421875, 0.08243170928955078, 0.08230844879150391, 0.08279750061035156, 0.0830382080078125, 0.08288870239257813, 0.08281702423095703, 0.08288003540039063, 0.08283948516845703, 0.08289311981201172, 0.08619235229492188, 0.08539110565185547, 0.08424678039550781, 0.08404505920410156, 0.0832577896118164, 0.08309161376953125, 0.08295645141601563, 0.08341011047363281, 0.08302674865722656, 0.08274508666992188, 0.08358118438720703, 0.08449024200439453, 0.08418099212646485, 0.08396940612792969, 0.08395225524902344, 0.08351744079589844, 0.08359635162353515, 0.08263775634765624, 0.08344576263427735, 0.08328806304931641, 0.08296857452392578, 0.08333106994628907, 0.08411545562744141, 0.0843563232421875, 0.08447875213623048, 0.08434073638916016, 0.08426207733154296, 0.0841346206665039, 0.08322672271728515, 0.0828310089111328, 0.08379542541503907, 0.08343436431884765, 0.08353913879394531, 0.08412038421630859, 0.0849731216430664, 0.0843411865234375, 0.08442880249023438, 0.0841890869140625, 0.08400870513916016, 0.08481158447265626, 0.0839234848022461, 0.08333277130126954, 0.08311228942871093, 0.08377318572998046, 0.084500732421875, 0.08421689605712891, 0.08454351806640625, 0.08374291229248047, 0.084457763671875, 0.08406416320800782, 0.0841016616821289, 0.08527590179443359, 0.09516851043701172, 0.08265727996826172, 0.08262044525146485, 0.08278550720214843, 0.08243276977539063, 0.08337129974365234, 0.08266825866699219, 0.08340444946289062, 
0.08307542419433593, 0.08304025268554688, 0.08301363372802735, 0.08621414184570313, 0.08516044616699218, 0.08294121551513672, 0.08338505554199219, 0.08289673614501954, 0.08354217529296876, 0.08328396606445312, 0.0830013427734375, 0.08354351806640625, 0.08281683349609376, 0.0836956787109375, 0.08421443176269532, 0.08514790344238281, 0.08464358520507813, 0.08396390533447265, 0.08329945373535157, 0.08472598266601562, 0.08347456359863281, 0.08437814331054687, 0.08338547515869141, 0.08357772827148438, 0.08367922973632813, 0.08362592315673828, 0.08453126525878907, 0.08423014068603515, 0.08380745697021484, 0.0846012191772461, 0.08380210876464844, 0.08429535675048828, 0.0836487045288086, 0.08342550659179687, 0.08331295776367187, 0.08421945953369141, 0.08322000122070312, 0.08412973022460937, 0.08420223999023438, 0.08401299285888672, 0.08435945892333985, 0.08454342651367187, 0.08477005004882812, 0.08379270172119141, 0.08424652862548829, 0.08394137573242187, 0.08443289947509766, 0.08412156677246094, 0.08451634979248047, 0.08407001495361328, 0.08514211273193359, 0.08414553833007812, 0.08437763214111328, 0.0839197769165039, 0.08428953552246093, 0.09392908477783203, 0.08289129638671874, 0.08263382720947265, 0.08307798767089844, 0.08284166717529297, 0.08329567718505859, 0.08275411224365234, 0.08307472229003907, 0.08289929962158203, 0.08225791931152343, 0.08451686096191406, 0.0861338882446289, 0.0846192626953125, 0.08428224182128906, 0.08369939422607423, 0.08298902130126953, 0.08269859313964843, 0.08315904235839844, 0.08288665771484376, 0.08291942596435548, 0.08283084869384766, 0.08316316986083984, 0.08372886657714844, 0.08420146942138672, 0.08523538970947266, 0.08371561431884765, 0.08368787384033204, 0.08320832061767579, 0.08395388793945313, 0.08356012725830078, 0.0835771484375, 0.08306278228759766, 0.08309283447265625, 0.08400086212158203, 0.08376582336425781, 0.08492237091064453, 0.08413129425048828, 0.08426140594482422, 0.08372153472900391, 0.08392774200439453, 0.08409907531738281, 0.08411750030517579, 0.08356249237060547, 0.08320745849609375, 0.08424505615234375, 0.08415001678466796, 0.08441680145263672, 0.08429376220703125, 0.084168701171875, 0.08416595458984374, 0.08390262603759766, 0.08414784240722656, 0.08365872192382813, 0.08387577819824218, 0.08397926330566406, 0.08385740661621094, 0.08415846252441406, 0.08427836608886718, 0.08441744232177735, 0.0841295394897461, 0.08408882904052735, 0.084216064453125, 0.0840847396850586, 0.09591603088378907, 0.08282316589355469, 0.08293170928955078, 0.08317747497558593, 0.08296857452392578, 0.0830135040283203, 0.08340902709960937, 0.08342323303222657, 0.08300482940673828, 0.08303472137451172, 0.08328601837158203, 0.08601805114746094, 0.08506813049316406, 0.08354166412353516, 0.08297792053222657, 0.08307392120361329, 0.08327372741699218, 0.08322866821289063, 0.08354611206054688, 0.0837201919555664, 0.08268185424804687, 0.08351129913330078, 0.08463884735107421, 0.08422284698486328, 0.08500838470458984, 0.08376525115966797, 0.08428749084472656, 0.08339584350585938, 0.08365033721923829, 0.08385635375976562, 0.0840327377319336, 0.08320899200439454, 0.08331791687011719, 0.08413654327392578, 0.08425286102294922, 0.08424419403076172, 0.08418748474121093, 0.0837918701171875, 0.0840002212524414, 0.08358326721191406, 0.08315110778808593, 0.08414733123779297, 0.08385330963134766, 0.08356047821044922, 0.08436208343505859, 0.084242431640625, 0.08447404479980469, 0.08486441802978516, 0.0841480941772461, 0.0841938247680664, 0.08421311950683594, 0.08370035552978515, 
0.08382669067382813, 0.08425167846679688, 0.0842208023071289, 0.08402748870849609, 0.08433869171142579, 0.0840847396850586, 0.08571084594726562, 0.08439926147460937, 0.08427811431884766, 0.0842977294921875, 0.08384307098388671, 0.096570556640625, 0.08266819000244141, 0.08274524688720702, 0.08321663665771484, 0.08266130828857422, 0.0829573745727539, 0.08254566192626953, 0.08302735900878906, 0.0825898208618164, 0.08306121826171875, 0.08286402893066407, 0.08687567901611327, 0.08484921264648437, 0.08342095947265625, 0.08356204986572266, 0.08309622192382812, 0.08263597106933594, 0.08283993530273437, 0.08373462677001953, 0.08279821014404297, 0.08317001342773438, 0.08344739532470703, 0.08518492889404297, 0.08490937805175781, 0.08434758758544922, 0.08422182464599609, 0.08392867279052735, 0.08348694610595703, 0.08338361358642578, 0.08401769256591797, 0.08336227416992187, 0.0832034912109375, 0.08292601776123047, 0.08379804992675781, 0.0849420166015625, 0.0846447982788086, 0.08409420776367188, 0.08451148986816406, 0.08368128204345703, 0.08322649383544922, 0.08468252563476562, 0.08388028717041016, 0.08450048065185548, 0.08355760192871094, 0.0834543685913086, 0.08367926025390625, 0.08439228820800782, 0.08402861022949219, 0.08584889221191407, 0.08421785736083984, 0.08421171569824219, 0.08411254119873048, 0.08425965118408203, 0.08483229064941407, 0.08315833282470703, 0.08396819305419922, 0.08337664031982422, 0.08394054412841796, 0.08453987121582031, 0.08447216033935546, 0.08451891326904297, 0.0843345947265625, 0.08496742248535157]",tokens/s,11.94431859542223,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1300.811776,1093.599232,0.0,698.351616,690.178048,s,1,9.0914990234375,9.0914990234375,0.0,9.0914990234375,9.0914990234375,9.0914990234375,9.0914990234375,[9.0914990234375],,kWh,4.987506294163874e-05,5.4943037951315234e-06,1.7437791727969998e-05,7.280715846474025e-05,,MB,1435.410432,1408.172032,0.0,1000.341504,957.775872,s,10,0.6234354209899903,0.062343542098999026,0.0003477882052090371,0.062364255905151364,0.06268343505859375,0.06280832557678223,0.06290823799133301,"[0.06265568161010743, 0.06167334365844727, 0.06215894317626953, 0.06193344116210937, 0.06265401458740234, 0.0624372787475586, 0.0629332160949707, 0.06226099014282226, 0.062372673034667966, 0.06235583877563477]",tokens/s,4106.279357587388,kWh,1.909804073257004e-06,2.104694802594633e-07,1.2647159718300294e-06,3.3849895253464964e-06,tokens/kWh,75628003.59738046,MB,1462.214656,1420.754944,0.0,1012.924416,957.778432,s,10,27.223863525390627,2.7223863525390626,0.009427315305820406,2.7192373046875,2.733354736328125,2.7382030029296875,2.7420816162109376,"[2.719857666015625, 2.74305126953125, 2.718616943359375, 2.712888671875, 2.7300703125, 2.73227734375, 2.7155078125, 2.71289599609375, 2.714979248046875, 
2.72371826171875]",tokens/s,23.141461880031237,kWh,7.949106879882261e-05,8.767922032024084e-06,3.395020108897235e-05,0.00012220919191981903,tokens/kWh,515509.50472967734,,s,630,27.221254787445105,0.04320834093245249,0.000528114719663762,0.04307913780212402,0.043606554794311525,0.04398572731018066,0.045682334403991706,"[0.04331292724609375, 0.04353705596923828, 0.04330876922607422, 0.04297286224365234, 0.04332556915283203, 0.04328905487060547, 0.043525375366210935, 0.04314966583251953, 0.04310467147827148, 0.04306262588500977, 0.04283824157714844, 0.04278726577758789, 0.04292748641967774, 0.043000446319580075, 0.042723232269287106, 0.04281967926025391, 0.04296192169189453, 0.04319302368164062, 0.04285984039306641, 0.04284108734130859, 0.04309401702880859, 0.043036670684814454, 0.04317113494873047, 0.04297798538208008, 0.043749183654785154, 0.04318207931518555, 0.04293593597412109, 0.0432624626159668, 0.04321900939941406, 0.044125953674316404, 0.043983104705810544, 0.043259361267089846, 0.04342428970336914, 0.043177982330322266, 0.04291142272949219, 0.04317740631103516, 0.04305964660644531, 0.04289785766601562, 0.04282598495483399, 0.042845279693603515, 0.0428243522644043, 0.043062335968017576, 0.04314822387695313, 0.043169792175292966, 0.0433889274597168, 0.04375542449951172, 0.04366723251342773, 0.04313119888305664, 0.04322035217285156, 0.04311715316772461, 0.04311151885986328, 0.04311088180541992, 0.0431129264831543, 0.043222881317138674, 0.04331660842895508, 0.04331804656982422, 0.043046913146972655, 0.04304399871826172, 0.04364988708496094, 0.04316556930541992, 0.04310371017456055, 0.04298614501953125, 0.04307129669189453, 0.04358121490478516, 0.04322601699829102, 0.043093311309814454, 0.042998462677001956, 0.04298342514038086, 0.04320665740966797, 0.04326009750366211, 0.04308176040649414, 0.04334979248046875, 0.04304230499267578, 0.042797569274902345, 0.04351385498046875, 0.04329372787475586, 0.04331414413452148, 0.04313497543334961, 0.04342281723022461, 0.04294902420043945, 0.042895870208740236, 0.04317184066772461, 0.04345446395874023, 0.04315913772583008, 0.04325827026367188, 0.04321484756469727, 0.04322089767456055, 0.042948703765869144, 0.043038719177246096, 0.043597824096679685, 0.043243518829345705, 0.04331235122680664, 0.043408096313476564, 0.04340073776245117, 0.04339766311645508, 0.04352000045776367, 0.04429414367675781, 0.04410508728027344, 0.04393638229370117, 0.04432486343383789, 0.04337667083740234, 0.04334982299804688, 0.04304003143310547, 0.04301001739501953, 0.04333353424072266, 0.04302707290649414, 0.0455601921081543, 0.043480064392089846, 0.043240447998046876, 0.043235328674316405, 0.04336844635009766, 0.044423168182373046, 0.04343952178955078, 0.04313763046264649, 0.043218944549560545, 0.043142814636230466, 0.04346505737304687, 0.04343369674682617, 0.04901286315917969, 0.04343619155883789, 0.045797279357910156, 0.044500225067138674, 0.043916126251220704, 0.04352000045776367, 0.04396819305419922, 0.0432061767578125, 0.04354339218139648, 0.04353023910522461, 0.04313497543334961, 0.043140705108642576, 0.04313731384277344, 0.04325593566894531, 0.042985183715820316, 0.04349900817871094, 0.04306537628173828, 0.043340286254882815, 0.04307942581176758, 0.043143871307373044, 0.042968894958496096, 0.04340326309204102, 0.043053054809570314, 0.04294819259643555, 0.043006305694580076, 0.04301942443847656, 0.04306403350830078, 0.04291392135620117, 0.04290943908691406, 0.04313958358764648, 0.04306515121459961, 0.04350886535644531, 0.04284288024902344, 0.04305897521972656, 
0.042928417205810546, 0.042942527770996095, 0.04389068984985352, 0.04332313537597656, 0.04322739028930664, 0.0429486083984375, 0.04315135955810547, 0.04291788864135742, 0.042995712280273435, 0.04340550231933594, 0.043111743927001955, 0.043364864349365234, 0.04288716888427734, 0.0429027214050293, 0.04285862350463867, 0.043005790710449215, 0.043299678802490235, 0.04647222518920899, 0.04318038558959961, 0.042870944976806644, 0.04300009536743164, 0.0429917106628418, 0.04297475051879883, 0.043149505615234375, 0.042772254943847655, 0.04297500610351562, 0.04275008010864258, 0.042947265625, 0.04288694381713867, 0.04284147262573242, 0.04283014297485352, 0.04289590454101563, 0.04319641494750977, 0.04319641494750977, 0.04328857421875, 0.043020320892333985, 0.04319638442993164, 0.04338278579711914, 0.043046913146972655, 0.042949790954589846, 0.04304326248168945, 0.042970783233642576, 0.04273638534545898, 0.042643329620361325, 0.04326768112182617, 0.04267385482788086, 0.044544864654541015, 0.043257217407226566, 0.04276287841796875, 0.044203231811523434, 0.04311324691772461, 0.042979328155517575, 0.04294377517700195, 0.0428100814819336, 0.04300799942016602, 0.0430489616394043, 0.044076671600341795, 0.04280767822265625, 0.04281958389282227, 0.042864543914794925, 0.043140480041503906, 0.042909950256347654, 0.04300233459472656, 0.04299980926513672, 0.04300764846801758, 0.04282198333740234, 0.042799102783203126, 0.04255081558227539, 0.04291823959350586, 0.04311257553100586, 0.0432988166809082, 0.04283343887329102, 0.04346928024291992, 0.0426776008605957, 0.04306358337402344, 0.042946945190429686, 0.043009342193603514, 0.04275651168823242, 0.04286707305908203, 0.04303766250610352, 0.04277958297729492, 0.04321279907226563, 0.04302188873291016, 0.04343427276611328, 0.04293257522583008, 0.042854209899902344, 0.04310537719726563, 0.04291676712036133, 0.042698879241943356, 0.04298124694824219, 0.04295270538330078, 0.042995712280273435, 0.0430489616394043, 0.042935871124267576, 0.04336051177978516, 0.043853759765625, 0.043398944854736325, 0.04289788818359375, 0.043063297271728515, 0.04301615905761719, 0.04342950439453125, 0.043686176300048826, 0.04339279937744141, 0.043112319946289064, 0.04310844802856445, 0.043450721740722655, 0.04312188720703125, 0.04293711853027344, 0.04311856079101563, 0.0431016960144043, 0.04301375961303711, 0.0430294075012207, 0.04290764617919922, 0.0430571517944336, 0.04295065689086914, 0.043364158630371095, 0.04347859191894531, 0.04414323043823242, 0.04336819076538086, 0.04299599838256836, 0.042958465576171875, 0.04307612609863281, 0.04299142456054687, 0.043087390899658205, 0.0431416015625, 0.04309196853637695, 0.0440709114074707, 0.0432182731628418, 0.04351990509033203, 0.043610527038574216, 0.04306774520874023, 0.043140670776367185, 0.04336240005493164, 0.04652431869506836, 0.04355920028686523, 0.043233440399169924, 0.04339494323730469, 0.043469024658203126, 0.043267871856689455, 0.043094142913818356, 0.04362444686889649, 0.04385177612304687, 0.04328243255615234, 0.04350566482543945, 0.043261951446533206, 0.043200511932373044, 0.04305920028686523, 0.04333315277099609, 0.042923904418945315, 0.04351587295532226, 0.04314380645751953, 0.04310630416870117, 0.043104255676269534, 0.04324723052978516, 0.04347942352294922, 0.04353843307495117, 0.04329203033447265, 0.04327619171142578, 0.04382547378540039, 0.043413921356201174, 0.04303664016723633, 0.043078849792480466, 0.043053184509277344, 0.0435316162109375, 0.04356121444702148, 0.04343235015869141, 0.043409374237060545, 0.044272960662841795, 
0.043686622619628905, 0.04333363342285156, 0.04520959854125976, 0.04346060943603516, 0.043225086212158204, 0.04300566482543945, 0.043098209381103515, 0.04302608108520508, 0.0434672966003418, 0.04306243133544922, 0.04299043273925781, 0.04484719848632813, 0.04319551849365234, 0.043635009765625, 0.043467231750488285, 0.043278175354003905, 0.04308803176879883, 0.04317753601074219, 0.043199935913085935, 0.04315785598754883, 0.043251583099365234, 0.04360611343383789, 0.04343840026855469, 0.043176319122314455, 0.04327219009399414, 0.04302345657348633, 0.04430665588378906, 0.04368864059448242, 0.04332940673828125, 0.042903678894042965, 0.04308377456665039, 0.04295430374145508, 0.04397715377807617, 0.04289263916015625, 0.042789215087890624, 0.042936641693115236, 0.04312268829345703, 0.04296438217163086, 0.043072097778320315, 0.043071487426757815, 0.043757312774658205, 0.042938625335693356, 0.04282483291625976, 0.042875232696533205, 0.0430695686340332, 0.042961055755615235, 0.043456768035888674, 0.04512768173217773, 0.04321279907226563, 0.04338390350341797, 0.04312771224975586, 0.04374118423461914, 0.043159358978271486, 0.04323142242431641, 0.043374591827392575, 0.04364492797851562, 0.043415550231933595, 0.0430301742553711, 0.043464702606201173, 0.043587039947509766, 0.04299216079711914, 0.04273779296875, 0.04291929626464844, 0.04280985641479492, 0.04284524917602539, 0.04270585632324219, 0.04292393493652344, 0.04301193618774414, 0.04262937545776367, 0.04298652648925781, 0.042646495819091794, 0.04301811218261719, 0.042821758270263674, 0.04291788864135742, 0.04281686401367187, 0.04616463851928711, 0.04294460678100586, 0.04307958221435547, 0.04362444686889649, 0.042967041015625, 0.043103649139404294, 0.04319049453735352, 0.04335577774047852, 0.04310009765625, 0.0429444808959961, 0.043404193878173826, 0.04290460968017578, 0.04308198547363281, 0.04296732711791992, 0.04308211135864258, 0.04290150451660156, 0.042893310546875, 0.043151168823242186, 0.043270336151123044, 0.04348928070068359, 0.0428928337097168, 0.04288876724243164, 0.0429202880859375, 0.04272800064086914, 0.042819263458251954, 0.04301446533203125, 0.04298950576782227, 0.042700672149658205, 0.0426740493774414, 0.04279299163818359, 0.044973728179931644, 0.043171489715576175, 0.04297830581665039, 0.04313699340820312, 0.04308915328979492, 0.04289779281616211, 0.04301968002319336, 0.042937313079833984, 0.04354048156738281, 0.04287612915039062, 0.04281238555908203, 0.043736385345458983, 0.04322140884399414, 0.04288726425170898, 0.043063297271728515, 0.043020286560058595, 0.04394128036499023, 0.04323705673217773, 0.04304777526855469, 0.042958911895751954, 0.04312460708618164, 0.04356927871704101, 0.04311228942871094, 0.042931934356689454, 0.04291628646850586, 0.04331315231323242, 0.04278793716430664, 0.04287376022338867, 0.042913791656494144, 0.04338687896728516, 0.04290764617919922, 0.0428642578125, 0.04534214401245117, 0.04285740661621094, 0.04300595092773438, 0.04288476943969727, 0.04316732788085938, 0.04296371078491211, 0.04296192169189453, 0.04295372772216797, 0.04302403259277344, 0.04320086288452148, 0.04312255859375, 0.04303007888793945, 0.043033153533935546, 0.04300163269042969, 0.04294697570800781, 0.04270371246337891, 0.04403094482421875, 0.04295212936401367, 0.04383318328857422, 0.04317871856689453, 0.04277248001098633, 0.04288003158569336, 0.042855392456054686, 0.042962944030761716, 0.043053054809570314, 0.04324726486206055, 0.042924385070800784, 0.042821632385253904, 0.04273971176147461, 0.042643455505371096, 0.042828895568847655, 
0.04291267013549805, 0.04302438354492188, 0.04287897491455078, 0.0431629753112793, 0.04278054428100586, 0.04274665451049805, 0.04290457534790039, 0.042787841796875, 0.04315964889526367, 0.042831775665283206, 0.04279203033447266, 0.04288358306884766, 0.04282614517211914, 0.04301728057861328, 0.0430621452331543, 0.04305417633056641, 0.043278335571289066, 0.042999168395996094, 0.04303116989135742, 0.04303244781494141, 0.043014015197753906, 0.043016384124755856, 0.04287289428710937, 0.04285955047607422, 0.042912734985351565, 0.042796222686767575, 0.04293305587768555, 0.042978401184082034, 0.042793632507324215, 0.04269068908691406, 0.042762081146240236, 0.043010337829589844, 0.0429417610168457, 0.04293292617797852, 0.04284620666503906, 0.0429219856262207, 0.04295065689086914, 0.04294220733642578, 0.04281779098510742, 0.04283801651000976, 0.04319609451293945, 0.04308614349365234, 0.043499519348144534, 0.04290150451660156, 0.0431800651550293, 0.0429951057434082, 0.04386377716064453, 0.04373385620117187, 0.04396236801147461, 0.0432657585144043, 0.043296993255615236, 0.043320926666259765, 0.043065536499023435, 0.043355873107910156, 0.04292870330810547, 0.042689697265625, 0.04293427276611328, 0.04292256164550781, 0.042942752838134764, 0.043090110778808595, 0.04313065719604492, 0.044378143310546875, 0.04294831848144531, 0.04303244781494141, 0.04281590270996094, 0.043345569610595706, 0.04263955307006836, 0.043159713745117186, 0.04272742462158203, 0.042660896301269534, 0.04355753707885742, 0.04448281478881836, 0.04276025772094726, 0.04283564758300781, 0.04304313659667969, 0.04314217758178711, 0.043987873077392575, 0.04300316619873047, 0.0426644172668457, 0.0430362548828125, 0.04306179046630859, 0.04347804641723633, 0.04297987365722656, 0.04308329772949219, 0.04576950454711914, 0.04435968017578125, 0.04356915283203125, 0.042897407531738284, 0.043068992614746095, 0.04286918258666992, 0.04297727966308594, 0.042942272186279294, 0.04310371017456055, 0.042963550567626956, 0.04291551971435547, 0.042856094360351565, 0.042928993225097654, 0.04279289627075195, 0.04353551864624024, 0.0431808967590332, 0.04286646270751953, 0.042899070739746095, 0.04286323165893555, 0.04283184051513672, 0.0429136962890625, 0.04300352096557617, 0.04308816146850586, 0.04351961517333985, 0.04313350296020508, 0.04307558441162109, 0.0432988166809082, 0.04390092849731445, 0.04332748794555664, 0.043328510284423825, 0.04436067199707031, 0.04342284774780274, 0.043289505004882815, 0.04304076766967774, 0.042990718841552734, 0.04297203063964844, 0.04298137664794922, 0.04309196853637695, 0.04287286376953125, 0.04294652938842773, 0.042897151947021483, 0.04314751815795898, 0.0433070068359375, 0.04314278411865234, 0.0429428482055664, 0.04294192123413086, 0.04319855880737305, 0.04299753570556641, 0.042877601623535155, 0.04314089584350586, 0.043001697540283206, 0.04573222351074219, 0.043284481048583984, 0.043804672241210936, 0.04327654266357422, 0.04304886245727539, 0.043326366424560545, 0.04299462509155273]",tokens/s,23.14367963267319,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1091.4816,4520.3456,0.0,4125.097984,4116.435456,s,1,13.485150390625,13.485150390625,0.0,13.485150390625,13.485150390625,13.485150390625,13.485150390625,[13.485150390625],,kWh,0.000186849221679167,2.060324561585591e-05,6.737783168000305e-05,0.00027483029897502594,,MB,1256.615936,5392.760832,0.0,4984.930304,4693.31456,s,10,9.26958074951172,0.926958074951172,0.008434019192174591,0.9293541870117188,0.9364100158691406,0.9370015472412109,0.9374747723388671,"[0.9067501220703125, 0.9198154907226562, 0.925846923828125, 0.9234560546875, 0.9296069946289063, 0.9291013793945313, 0.9299220581054688, 0.9312100830078125, 0.9375930786132812, 0.936278564453125]",tokens/s,276.17214512477807,kWh,2.684464992575723e-05,2.9604662414399143e-06,1.7821150620545642e-05,4.762626678774279e-05,tokens/kWh,5375185.108270648,MB,1278.758912,5392.760832,0.0,4984.930304,4693.31712,s,10,46.71592431640626,4.671592431640624,0.012588643260299189,4.676452880859374,4.681121142578125,4.686071801757812,4.690032329101562,"[4.64633837890625, 4.65569677734375, 4.66561376953125, 4.6656298828125, 4.6793896484375, 4.67459765625, 4.67830810546875, 4.679306640625, 4.68002099609375, 4.6910224609375]",tokens/s,13.485765490435758,kWh,0.0001374168171438261,1.5158070464900814e-05,9.117996435805415e-05,0.00024375485196678095,tokens/kWh,258456.39375656686,,s,630,46.71254718780524,0.07414690029810345,0.0016710346581399445,0.07393638610839844,0.0749411003112793,0.0753974910736084,0.08525176269531251,"[0.08837117004394532, 0.07261116790771484, 0.07196534729003906, 0.073152099609375, 0.07257129669189454, 0.07215952301025391, 0.07194137573242188, 0.07395990753173828, 0.0728656005859375, 0.07329606628417969, 0.07274086761474609, 0.07288524627685547, 0.07379679870605468, 0.07413536071777344, 0.07380515289306641, 0.07404611206054687, 0.07358214569091796, 0.07343762969970703, 0.07395680236816406, 0.07297901153564453, 0.0736789779663086, 0.07239984130859375, 0.07370195007324219, 0.07276099395751953, 0.07346835327148438, 0.07243801879882812, 0.07345532989501953, 0.07415837097167968, 0.07313839721679688, 0.07420649719238281, 0.07389250946044922, 0.07356121826171876, 0.07269245147705078, 0.07333888244628907, 0.07302476501464844, 0.07450399780273438, 0.07421024322509766, 0.07345356750488281, 0.0732040023803711, 0.07345881652832031, 0.0739559326171875, 0.07403084564208984, 0.07372415924072266, 0.07443865966796875, 0.0738971176147461, 0.0732491226196289, 0.07307689666748046, 0.07399795532226562, 0.07387820434570312, 0.07325027465820312, 0.07363820648193359, 0.07403107452392578, 0.07421788787841797, 0.07366233825683594, 0.07408204650878907, 0.07435263824462891, 0.07362175750732422, 0.07410892486572265, 0.07376924896240235, 0.07408406066894531, 0.07394303894042968, 0.07430553436279297, 0.07371075439453124, 0.08566802978515625, 0.07299282836914063, 0.07308089447021485, 0.07306240081787109, 0.07300653076171874, 0.07226790618896485, 0.07324700927734375, 0.07263043212890626, 0.07360921478271484, 0.07316051483154297, 0.07298067474365234, 0.07307263946533203, 0.07331849670410157, 0.07536153411865235, 0.07459923553466796, 0.07400835418701172, 0.0742150421142578, 0.07391686248779297, 0.072957763671875, 0.07392265319824219, 0.07295600128173828, 0.07337165069580077, 0.07377536010742188, 0.0732128677368164, 0.0736363525390625, 0.073168701171875, 0.0750918731689453, 0.07428361511230469, 
0.07489561462402344, 0.07401036834716797, 0.07370035552978516, 0.07401113891601563, 0.07343465423583985, 0.07294649505615235, 0.0733656005859375, 0.07346959686279297, 0.07401897430419922, 0.07386956787109375, 0.07396761322021485, 0.07433363342285157, 0.07388819122314454, 0.07435810852050781, 0.07404009246826172, 0.0738776626586914, 0.07355155181884766, 0.07418681335449219, 0.0738716812133789, 0.07363561248779296, 0.07317715454101563, 0.07326937866210938, 0.07401583862304688, 0.07411510467529298, 0.07398675537109375, 0.07359487915039062, 0.07397782135009766, 0.07354950714111329, 0.07395378875732422, 0.07424803161621094, 0.07358464050292969, 0.07408233642578126, 0.07417443084716797, 0.07375785827636719, 0.07384559631347656, 0.086623779296875, 0.07315071868896485, 0.07343692779541015, 0.07323107147216797, 0.0726398696899414, 0.07361734771728516, 0.07289619445800781, 0.07287696075439454, 0.07300080108642579, 0.07309622192382813, 0.07296918487548829, 0.07292723083496094, 0.07632217407226563, 0.07695833587646485, 0.07327696228027344, 0.0742825927734375, 0.07336844635009766, 0.07326105499267578, 0.07333663940429687, 0.07298681640625, 0.07370137786865234, 0.07277772521972656, 0.07372799682617187, 0.07321571350097657, 0.07395970916748047, 0.07369522857666015, 0.0743773422241211, 0.07491571044921876, 0.07425638580322266, 0.07387923431396484, 0.07390032196044923, 0.07409394836425781, 0.07371190643310546, 0.07383692932128906, 0.07303139495849609, 0.07337299346923828, 0.07436787414550781, 0.07373532867431641, 0.07420409393310547, 0.07410380554199218, 0.07472844696044922, 0.07445913696289062, 0.07431283569335938, 0.07396620941162109, 0.07407772827148437, 0.07359766387939454, 0.07396147155761719, 0.0739653778076172, 0.07374253082275391, 0.07321913909912109, 0.07359410858154297, 0.07377069091796876, 0.07460431671142578, 0.07348678588867187, 0.074115966796875, 0.07534889221191406, 0.07423993682861328, 0.07386294555664062, 0.07405187225341797, 0.07400886535644531, 0.07374320220947266, 0.07446002960205078, 0.0748620834350586, 0.08477935791015626, 0.07343724822998046, 0.07312140655517578, 0.07282902526855468, 0.07332688140869141, 0.07328768157958984, 0.0727224349975586, 0.07335913848876953, 0.07351728057861329, 0.07271001434326171, 0.07309324645996093, 0.07335084533691406, 0.07516159820556641, 0.07574710083007813, 0.07442515563964844, 0.07432787322998047, 0.07323385620117187, 0.07295638275146485, 0.07372799682617187, 0.0739163818359375, 0.07379766082763672, 0.07242546844482421, 0.07409664154052735, 0.07427385711669922, 0.07322937774658203, 0.074010498046875, 0.07490259552001953, 0.0745831069946289, 0.07448153686523437, 0.0743013458251953, 0.07363184356689453, 0.07432192230224609, 0.07392205047607422, 0.07383910369873047, 0.07346377563476562, 0.0728719711303711, 0.07293952178955078, 0.07469261169433594, 0.07393062591552735, 0.0742085723876953, 0.07454297637939453, 0.07443961334228516, 0.074231201171875, 0.07380159759521485, 0.07406992340087891, 0.07393567657470704, 0.07407823944091797, 0.07367472076416015, 0.07451599884033203, 0.07351958465576172, 0.0741297607421875, 0.07424988555908203, 0.0737259521484375, 0.07397686767578125, 0.07512499237060546, 0.07403507232666015, 0.07408924865722656, 0.0745536651611328, 0.07383628845214844, 0.07397990417480468, 0.07394713592529296, 0.07395516967773437, 0.07393651580810547, 0.08691849517822266, 0.07333350372314452, 0.07352925109863281, 0.07409664154052735, 0.07479705810546874, 0.07314022064208985, 0.07348326110839844, 0.07254927825927734, 0.07322428894042969, 
0.07275724792480469, 0.072947998046875, 0.07294070434570313, 0.07577353668212891, 0.07594729614257813, 0.0757019500732422, 0.07364425659179688, 0.07404022216796875, 0.07368793487548828, 0.07334912109375, 0.07420722961425781, 0.07346819305419922, 0.07360892486572265, 0.07362969970703125, 0.07305420684814454, 0.07335935974121094, 0.07522633361816407, 0.07583753967285156, 0.07494521331787109, 0.07404924774169921, 0.07388188934326172, 0.07334928131103516, 0.07387324523925781, 0.07386953735351562, 0.07298371124267578, 0.07348697662353515, 0.07396729278564453, 0.07323795318603515, 0.07435558319091796, 0.07405516815185546, 0.07593958282470703, 0.07468902587890625, 0.07379974365234375, 0.07481362915039062, 0.07384255981445312, 0.07393292999267578, 0.07432355499267577, 0.07354000091552734, 0.07423798370361329, 0.07393305969238281, 0.07371923065185547, 0.0747451171875, 0.07530598449707031, 0.07470489501953125, 0.07478281402587891, 0.07474575805664062, 0.07528243255615234, 0.07422313690185547, 0.07481391906738281, 0.07395532989501953, 0.07311145782470703, 0.07454102325439453, 0.07355619049072265, 0.07421737670898437, 0.08532991790771484, 0.0731504669189453, 0.07333273315429688, 0.07407820892333984, 0.07358393859863281, 0.07340306854248047, 0.07366822052001953, 0.07324082946777344, 0.07292889404296875, 0.0732873306274414, 0.07300614166259765, 0.07278521728515625, 0.07521539306640625, 0.07610070037841797, 0.07424617767333984, 0.07386809539794922, 0.0738685760498047, 0.07415289306640625, 0.07371715545654296, 0.07319795227050781, 0.07416636657714844, 0.07391426849365235, 0.07347020721435547, 0.07316675567626953, 0.07343708801269531, 0.0746495361328125, 0.07468236541748047, 0.07447142028808594, 0.07416361236572265, 0.07388457489013672, 0.0743842544555664, 0.0740544662475586, 0.07370499420166016, 0.07446498870849609, 0.07371033477783204, 0.0742760009765625, 0.07437398529052734, 0.07378943634033203, 0.0742481918334961, 0.07377030181884765, 0.07418876647949219, 0.073781982421875, 0.07392400360107422, 0.074553955078125, 0.07419673919677734, 0.07405964660644532, 0.07375091552734375, 0.074351806640625, 0.07377519989013671, 0.07386595153808594, 0.07425433349609376, 0.07379334259033203, 0.07466531372070312, 0.07426338958740235, 0.07411113739013672, 0.07520649719238282, 0.07433216094970703, 0.07394694519042969, 0.07455558776855468, 0.07448576354980468, 0.07477862548828125, 0.07386831665039062, 0.07460963439941407, 0.08506041717529297, 0.07347404479980468, 0.07317696380615234, 0.07317670440673828, 0.07319193267822266, 0.07329753875732421, 0.07355142211914062, 0.07333766174316406, 0.07298880004882813, 0.0728139877319336, 0.07293385314941406, 0.07427670288085937, 0.07591542053222657, 0.07650508880615234, 0.07424736022949219, 0.07413228607177734, 0.07406095886230468, 0.07371421051025391, 0.07310678100585938, 0.0733255386352539, 0.07410704040527344, 0.074131103515625, 0.07349472045898438, 0.0739871063232422, 0.07387619018554688, 0.07534617614746093, 0.07510809326171874, 0.07448601531982423, 0.07436083221435547, 0.07371311950683594, 0.07380636596679688, 0.0736786880493164, 0.07434249877929687, 0.0739362564086914, 0.07415468597412109, 0.07371158599853515, 0.07483014678955079, 0.07431753540039063, 0.07470489501953125, 0.0740492172241211, 0.07628627014160157, 0.07411507415771484, 0.07411244964599609, 0.07376703643798828, 0.07410527801513672, 0.07370304107666016, 0.07437741088867188, 0.07428524780273438, 0.0737791976928711, 0.07378534698486328, 0.07402700805664063, 0.07433417510986329, 0.07490383911132813, 
0.07460371398925782, 0.07478534698486328, 0.07416422271728515, 0.07362355041503907, 0.07403314971923829, 0.07377510070800782, 0.07478495788574219, 0.0743128662109375, 0.07394371032714844, 0.07391232299804687, 0.08630681610107421, 0.07307859039306641, 0.07370771026611328, 0.07356963348388672, 0.07290486145019531, 0.07306905364990235, 0.07310921478271484, 0.07397628784179687, 0.07295980834960937, 0.07400038146972657, 0.07364985656738281, 0.07299922943115235, 0.07534329223632813, 0.07727747344970703, 0.07428921508789063, 0.07393302154541016, 0.07400249481201172, 0.074052734375, 0.07345439910888672, 0.07304192352294922, 0.07321190643310548, 0.07318732452392578, 0.07346371459960938, 0.07342704010009765, 0.07396556854248047, 0.07525091552734375, 0.07545843505859375, 0.07490188598632813, 0.0747176284790039, 0.0734208984375, 0.07429312133789062, 0.07369757080078125, 0.07361110687255859, 0.07337369537353515, 0.07361084747314453, 0.0736072006225586, 0.07411135864257813, 0.07415577697753906, 0.07459251403808594, 0.0752432632446289, 0.07501849365234375, 0.07462911987304688, 0.07352665710449219, 0.07467385864257813, 0.0742696304321289, 0.07399014282226563, 0.07389417266845703, 0.074401123046875, 0.07364236450195312, 0.07394735717773437, 0.07416207885742188, 0.0745818862915039, 0.07420492553710938, 0.07518016052246093, 0.07469840240478516, 0.07478953552246094, 0.07512989044189453, 0.07439440155029296, 0.07395942687988281, 0.07379484558105469, 0.07422434997558594, 0.07375049591064453, 0.07404342651367188, 0.08432003021240235, 0.07329420471191406, 0.0731927032470703, 0.07346387481689454, 0.0728255386352539, 0.07348633575439453, 0.07334912109375, 0.07426662445068359, 0.07361945343017579, 0.07369670104980469, 0.07363542175292968, 0.0733071060180664, 0.07571417236328125, 0.07592588806152344, 0.074010498046875, 0.07509004974365234, 0.07334111785888672, 0.0734735336303711, 0.07440415954589844, 0.07310765075683594, 0.07386707305908204, 0.07323648071289063, 0.0734203872680664, 0.07433379364013672, 0.07414979553222656, 0.07523625946044922, 0.07516687774658203, 0.07403353881835938, 0.07487935638427734, 0.07406774139404297, 0.07375116729736328, 0.07385171508789062, 0.07382514953613281, 0.07414374542236328, 0.07411833953857422, 0.07375545501708984, 0.07396691131591797, 0.07450870513916015, 0.07475382232666015, 0.07490201568603516, 0.07471920013427734, 0.07489036560058594, 0.07379596710205077, 0.07391900634765625, 0.07393049621582032, 0.07421977233886719, 0.07351500701904297, 0.07449600219726563, 0.07464694213867187, 0.07400713348388673, 0.0750223388671875, 0.07443456268310547, 0.07438438415527343, 0.07387872314453126, 0.0741246109008789, 0.07484835052490234, 0.07391999816894532, 0.07449897766113281, 0.07443660736083985, 0.07363603210449218, 0.07486217498779296, 0.07386748504638672, 0.07411920166015624, 0.08634966278076171, 0.07336566162109374, 0.07328892517089844, 0.07327414703369141, 0.07329615783691407, 0.073244384765625, 0.07332454681396484, 0.07329548645019532, 0.07369971466064452, 0.07357440185546875, 0.0737318115234375, 0.0733864974975586, 0.07632396697998046, 0.07608179473876953, 0.07451165008544922, 0.07446969604492187, 0.07365676879882813, 0.07414383697509766, 0.0738502426147461, 0.0737490234375, 0.07352092742919922, 0.07383798217773438, 0.0732639389038086, 0.07373619079589844, 0.07463343811035156, 0.0751749725341797, 0.07511103820800781, 0.07501663970947266, 0.07457142639160157, 0.07533548736572265, 0.07524291229248047, 0.07546959686279296, 0.0736727066040039, 0.07423782348632812, 0.07397379302978516, 
0.07416432189941406, 0.07396351623535156, 0.07412531280517579, 0.07504908752441407, 0.07542691040039062, 0.07505567932128906, 0.07494064331054688, 0.07346591949462891, 0.07468592071533203, 0.074225341796875, 0.07378205108642578, 0.07420944213867188, 0.07404294586181641, 0.0740621109008789, 0.07366413116455078, 0.07445919799804687, 0.07401299285888673, 0.07501200103759766, 0.07550752258300782, 0.07549801635742187, 0.07466265869140624, 0.07381913757324218, 0.07444684600830079, 0.07405712127685547, 0.0740225601196289, 0.07415904235839844, 0.07452419281005859, 0.07422000122070313]",tokens/s,13.486740456843863,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1045.479424,2578.382848,0.0,2183.135232,2081.564672,s,1,10.5770791015625,10.5770791015625,0.0,10.5770791015625,10.5770791015625,10.5770791015625,10.5770791015625,[10.5770791015625],,kWh,0.00010194741089165973,1.1238392177900514e-05,3.7579752286007184e-05,0.00015076555535556744,,MB,1333.641216,3134.128128,0.0,2726.2976,2478.999552,s,10,3.938599304199219,0.3938599304199219,0.0017934917827119408,0.3933694305419922,0.39635830993652343,0.3964963394165039,0.3966067630004883,"[0.3938117370605469, 0.39111972045898435, 0.3923058166503906, 0.39292218017578123, 0.39435189819335936, 0.3929271240234375, 0.3923180541992187, 0.3966343688964844, 0.3958807678222656, 0.39632763671875]",tokens/s,649.977264067102,kWh,1.18270040326679e-05,1.304301034693343e-06,7.834928490159654e-06,2.0966233557520897e-05,tokens/kWh,12210109.140378674,MB,1336.283136,3134.128128,0.0,2726.2976,2479.002112,s,10,29.3740537109375,2.93740537109375,0.008384870051051997,2.9368660888671876,2.9470647216796877,2.9493862426757813,2.951243459472656,"[2.931232666015625, 2.943474609375, 2.942299072265625, 2.951707763671875, 2.9376552734375, 2.936076904296875, 2.92938134765625, 2.946548828125, 2.933615966796875, 2.922061279296875]",tokens/s,21.447499422438177,kWh,8.554523406858199e-05,9.43588446525466e-06,5.5108344086640394e-05,0.00015008946262047703,tokens/kWh,419749.65397340804,,s,630,29.3706841659546,0.04662013359675332,0.0010946513975805792,0.04650251197814942,0.046934130477905275,0.047460646247863766,0.053646552581787126,"[0.055274463653564455, 0.04717689514160156, 0.04647200012207031, 
0.046667934417724606, 0.04673110580444336, 0.04661814498901367, 0.046505664825439455, 0.04643849563598633, 0.046566078186035156, 0.046538177490234374, 0.04629766464233399, 0.04651580810546875, 0.04624220657348633, 0.04637696075439453, 0.04657356643676758, 0.046706687927246096, 0.04660784149169922, 0.04659568023681641, 0.04652742385864258, 0.04671078491210937, 0.046898303985595705, 0.04666419219970703, 0.04669683074951172, 0.04677983856201172, 0.04677280044555664, 0.04683270263671875, 0.046795711517333985, 0.046683616638183593, 0.0466910400390625, 0.04672198486328125, 0.04665977478027344, 0.04673411178588867, 0.046415775299072266, 0.04641996765136719, 0.04639670562744141, 0.04617903900146485, 0.04619878387451172, 0.04626153564453125, 0.04617113494873047, 0.04588246536254883, 0.04555948638916016, 0.04559503936767578, 0.0457938232421875, 0.045486209869384765, 0.04540790557861328, 0.04561100769042969, 0.04621657562255859, 0.0463246078491211, 0.04655628967285156, 0.04625084686279297, 0.047179489135742186, 0.047085758209228515, 0.046155872344970705, 0.04644659042358398, 0.0461187858581543, 0.04601663970947266, 0.04620083236694336, 0.046380126953125, 0.04631228637695312, 0.04603696060180664, 0.04574745559692383, 0.045652801513671876, 0.0457933120727539, 0.05317043304443359, 0.04674764633178711, 0.04611481475830078, 0.046482719421386716, 0.04663894271850586, 0.046777217864990235, 0.047067134857177735, 0.046929920196533206, 0.046822689056396485, 0.046887649536132815, 0.04668236923217774, 0.04716313552856445, 0.04681942367553711, 0.04751353454589844, 0.047610145568847656, 0.04680672073364258, 0.046787681579589846, 0.046776737213134766, 0.04670515060424805, 0.04679884719848633, 0.04666995239257812, 0.046884735107421874, 0.04641296005249024, 0.04611932754516602, 0.046026657104492184, 0.04617020797729492, 0.04643475341796875, 0.046473567962646484, 0.04660134506225586, 0.04678915023803711, 0.047500415802001955, 0.04629721450805664, 0.046418014526367186, 0.04607657623291016, 0.045719520568847656, 0.046366111755371094, 0.0456808967590332, 0.04589920043945313, 0.046591007232666015, 0.046454689025878904, 0.046364223480224606, 0.04667030334472656, 0.04705072021484375, 0.048023551940917966, 0.04751795196533203, 0.0461841926574707, 0.04625420761108399, 0.045894718170166014, 0.04598255920410156, 0.04637488174438477, 0.04639916610717774, 0.046502559661865235, 0.046507678985595706, 0.04635238265991211, 0.046350017547607425, 0.047669151306152346, 0.046520736694335936, 0.04705811309814453, 0.04649657440185547, 0.04643635177612305, 0.046443870544433594, 0.04643292617797851, 0.04671078491210937, 0.0531517448425293, 0.04692089462280274, 0.046750720977783204, 0.04663065719604492, 0.04665507125854492, 0.04672876739501953, 0.0469922866821289, 0.04702406311035156, 0.04661868667602539, 0.04665753555297852, 0.04635174560546875, 0.046473567962646484, 0.046596641540527346, 0.04647091293334961, 0.0462042236328125, 0.04593670272827149, 0.04610240173339844, 0.046051361083984374, 0.045856544494628906, 0.04601103973388672, 0.04621500778198242, 0.04623193740844726, 0.046146720886230466, 0.04636127853393555, 0.0461080322265625, 0.04627132797241211, 0.046171424865722656, 0.04638560104370117, 0.046653728485107425, 0.04651007843017578, 0.04660134506225586, 0.047352703094482425, 0.046577247619628906, 0.04649001693725586, 0.04689715194702149, 0.046268062591552736, 0.04622489547729492, 0.04626512145996094, 0.0460882568359375, 0.046452320098876954, 0.04612752151489258, 0.04610367965698242, 0.046504833221435546, 0.04641094589233399, 
0.04657833480834961, 0.046792606353759765, 0.04641203308105469, 0.05075961685180664, 0.04893443298339844, 0.046271007537841795, 0.04631177520751953, 0.046706336975097656, 0.04658585739135742, 0.04681235122680664, 0.0466126708984375, 0.04705731201171875, 0.04673763275146484, 0.04692892837524414, 0.04660732650756836, 0.04663296127319336, 0.04647670364379883, 0.04652412796020508, 0.046578559875488285, 0.053878719329833985, 0.04689686584472656, 0.046040481567382815, 0.04676492691040039, 0.0466431999206543, 0.050100223541259765, 0.04675529479980469, 0.04656387329101563, 0.04658790588378906, 0.04649369430541992, 0.04673535919189453, 0.046663681030273435, 0.04648729705810547, 0.046735614776611326, 0.04653468704223633, 0.046630878448486325, 0.04680534362792969, 0.04664115142822266, 0.04652934265136719, 0.04639625549316406, 0.047083518981933595, 0.04674764633178711, 0.046615745544433596, 0.04669462585449219, 0.046618560791015624, 0.046676639556884766, 0.04672480010986328, 0.046681854248046876, 0.04711888122558594, 0.047048736572265625, 0.04653247833251953, 0.046773536682128906, 0.04650675201416016, 0.04658595275878906, 0.04702207946777344, 0.046837760925292966, 0.046780193328857425, 0.04669257736206055, 0.04666163253784179, 0.0468047981262207, 0.04668230438232422, 0.04666147232055664, 0.04684406280517578, 0.04671619033813477, 0.046871265411376956, 0.04680089569091797, 0.046663681030273435, 0.046657470703125, 0.04680710220336914, 0.046565376281738284, 0.04677344131469727, 0.04676051330566406, 0.04664713668823242, 0.04664976119995117, 0.04665135955810547, 0.04662275314331055, 0.046868480682373044, 0.04661248016357422, 0.04650393676757812, 0.04650723266601563, 0.04638595199584961, 0.04641996765136719, 0.04661270523071289, 0.053278942108154294, 0.047230400085449216, 0.04696886444091797, 0.046320159912109374, 0.04601241683959961, 0.04637491226196289, 0.04706304168701172, 0.046622718811035156, 0.04655513763427734, 0.04652236938476562, 0.04671920013427734, 0.04635145568847656, 0.04640652847290039, 0.047839038848876955, 0.046153728485107424, 0.046009662628173825, 0.046195392608642576, 0.046222816467285155, 0.046373409271240236, 0.046647296905517575, 0.04621516799926758, 0.046059200286865234, 0.04614393615722656, 0.04647910308837891, 0.04640131378173828, 0.046070110321044924, 0.04576976013183594, 0.046726112365722654, 0.04682547378540039, 0.04806278228759766, 0.047351486206054685, 0.046862335205078126, 0.04665340805053711, 0.04648553466796875, 0.04661862564086914, 0.04681670379638672, 0.04662931060791015, 0.04686985778808594, 0.04687478256225586, 0.04680563354492188, 0.04680031967163086, 0.046707263946533205, 0.04672691345214844, 0.04836172866821289, 0.048070655822753904, 0.046300830841064455, 0.04581343841552735, 0.046030815124511716, 0.045763233184814456, 0.04655241775512695, 0.04630729675292969, 0.04649852752685547, 0.04611670303344727, 0.04663107299804688, 0.045494529724121095, 0.0453752326965332, 0.04572345733642578, 0.046081630706787106, 0.04611894226074219, 0.045969696044921876, 0.04633536148071289, 0.04613580703735352, 0.04680745697021484, 0.05426502227783203, 0.04705974578857422, 0.04629094314575195, 0.046174144744873045, 0.045776958465576174, 0.04534716796875, 0.04581763076782226, 0.045636768341064456, 0.04623215866088867, 0.046159999847412106, 0.046321727752685546, 0.04657478332519531, 0.04656534576416015, 0.0465948486328125, 0.04638544082641602, 0.046120670318603514, 0.04619820785522461, 0.046617153167724606, 0.046497791290283204, 0.046274208068847654, 0.046598495483398436, 0.04653081512451172, 
0.04658560180664063, 0.04642403030395508, 0.0466473274230957, 0.046330974578857424, 0.04636156845092773, 0.0463647346496582, 0.04688883209228516, 0.04652019119262695, 0.04645286560058594, 0.047110145568847656, 0.04661455917358399, 0.04657183837890625, 0.04653430557250977, 0.04639968109130859, 0.04644025421142578, 0.04658585739135742, 0.04676028823852539, 0.04649526214599609, 0.04625625610351562, 0.046145183563232425, 0.046426464080810546, 0.04617334365844727, 0.04643721771240234, 0.04625532913208008, 0.04666009521484375, 0.04792348861694336, 0.050051071166992187, 0.04649356842041016, 0.046017982482910155, 0.04681590270996094, 0.04646656036376953, 0.0466126708984375, 0.04659872055053711, 0.04675302505493164, 0.046502464294433596, 0.04659811019897461, 0.046165599822998046, 0.04633414459228516, 0.04589779281616211, 0.04589968109130859, 0.04618230438232422, 0.05379670333862305, 0.04705295944213867, 0.0465541763305664, 0.04670969772338867, 0.04680310440063477, 0.04692361450195313, 0.04660380935668945, 0.046806655883789065, 0.04657852935791015, 0.04652646255493164, 0.0466328010559082, 0.0464447021484375, 0.04642406463623047, 0.046853664398193356, 0.046711166381835936, 0.04641756820678711, 0.04655763244628906, 0.046442497253417966, 0.04661676788330078, 0.04640729522705078, 0.047607425689697266, 0.04633785629272461, 0.04632038497924805, 0.045921344757080075, 0.04639839935302734, 0.046542526245117184, 0.04653292846679687, 0.046292991638183595, 0.04679244613647461, 0.045919551849365234, 0.04603385543823242, 0.04597126388549805, 0.0458856315612793, 0.04591820907592774, 0.04600377655029297, 0.046319873809814456, 0.046345600128173826, 0.04615670394897461, 0.04625507354736328, 0.0460010871887207, 0.04596652984619141, 0.04573267364501953, 0.046448863983154294, 0.04647488021850586, 0.04584431838989258, 0.04603526306152344, 0.04635622406005859, 0.0465164794921875, 0.04666739273071289, 0.046422401428222654, 0.04604927825927734, 0.0469700813293457, 0.046051361083984374, 0.0460063362121582, 0.045714111328125, 0.04583187103271484, 0.046061790466308594, 0.04661695861816406, 0.046387168884277345, 0.04643392181396484, 0.04644038391113281, 0.046401153564453124, 0.04619327926635742, 0.05415302276611328, 0.046733505249023435, 0.046814849853515625, 0.04660876846313477, 0.046663902282714845, 0.046685985565185543, 0.04678451156616211, 0.046755840301513675, 0.04689452743530274, 0.04678303909301758, 0.04673535919189453, 0.046778144836425783, 0.04653452682495117, 0.04693027114868164, 0.04719830322265625, 0.04617820739746094, 0.046004222869873046, 0.04616499328613281, 0.04659251022338867, 0.04668371200561523, 0.0465786247253418, 0.04628275299072265, 0.04630745697021484, 0.0465775375366211, 0.04620671844482422, 0.04613132858276367, 0.046702014923095704, 0.04661932754516602, 0.04686454391479492, 0.04741247940063477, 0.04827324676513672, 0.04704742431640625, 0.04636467361450195, 0.045806655883789064, 0.046301185607910154, 0.04625680160522461, 0.04587753677368164, 0.04561100769042969, 0.045891422271728516, 0.04624819183349609, 0.04667382431030274, 0.0505489616394043, 0.046739166259765624, 0.047457408905029294, 0.04746329498291016, 0.04634758377075195, 0.045970176696777346, 0.046010433197021486, 0.04616799926757813, 0.046160961151123045, 0.04663814544677734, 0.04661644744873047, 0.04659609603881836, 0.04631552124023437, 0.046159999847412106, 0.0464956169128418, 0.04667529678344726, 0.04692649459838867, 0.046622718811035156, 0.046706302642822266, 0.046782081604003906, 0.04714368057250977, 0.04692172622680664, 
0.05454275131225586, 0.04701593780517578, 0.04641011047363281, 0.04595059204101563, 0.045827167510986325, 0.046088897705078125, 0.04599420928955078, 0.04572979354858398, 0.045564064025878905, 0.04577654266357422, 0.045809856414794924, 0.0465524787902832, 0.046674591064453125, 0.04648758316040039, 0.046550399780273435, 0.04633571243286133, 0.046226238250732424, 0.04600012969970703, 0.04588544082641602, 0.04590959930419922, 0.04586665725708008, 0.04614643096923828, 0.04626992034912109, 0.04617631912231445, 0.04604451370239258, 0.04556492614746094, 0.045663360595703126, 0.04574860763549805, 0.04614604949951172, 0.04577004623413086, 0.04577964782714844, 0.04595257568359375, 0.04619228744506836, 0.04671123123168945, 0.04655686569213867, 0.04660812759399414, 0.0464716796875, 0.04661699295043945, 0.046647296905517575, 0.04658585739135742, 0.04685356903076172, 0.046047679901123045, 0.046233985900878904, 0.046350078582763674, 0.04647116851806641, 0.04645808029174805, 0.04639401626586914, 0.04907952117919922, 0.047012767791748046, 0.051078975677490236, 0.04701196670532227, 0.046593822479248044, 0.04673116683959961, 0.04669478225708008, 0.04662681579589844, 0.046669151306152346, 0.04670140838623047, 0.04667168045043945, 0.04670259094238281, 0.046502174377441405, 0.046599903106689454, 0.046638751983642576, 0.04630876922607422, 0.05410128021240234, 0.04685094451904297, 0.04615753555297852, 0.04612928009033203, 0.04614348983764648, 0.04659366226196289, 0.046328094482421874, 0.04647366333007812, 0.04615542221069336, 0.04589363098144531, 0.04579935836791992, 0.04587116622924805, 0.04557823944091797, 0.045686847686767576, 0.046196670532226564, 0.04599593734741211, 0.04611660766601562, 0.04633020782470703, 0.04620083236694336, 0.04595302581787109, 0.04573308944702149, 0.045756671905517576, 0.04572243118286133, 0.04566191864013672, 0.04571955108642578, 0.045985790252685545, 0.04609356689453125, 0.04638307189941406, 0.04644134521484375, 0.04640143966674805, 0.04673126220703125, 0.04643814468383789, 0.04631196975708008, 0.046388961791992187, 0.046439872741699216, 0.04656185531616211, 0.04669235229492188, 0.046515872955322266, 0.04650368118286133, 0.046723392486572264, 0.046446880340576174, 0.04651200103759766, 0.046517887115478516, 0.04631196975708008, 0.04639740753173828, 0.04658160018920898, 0.04640576171875, 0.04653897476196289, 0.04660614395141602, 0.04661862564086914, 0.04630454254150391, 0.04616300964355469, 0.045819198608398434, 0.04579568099975586, 0.045873153686523435, 0.04593862533569336, 0.04598585510253906, 0.048132095336914066, 0.04631289672851562, 0.046259040832519534, 0.04630857467651367, 0.04624435043334961, 0.045914112091064455]",tokens/s,21.449959981874464,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1078.571008,9784.19712,0.0,9388.949504,9304.608768,s,1,33.56804296875,33.56804296875,0.0,33.56804296875,33.56804296875,33.56804296875,33.56804296875,[33.56804296875],,kWh,0.0007570145398666909,8.349687089604282e-05,0.00027338716315400413,0.0011138985739167378,,MB,1442.779136,10197.336064,0.0,9789.505536,9597.896704,s,10,8.024159423828126,0.8024159423828126,0.0037759130374792073,0.8023334350585938,0.8048954711914063,0.8080676086425781,0.8106053186035157,"[0.8041905517578125, 0.803518798828125, 0.7991083984375, 0.81123974609375, 0.8016114501953125, 0.8036441650390626, 0.8002913818359375, 0.7958880615234375, 0.8020947265625, 0.8025721435546875]",tokens/s,319.036532648885,kWh,2.3606744060574178e-05,2.603420795459561e-06,1.4281101168460964e-05,4.0491266024494706e-05,tokens/kWh,6322351.092829152,MB,1471.410176,10197.336064,0.0,9789.505536,9597.899264,s,10,376.05490625,37.605490625,0.1423211389298986,37.6388125,37.76045625,37.78117734375,37.79775421875,"[37.7558515625, 37.45809375, 37.6530859375, 37.8018984375, 37.680734375, 37.5471328125, 37.35794921875, 37.436078125, 37.73954296875, 37.6245390625]",tokens/s,1.6752872772817333,kWh,0.001104775240299009,0.00012186504322589423,0.0004200425518457404,0.0016466828353706438,tokens/kWh,38258.73364728408,,s,630,376.05202130126935,0.5969079703194754,0.00458039882502152,0.5970689392089843,0.6014270935058594,0.6045695404052734,0.6133359204101563,"[0.6004793090820313, 0.59568603515625, 0.5983150024414062, 0.5971712646484375, 0.5964830322265625, 0.5946448364257813, 0.6069822998046875, 0.603926513671875, 0.5973463134765625, 0.6034421997070313, 0.5990032348632812, 0.600175048828125, 0.600375732421875, 0.601417724609375, 0.5993401489257812, 0.598215576171875, 0.5988536376953125, 0.6002418212890624, 0.5966739501953126, 0.5974447631835937, 0.5969735107421875, 0.5988904418945312, 0.5986395263671875, 0.6071869506835937, 0.60400390625, 0.5985402221679688, 0.5982113647460937, 0.5988589477539062, 0.597866943359375, 0.5977251586914063, 0.602064453125, 0.597414306640625, 0.5975443115234375, 0.59811083984375, 0.5987000122070313, 0.5957073974609375, 0.5968655395507813, 0.59815283203125, 0.596900146484375, 0.5984912109375, 0.60626123046875, 0.6044815063476563, 0.600195068359375, 0.5990802001953125, 0.5992078857421875, 0.598090087890625, 0.5971746215820313, 0.6007172241210937, 0.5994456787109375, 0.5960455932617188, 0.59719921875, 0.5961911010742188, 0.5984378662109375, 0.60060986328125, 0.5966236572265625, 0.5973423461914062, 0.603420166015625, 0.605086669921875, 0.6004592895507812, 0.5994905395507812, 0.5986683959960938, 0.5983241577148437, 0.5999329223632812, 0.5987039794921875, 0.6004655151367188, 0.595375732421875, 0.596560302734375, 0.5953699951171875, 0.594818603515625, 0.5980304565429687, 0.5979303588867187, 0.5962640380859375, 0.6027191162109375, 0.5994303588867187, 0.59736962890625, 0.59692236328125, 0.5960028076171875, 0.591583251953125, 0.5895291137695312, 0.5947446899414063, 0.5901292724609375, 0.5896566162109375, 0.5879984130859375, 0.5889564819335937, 0.5868004150390626, 0.5895543823242188, 0.5913001098632813, 0.5909202880859376, 0.5916732177734375, 0.5997586059570312, 0.5964392700195312, 0.5928029174804688, 0.5908571166992187, 0.5948597412109375, 0.5899429931640625, 0.5903914184570312, 0.5945098266601563, 0.59052392578125, 0.5895480346679688, 0.590845947265625, 0.59912939453125, 0.597419921875, 0.5991373901367187, 0.60061669921875, 0.60052685546875, 0.6007255249023438, 0.6045711059570312, 
0.6095562133789062, 0.5999685668945313, 0.5974201049804687, 0.5981220092773437, 0.5915159912109375, 0.5926892700195312, 0.5878067016601562, 0.5925120239257813, 0.5952144165039063, 0.58997607421875, 0.5930389404296875, 0.5917305297851563, 0.5916942749023437, 0.588666748046875, 0.5904390258789063, 0.5925928955078125, 0.6007597045898437, 0.5906499633789063, 0.5920354614257812, 0.59857275390625, 0.5967245483398438, 0.6008688354492188, 0.5964656372070313, 0.5970862426757813, 0.5971251220703125, 0.59636328125, 0.596336669921875, 0.5975347290039063, 0.6004019165039063, 0.59886181640625, 0.599341064453125, 0.6138900756835938, 0.6055706176757812, 0.5977914428710938, 0.5973641967773438, 0.5989378051757812, 0.597885009765625, 0.596038818359375, 0.5986885986328125, 0.59691748046875, 0.5977874145507812, 0.5956680908203125, 0.59686181640625, 0.5982411499023438, 0.5929468383789063, 0.5946557006835937, 0.59449365234375, 0.5981058959960938, 0.606013427734375, 0.5958430786132812, 0.59447705078125, 0.5954662475585938, 0.5960722045898438, 0.5968630981445312, 0.59592919921875, 0.59464501953125, 0.5944805297851562, 0.6029273071289063, 0.60008642578125, 0.59781787109375, 0.5953638305664063, 0.5948907470703125, 0.595585205078125, 0.5950830688476563, 0.597391357421875, 0.6060230102539063, 0.5968836669921875, 0.5955005493164063, 0.5992354736328125, 0.5962216186523438, 0.599525390625, 0.6015245361328125, 0.5963460693359375, 0.5948171997070313, 0.595548828125, 0.5993286743164062, 0.5966597900390626, 0.595879638671875, 0.5945802001953125, 0.5940714721679687, 0.5949031372070313, 0.5992950439453125, 0.5966018676757813, 0.5981992797851563, 0.5979851684570312, 0.59512255859375, 0.597765625, 0.5947230834960937, 0.5960410766601563, 0.5953389282226562, 0.5958194580078126, 0.5947756958007813, 0.5951156005859375, 0.6132478637695312, 0.6152252807617188, 0.6057738037109375, 0.6098370361328125, 0.6183765869140625, 0.6193934936523438, 0.6091259765625, 0.6064729614257812, 0.603739013671875, 0.6000545043945312, 0.6011077270507813, 0.5995889282226563, 0.5968084716796875, 0.59753466796875, 0.5952074584960938, 0.59660986328125, 0.5948067626953125, 0.5954007568359375, 0.5988023071289063, 0.5995245971679688, 0.6012404174804687, 0.603479248046875, 0.597592529296875, 0.5988560180664062, 0.5971099853515625, 0.598666015625, 0.5959555053710938, 0.6005142822265624, 0.5991405029296875, 0.599218505859375, 0.5973622436523438, 0.5999540405273438, 0.5991586303710937, 0.5985156860351563, 0.597485595703125, 0.598877685546875, 0.5990526123046875, 0.6079464111328124, 0.5997179565429688, 0.59822119140625, 0.6002606811523438, 0.6012161865234374, 0.598807373046875, 0.5991383056640625, 0.5984088134765625, 0.5982332763671875, 0.596343017578125, 0.5982119140625, 0.598999755859375, 0.5978890380859375, 0.5959164428710938, 0.5959904174804688, 0.6044815063476563, 0.6069657592773438, 0.6004791259765625, 0.5995720825195312, 0.5961649780273437, 0.598435791015625, 0.5975311889648437, 0.5995951538085937, 0.5956484985351562, 0.6011754760742187, 0.5952840576171875, 0.5979202880859374, 0.5974712524414062, 0.5964451293945312, 0.6007623901367187, 0.59650634765625, 0.5972930297851563, 0.6051753540039062, 0.600068359375, 0.6002447509765625, 0.59643701171875, 0.5965925903320313, 0.59482470703125, 0.5983768920898438, 0.596989990234375, 0.5959901733398437, 0.5953162841796875, 0.5955325317382812, 0.59702685546875, 0.5968765869140625, 0.5952971801757813, 0.5957722778320312, 0.5971724853515625, 0.60042919921875, 0.6080061645507813, 0.5949615478515625, 0.5953770141601562, 
0.5987880859375, 0.5982637939453125, 0.5975543212890625, 0.5996019897460938, 0.595951416015625, 0.5963368530273437, 0.5959393310546875, 0.594935791015625, 0.5952304077148437, 0.597755859375, 0.604116943359375, 0.5960966796875, 0.5985327758789063, 0.6015114135742188, 0.6076749267578125, 0.5980487670898438, 0.6006435546875, 0.5979259033203125, 0.595861328125, 0.5966128540039063, 0.5995729370117188, 0.5943173217773438, 0.597823486328125, 0.5940162353515624, 0.5958881225585938, 0.5932373657226563, 0.6018928833007813, 0.5966554565429687, 0.5999664306640625, 0.6031705322265625, 0.5981074829101563, 0.5961983032226562, 0.5982576904296875, 0.59667578125, 0.5979472045898437, 0.5966356201171875, 0.5988740844726562, 0.5950750732421874, 0.59464501953125, 0.5959188232421875, 0.597173828125, 0.6030330810546874, 0.59887060546875, 0.5971211547851563, 0.5971533203125, 0.6003432006835937, 0.6133718872070313, 0.595173583984375, 0.5976878662109375, 0.5946596069335938, 0.5977293090820313, 0.590903076171875, 0.5955565185546875, 0.5908944091796875, 0.5900397338867187, 0.5872164916992187, 0.5898387451171875, 0.588695068359375, 0.5992227783203125, 0.5991546630859375, 0.6000045776367188, 0.5997272338867188, 0.6043222045898438, 0.6048569946289063, 0.5994698486328125, 0.6001105346679687, 0.5976450805664062, 0.597142333984375, 0.5974384765625, 0.5970759887695313, 0.5905541381835937, 0.593681396484375, 0.5903206176757813, 0.5895465087890625, 0.5894717407226563, 0.589731689453125, 0.5914810180664063, 0.5898090209960938, 0.5908190307617187, 0.59707421875, 0.5995111694335937, 0.5907072143554688, 0.5919744262695312, 0.58979736328125, 0.594145263671875, 0.5923717041015625, 0.59804296875, 0.5930797119140625, 0.593076171875, 0.5949239501953125, 0.592190673828125, 0.592826416015625, 0.5938594970703125, 0.5946546630859375, 0.5976929321289063, 0.6159707641601563, 0.595388427734375, 0.5930552978515625, 0.596017578125, 0.5924225463867188, 0.5903302001953125, 0.5951565551757813, 0.5902269287109375, 0.5919303588867187, 0.5909688110351563, 0.5929512939453125, 0.5906862182617187, 0.5879285888671875, 0.591690673828125, 0.5901394653320312, 0.592201171875, 0.5956367797851563, 0.6000435180664062, 0.5915443115234374, 0.5951054077148438, 0.591812744140625, 0.5918026123046874, 0.58849072265625, 0.595726318359375, 0.5919313354492187, 0.5972828369140625, 0.5893836669921875, 0.593006591796875, 0.59108740234375, 0.590979248046875, 0.5911411743164062, 0.5916771240234375, 0.5907025756835937, 0.59791357421875, 0.5995593872070313, 0.5906926879882812, 0.5898677368164063, 0.593170166015625, 0.5920870361328125, 0.589391845703125, 0.5958204956054688, 0.590388427734375, 0.5910286254882813, 0.5901746826171875, 0.5900157470703125, 0.5913583374023438, 0.5913128662109375, 0.590477783203125, 0.5902556762695312, 0.5918931884765625, 0.5957220458984375, 0.600690673828125, 0.590388671875, 0.59058642578125, 0.5907967529296875, 0.5913981323242188, 0.59211083984375, 0.5902474365234375, 0.587992919921875, 0.5887446899414063, 0.5863717041015625, 0.5877821655273437, 0.5867615966796875, 0.5886040649414063, 0.5885358276367187, 0.5941903076171875, 0.5941514282226562, 0.5992528686523437, 0.590245361328125, 0.587977294921875, 0.58908984375, 0.5893883056640625, 0.5897506713867188, 0.58909521484375, 0.587362060546875, 0.5910056762695313, 0.5893345336914062, 0.5890919189453125, 0.5894024658203125, 0.58707568359375, 0.588322509765625, 0.5886635131835938, 0.5929758911132812, 0.5966889038085937, 0.5970636596679687, 0.5937377319335938, 0.5945128784179687, 0.5954426879882813, 
0.59519970703125, 0.6000829467773438, 0.5966046752929688, 0.5942167358398438, 0.5963052978515625, 0.5975029907226562, 0.5953101196289062, 0.5963593139648438, 0.5953899536132813, 0.594767578125, 0.592407470703125, 0.5954293823242187, 0.60524951171875, 0.600276123046875, 0.5980428466796875, 0.5945698852539063, 0.5956915283203125, 0.5968141479492187, 0.599828125, 0.596664306640625, 0.59563134765625, 0.5962001953125, 0.595198974609375, 0.5977835693359375, 0.5982815551757813, 0.5990254516601563, 0.5986903076171874, 0.6004895629882813, 0.60535888671875, 0.6024540405273437, 0.6028369750976562, 0.598262939453125, 0.598118408203125, 0.59943115234375, 0.5952921752929687, 0.598362060546875, 0.5996151733398437, 0.5961767578125, 0.5982315063476562, 0.5971517333984375, 0.5977986450195313, 0.5980674438476562, 0.5999363403320312, 0.6010046997070313, 0.6022569580078125, 0.6052492065429688, 0.5988505859375, 0.5998998413085938, 0.5965784912109375, 0.5971251220703125, 0.599250732421875, 0.5977110595703125, 0.59770263671875, 0.598725830078125, 0.6026717529296876, 0.5997733764648437, 0.60202392578125, 0.5994004516601562, 0.5970841674804688, 0.59827197265625, 0.6030213012695312, 0.61406005859375, 0.6005678100585937, 0.5983573608398437, 0.6053334350585937, 0.59907666015625, 0.5979268188476563, 0.6012926025390625, 0.5956641845703124, 0.5959152221679688, 0.597344482421875, 0.596909423828125, 0.5984203491210938, 0.5971619873046875, 0.5981572875976563, 0.5977435913085938, 0.597190673828125, 0.60233935546875, 0.604567626953125, 0.5984596557617188, 0.5972875366210938, 0.5958082275390625, 0.5963690795898438, 0.5995562133789063, 0.5963605346679688, 0.5962469482421875, 0.595840576171875, 0.5970195922851562, 0.5972930297851563, 0.5991724243164063, 0.5969491577148438, 0.5959049682617188, 0.5978124389648437, 0.6027620849609375, 0.603598876953125, 0.6010101928710937, 0.595933349609375, 0.5975206909179688, 0.594540283203125, 0.5983009643554688, 0.5924844360351562, 0.5947349853515626, 0.5926602172851563, 0.5940177612304688, 0.59410693359375, 0.5937256469726563, 0.595998779296875, 0.5968008422851563, 0.5963701171875, 0.6013972778320312, 0.6001024780273437, 0.5975904541015625, 0.59519384765625, 0.5953491821289062, 0.595202392578125, 0.5945548706054687, 0.5977111206054687, 0.5951444702148437, 0.5948168334960937, 0.5969163818359375, 0.5929103393554688, 0.5976002807617188, 0.5947371215820313, 0.5980469360351562, 0.5974894409179687, 0.599140380859375, 0.60188671875, 0.6043787231445312, 0.6006676025390625, 0.59879931640625, 0.5957635498046875, 0.5973761596679688, 0.5968289184570312, 0.5975631713867188, 0.5970902099609375, 0.5986216430664062, 0.5950140380859374, 0.5960767822265625, 0.5955543212890625, 0.597099609375, 0.596447265625, 0.5972276000976563, 0.5998764038085938, 0.60377294921875, 0.605216796875, 0.5977272338867188, 0.5971763305664063, 0.5968251342773437, 0.596970458984375, 0.599923828125, 0.5974495849609375, 0.5984010620117187, 0.5963182373046875, 0.5974935913085937, 0.595628173828125, 0.595881103515625, 0.5988564453125, 0.5982308959960938]",tokens/s,1.6753001295405434,,,True 
4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1231.44192,8448.311296,0.0,8053.06368,7930.605568,s,1,20.147826171875,20.147826171875,0.0,20.147826171875,20.147826171875,20.147826171875,20.147826171875,[20.147826171875],,kWh,0.0003734974021333187,4.1192099596527255e-05,0.00013842372184999507,0.0005531132235798411,,MB,1280.770048,10214.11328,0.0,9806.282752,9135.716352,s,10,18.62685852050781,1.8626858520507814,0.008198181861214697,1.8660625,1.8697125122070313,1.8699464660644531,1.8701336291503907,"[1.8432120361328126, 1.8550084228515624, 1.8570380859375, 1.864203125, 1.8626883544921875, 1.867921875, 1.8689832763671874, 1.86796240234375, 1.870180419921875, 1.8696605224609375]",tokens/s,137.43595019962646,kWh,5.421532625541544e-05,5.979594668753173e-06,3.600202880159956e-05,9.619694972576816e-05,tokens/kWh,2661207.0416971403,MB,1302.986752,10214.11328,0.0,9806.282752,9135.718912,s,10,94.5289873046875,9.45289873046875,0.03300749615594201,9.455719238281251,9.49055146484375,9.493977880859376,9.496719013671875,"[9.4092529296875, 9.4173203125, 9.40789453125, 9.438798828125, 9.4357744140625, 9.4804248046875, 9.4726396484375, 9.4796875, 9.4897900390625, 9.497404296875]",tokens/s,6.664622333987065,kWh,0.00027711843717833517,3.056830458298787e-05,0.00018413728619859972,0.0004918240279599228,tokens/kWh,128094.5956652888,,s,630,94.52455953979486,0.15003898339649988,0.001875406770582973,0.1499233856201172,0.15152191619873048,0.15219839096069337,0.15889560440063477,"[0.15854421997070312, 0.14693299865722656, 0.14741305541992186, 0.14709324645996094, 0.14787271118164064, 0.14706072998046876, 0.15467926025390624, 0.15004266357421875, 0.1489264678955078, 0.14833622741699218, 0.1476426544189453, 0.1476343078613281, 0.1486991424560547, 0.15006460571289063, 0.14923216247558593, 0.14806956481933595, 0.1482065887451172, 0.1475479736328125, 0.14836872863769532, 0.1491155548095703, 0.1513369598388672, 0.14916986083984374, 0.14808265686035157, 0.1494879608154297, 0.14759117126464844, 0.1497845764160156, 0.14995436096191406, 0.14902259826660155, 0.14852085876464843, 0.14935606384277345, 0.14895126342773438, 0.1481359405517578, 0.15096038818359375, 0.1497481231689453, 0.1502617645263672, 0.14804173278808594, 0.14938316345214844, 0.148231201171875, 0.14990182495117188, 0.1498867492675781, 0.1490885772705078, 0.14964553833007813, 0.1488426513671875, 0.14895907592773439, 0.14939053344726563, 0.15035423278808593, 0.15032386779785156, 0.14997885131835936, 0.14938922119140624, 0.14937464904785155, 0.14891484069824218, 0.14893670654296876, 0.15039900207519533, 0.1494896697998047, 0.15020442199707032, 0.15000941467285156, 0.14883680725097656, 0.1492229461669922, 0.1498505859375, 0.15049728393554687, 0.14987673950195313, 0.15002418518066407, 0.14994610595703126, 0.1584496612548828, 0.1466931915283203, 0.14688351440429687, 0.1472368621826172, 0.147654052734375, 0.14796246337890626, 
0.15514755249023438, 0.14969091796875, 0.14807420349121095, 0.14830230712890624, 0.14861024475097656, 0.14789482116699218, 0.15093174743652343, 0.15089584350585938, 0.1492109375, 0.1481815948486328, 0.14759916687011718, 0.14816464233398438, 0.14881808471679686, 0.15072653198242186, 0.14980531311035156, 0.14910861206054687, 0.1488407745361328, 0.14732293701171875, 0.14835098266601562, 0.14897695922851562, 0.15124755859375, 0.15087615966796875, 0.1487477722167969, 0.14819378662109375, 0.15014093017578126, 0.14895893859863282, 0.14878749084472656, 0.15112602233886718, 0.14977996826171874, 0.1495716552734375, 0.14782643127441406, 0.14939360046386718, 0.14954652404785157, 0.14861529541015625, 0.150591552734375, 0.14975669860839844, 0.14852301025390624, 0.1493053436279297, 0.14988870239257812, 0.14947975158691407, 0.15011750793457032, 0.14948031616210938, 0.15034066772460938, 0.14879750061035157, 0.14923405456542968, 0.1499632568359375, 0.14945074462890626, 0.14956544494628907, 0.15033273315429688, 0.14984877014160156, 0.14942413330078125, 0.1496494140625, 0.1498419189453125, 0.15033139038085938, 0.15029656982421874, 0.14947943115234374, 0.15083724975585938, 0.15883468627929687, 0.14712348937988282, 0.1473582763671875, 0.14713871765136718, 0.1475625, 0.1480203857421875, 0.15394288635253905, 0.14968377685546874, 0.14885113525390625, 0.1478082275390625, 0.14778781127929688, 0.14760345458984375, 0.15012185668945313, 0.15080624389648437, 0.14987767028808593, 0.1475392608642578, 0.14777413940429687, 0.14762979125976564, 0.14804960632324218, 0.15000172424316408, 0.14989506530761718, 0.1495926971435547, 0.14783692932128906, 0.14928810119628907, 0.14761660766601561, 0.14889727783203124, 0.15015887451171875, 0.1493268127441406, 0.14894601440429686, 0.14766581726074218, 0.14871113586425783, 0.14815875244140625, 0.14938316345214844, 0.14897113037109375, 0.14930776977539062, 0.1483644104003906, 0.14922831726074218, 0.14824024963378907, 0.14879142761230468, 0.14986656188964845, 0.14854147338867188, 0.14899366760253907, 0.148991455078125, 0.14974864196777343, 0.1484962921142578, 0.14908364868164062, 0.1510467529296875, 0.15059353637695314, 0.15056486511230469, 0.14870527648925783, 0.14996470642089843, 0.14984725952148437, 0.15076797485351562, 0.14946357727050782, 0.14999705505371094, 0.14979327392578126, 0.149749755859375, 0.14939340209960938, 0.14963302612304688, 0.1512626190185547, 0.15061871337890625, 0.15026789855957032, 0.1501511688232422, 0.16048147583007813, 0.1478137969970703, 0.14684364318847656, 0.14750076293945313, 0.1481182098388672, 0.147625244140625, 0.15558543395996094, 0.15031295776367187, 0.15010202026367186, 0.1473228759765625, 0.14809840393066406, 0.14891690063476562, 0.15066709899902345, 0.1523625030517578, 0.1495528564453125, 0.15025657653808594, 0.14717951965332032, 0.1477015380859375, 0.14854576110839843, 0.15081053161621094, 0.14991574096679688, 0.14980709838867187, 0.14936863708496093, 0.14869113159179687, 0.14843289184570313, 0.1505497589111328, 0.1508167724609375, 0.14928973388671876, 0.14846726989746092, 0.14798419189453124, 0.14760362243652345, 0.14824208068847655, 0.1499185333251953, 0.15192268371582032, 0.14972518920898437, 0.15008335876464843, 0.1490160675048828, 0.1493173828125, 0.150193115234375, 0.14994003295898437, 0.1505341491699219, 0.15095826721191405, 0.1500897216796875, 0.14980300903320312, 0.14960179138183594, 0.14882662963867188, 0.15093875122070313, 0.15183961486816405, 0.14995864868164063, 0.14967808532714844, 0.1492111358642578, 0.1504192352294922, 
0.15062448120117186, 0.15064678955078126, 0.14975999450683594, 0.14992723083496093, 0.15009767150878905, 0.15005177307128906, 0.14919270324707032, 0.15051065063476562, 0.15042655944824218, 0.14982322692871095, 0.1503541717529297, 0.15892048645019533, 0.14766339111328125, 0.147649658203125, 0.14745455932617188, 0.14792066955566407, 0.14772685241699218, 0.1545789489746094, 0.14918861389160157, 0.14881587219238282, 0.14828239440917967, 0.147767578125, 0.14780812072753907, 0.15152621459960938, 0.1516663360595703, 0.14877462768554686, 0.14945458984375, 0.14783168029785157, 0.14850457763671876, 0.1491631317138672, 0.15030157470703126, 0.1511710662841797, 0.14916403198242187, 0.14796131896972656, 0.14798233032226563, 0.14890652465820312, 0.1506727294921875, 0.15133334350585936, 0.15100947570800782, 0.14982733154296876, 0.14889369201660158, 0.14841587829589845, 0.1503773498535156, 0.15023670959472657, 0.150972900390625, 0.15039488220214844, 0.14894639587402345, 0.1489188232421875, 0.14809703063964844, 0.15040476989746093, 0.14948902893066407, 0.15060887145996094, 0.15013069152832031, 0.15026380920410157, 0.14938038635253906, 0.14961328125, 0.15043942260742188, 0.1501641845703125, 0.15000349426269532, 0.14944050598144532, 0.1500282897949219, 0.14859805297851564, 0.1498075866699219, 0.14956144714355468, 0.14955126953125, 0.1506856384277344, 0.1492419128417969, 0.1497716827392578, 0.14878781127929688, 0.15277027893066406, 0.1512610626220703, 0.15175926208496093, 0.15017327880859374, 0.149114501953125, 0.16288624572753907, 0.1477181396484375, 0.14761546325683594, 0.14845980834960937, 0.14872508239746093, 0.15000437927246094, 0.15573606872558593, 0.15141606140136718, 0.14867266845703125, 0.14755081176757812, 0.14773043823242188, 0.149042724609375, 0.15223001098632813, 0.15227264404296875, 0.14947142028808594, 0.148680419921875, 0.14868450927734375, 0.14776419067382812, 0.1512816619873047, 0.15114854431152344, 0.15097637939453126, 0.15034303283691405, 0.14924822998046874, 0.1480994873046875, 0.15076710510253907, 0.1502828826904297, 0.1508658905029297, 0.15182847595214843, 0.14930908203125, 0.14926681518554688, 0.14834402465820312, 0.15024003601074218, 0.15103999328613282, 0.15138316345214844, 0.14999005126953124, 0.14873008728027343, 0.14909849548339843, 0.1499279327392578, 0.1502471923828125, 0.15143548583984376, 0.15081446838378906, 0.14958003234863282, 0.1488170623779297, 0.14954173278808594, 0.15076069641113282, 0.15150355529785156, 0.15223603820800782, 0.15231800842285156, 0.15049728393554687, 0.15057510375976563, 0.1513492431640625, 0.1512489013671875, 0.15151922607421875, 0.1519964141845703, 0.15107667541503905, 0.14943251037597657, 0.15107891845703125, 0.15040835571289063, 0.15121405029296875, 0.15189286804199217, 0.15186067199707032, 0.1505996551513672, 0.151111328125, 0.16059103393554688, 0.14852359008789062, 0.14810488891601561, 0.1477718048095703, 0.1496065673828125, 0.14961048889160156, 0.15542066955566405, 0.14933811950683593, 0.14978770446777342, 0.1478082275390625, 0.14786204528808594, 0.14902473449707032, 0.151837158203125, 0.15117721557617186, 0.15020236206054688, 0.1484206085205078, 0.14789018249511718, 0.14916371154785157, 0.1492541046142578, 0.150806884765625, 0.15053184509277343, 0.150144775390625, 0.14877850341796875, 0.15022563171386719, 0.15060202026367187, 0.15170970153808594, 0.152453125, 0.15130604553222657, 0.14972128295898438, 0.14921302795410157, 0.150010009765625, 0.15065603637695313, 0.15075141906738282, 0.15193574523925782, 0.1505526123046875, 0.1509005126953125, 
0.14931088256835937, 0.15039776611328126, 0.15007334899902344, 0.15094374084472656, 0.1516062774658203, 0.15031398010253907, 0.1499279327392578, 0.1499495086669922, 0.14997190856933593, 0.15113731384277343, 0.15154275512695312, 0.15045826721191408, 0.14896514892578125, 0.14913363647460937, 0.1492084503173828, 0.15082351684570314, 0.151297607421875, 0.1510236511230469, 0.15065907287597657, 0.15087251281738281, 0.1492991943359375, 0.14947065734863282, 0.15091722106933594, 0.15070870971679687, 0.15082701110839844, 0.15091670227050782, 0.15072662353515626, 0.15959231567382812, 0.14886105346679687, 0.14778778076171875, 0.14983538818359374, 0.1491922607421875, 0.14987551879882813, 0.1556634216308594, 0.1501907501220703, 0.1481136932373047, 0.14931695556640626, 0.14812838745117188, 0.14816668701171876, 0.15233842468261719, 0.15146563720703124, 0.14887699890136719, 0.14908892822265624, 0.1483120574951172, 0.14859878540039062, 0.15039077758789063, 0.15120335388183595, 0.15097850036621094, 0.14946754455566405, 0.14944790649414064, 0.14820649719238282, 0.1496678466796875, 0.1519964141845703, 0.15047599792480468, 0.15099481201171874, 0.15066986083984374, 0.15047103881835938, 0.15035968017578125, 0.1511182098388672, 0.15068130493164061, 0.1521093444824219, 0.15045222473144532, 0.14927462768554686, 0.1498862762451172, 0.15068006896972655, 0.15019847106933593, 0.15072419738769532, 0.15124111938476562, 0.15026789855957032, 0.15040614318847656, 0.1500877685546875, 0.15090985107421875, 0.15075328063964843, 0.15084748840332032, 0.15059666442871095, 0.14992041015625, 0.1497638397216797, 0.15109788513183595, 0.1503354949951172, 0.15112594604492188, 0.15078611755371094, 0.15057664489746095, 0.15072102355957032, 0.15090428161621094, 0.1504150390625, 0.15069987487792968, 0.1518204803466797, 0.1514442901611328, 0.15071408081054688, 0.15095794677734375, 0.1594798126220703, 0.14768946838378907, 0.1474086151123047, 0.14796194458007814, 0.14871980285644532, 0.1511751708984375, 0.1552052459716797, 0.14981773376464844, 0.14821128845214843, 0.14820323181152345, 0.14916677856445312, 0.14896742248535155, 0.15080825805664064, 0.15189616394042968, 0.14882736206054686, 0.14892665100097657, 0.14898463439941406, 0.15083059692382814, 0.15108352661132812, 0.15213772583007812, 0.1524715576171875, 0.15017575073242187, 0.14958181762695313, 0.15004057312011718, 0.15037164306640624, 0.1508236083984375, 0.1513135986328125, 0.15114118957519532, 0.14954229736328126, 0.149695068359375, 0.14996070861816407, 0.15055258178710937, 0.15043174743652343, 0.1514352569580078, 0.15094989013671875, 0.15007318115234375, 0.14993629455566407, 0.15067861938476562, 0.15130438232421875, 0.1506393280029297, 0.15247769165039063, 0.1504047088623047, 0.1506246795654297, 0.15124678039550782, 0.15129379272460938, 0.14995639038085937, 0.15152143859863282, 0.15201309204101562, 0.150329345703125, 0.15109939575195314, 0.14998074340820314, 0.15117327880859374, 0.15051394653320313, 0.15118861389160157, 0.15060012817382812, 0.15107868957519532, 0.15096450805664063, 0.1512484130859375, 0.1513419189453125, 0.1513861083984375, 0.150540283203125, 0.15215974426269532, 0.14958642578125, 0.16037628173828125, 0.14842933654785156, 0.14838374328613282, 0.1491242218017578, 0.14892326354980467, 0.1501709747314453, 0.1543555908203125, 0.1496727294921875, 0.14922137451171874, 0.14989312744140626, 0.1485779571533203, 0.150301025390625, 0.15237939453125, 0.15199392700195313, 0.14958956909179688, 0.14875474548339843, 0.14964384460449218, 0.1494644775390625, 0.15013743591308593, 
0.1515335693359375, 0.15089596557617188, 0.14982826232910157, 0.14886679077148438, 0.14936093139648438, 0.14969651794433594, 0.15076870727539063, 0.15124166870117187, 0.14939546203613283, 0.1491958465576172, 0.14992636108398438, 0.1488585968017578, 0.1508871612548828, 0.1511299591064453, 0.1514286346435547, 0.14960263061523438, 0.1506829376220703, 0.14990847778320313, 0.15144514465332032, 0.1513882293701172, 0.15225180053710938, 0.1513173828125, 0.1515478973388672, 0.1519615936279297, 0.15199757385253906, 0.1519764862060547, 0.1515175323486328, 0.1520762939453125, 0.1513038787841797, 0.15061383056640626, 0.1510794219970703, 0.15177317810058594, 0.15131852722167968, 0.15134883117675782, 0.15165817260742187, 0.15170751953125, 0.14908502197265625, 0.15149215698242188, 0.15132716369628907, 0.15106185913085937, 0.15040988159179688, 0.1508311004638672, 0.15138406372070312, 0.15049462890625]",tokens/s,6.664934521432702,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2549.338112,11826.823168,0.0,11431.575552,10953.091072,s,1,22.083626953125,22.083626953125,0.0,22.083626953125,22.083626953125,22.083626953125,22.083626953125,[22.083626953125],,kWh,0.00042438384648334394,4.6805431210738926e-05,0.00015918096067799037,0.0006303702383720733,,MB,1918.636032,12722.307072,0.0,12314.476544,11624.259584,s,10,19.116585571289065,1.9116585571289062,0.006993612625285195,1.9133917846679687,1.9185032348632811,1.919634002685547,1.9205386169433594,"[1.897379150390625, 1.902699462890625, 1.90814013671875, 1.909576904296875, 1.911366943359375, 1.9154166259765626, 1.91584521484375, 1.9171444091796874, 1.918251953125, 1.9207647705078126]",tokens/s,133.91512780633948,kWh,5.5732573974582165e-05,6.145645191757081e-06,3.701130738680281e-05,9.888952655314205e-05,tokens/kWh,2588747.351949639,MB,1922.7648,12724.404224,0.0,12316.573696,11624.262144,s,10,94.21018066406249,9.42101806640625,0.025053647449437427,9.42879052734375,9.44482841796875,9.445260400390625,9.445605986328124,"[9.3704345703125, 9.3852255859375, 9.40620703125, 9.4164833984375, 9.4247998046875, 9.43278125, 9.4416455078125, 9.4421787109375, 9.444732421875, 9.4456923828125]",tokens/s,6.687175372760116,kWh,0.00027596498239625583,3.0441381840470628e-05,0.00018339111893499736,0.0004897974831717238,tokens/kWh,128624.58906901344,,s,630,94.20603550720212,0.14953338969397165,0.0020462793337056094,0.14950914764404297,0.15111215820312499,0.15163519210815432,0.16009703063964842,"[0.16060806274414063, 0.14670208740234375, 0.14601875305175782, 0.14674493408203124, 0.14774313354492188, 0.1459621124267578, 0.15375244140625, 0.14825053405761718, 0.1471837158203125, 0.14752310180664063, 0.14631753540039064, 0.14619606018066406, 0.14904960632324218, 0.1505774383544922, 0.14821340942382813, 0.14878767395019532, 0.14591180419921876, 0.14713845825195312, 0.14888560485839844, 0.14901248168945314, 0.14934962463378906, 0.14868553161621093, 0.14756253051757812, 
0.14836898803710938, 0.1469444122314453, 0.1486684112548828, 0.150724609375, 0.14895423889160156, 0.1495040588378906, 0.14795245361328124, 0.14724908447265625, 0.14920211791992188, 0.14875736999511718, 0.14877212524414063, 0.14951712036132814, 0.1475782470703125, 0.14903053283691406, 0.1475729217529297, 0.1484738311767578, 0.14982585144042967, 0.1487548828125, 0.1505536346435547, 0.14810585021972655, 0.1473161926269531, 0.14818342590332031, 0.14907373046875, 0.15020697021484375, 0.1489409942626953, 0.14891416931152343, 0.1492064666748047, 0.14695001220703124, 0.1493162841796875, 0.14908146667480468, 0.14946981811523438, 0.14889369201660158, 0.1488497314453125, 0.1505494384765625, 0.1476259765625, 0.1487626190185547, 0.1492900848388672, 0.14863043212890625, 0.15067074584960938, 0.14942678833007814, 0.15974668884277343, 0.1473144989013672, 0.145940673828125, 0.1466429443359375, 0.1480494384765625, 0.1463537902832031, 0.15428489685058594, 0.14886912536621094, 0.14752153015136718, 0.14823834228515625, 0.14589132690429688, 0.14756863403320314, 0.15134300231933595, 0.15032534790039062, 0.1501114501953125, 0.14701962280273437, 0.14706375122070312, 0.1485312042236328, 0.14752153015136718, 0.15014451599121093, 0.1499632568359375, 0.14844435119628907, 0.14915632629394532, 0.14640777587890624, 0.14795365905761718, 0.14988902282714844, 0.14902444458007813, 0.15089491271972658, 0.14842271423339845, 0.14776722717285157, 0.1468408966064453, 0.14810499572753907, 0.1500142059326172, 0.15020831298828125, 0.14902735900878905, 0.1482570495605469, 0.1474391326904297, 0.14824089050292968, 0.14802943420410156, 0.15064901733398436, 0.15023085021972657, 0.14874214172363281, 0.15001603698730467, 0.1470023651123047, 0.148179931640625, 0.15065203857421874, 0.1501766357421875, 0.150614013671875, 0.14891375732421874, 0.14783247375488281, 0.14728863525390626, 0.14922566223144532, 0.15096832275390626, 0.14965965270996093, 0.15020442199707032, 0.14783897399902343, 0.14780621337890626, 0.15004019165039062, 0.14899862670898437, 0.15063031005859376, 0.1492618865966797, 0.1489514923095703, 0.14838099670410157, 0.15995619201660155, 0.14741285705566406, 0.1466479949951172, 0.14689010620117188, 0.14781817626953125, 0.14709648132324218, 0.15391941833496095, 0.14886431884765625, 0.14756886291503907, 0.14874467468261718, 0.1461207733154297, 0.14679859924316407, 0.1520025634765625, 0.150476806640625, 0.15000973510742188, 0.1485323486328125, 0.147704833984375, 0.1464930877685547, 0.14855203247070312, 0.14992720031738282, 0.14944650268554688, 0.14984077453613281, 0.14753555297851562, 0.14751571655273438, 0.14724649047851562, 0.14990806579589844, 0.14971200561523437, 0.15064505004882814, 0.14916192626953126, 0.15003712463378907, 0.14657699584960937, 0.1478697204589844, 0.15068966674804687, 0.1502799072265625, 0.15042448425292967, 0.14940147399902343, 0.14886495971679686, 0.1467342987060547, 0.14909674072265625, 0.15073139953613282, 0.15011581420898437, 0.15108316040039063, 0.1487293701171875, 0.14904998779296874, 0.1473204803466797, 0.14937753295898437, 0.1514352264404297, 0.15026524353027343, 0.1499900207519531, 0.14873330688476563, 0.1485727996826172, 0.14837350463867188, 0.15057817077636718, 0.15104109191894532, 0.14954275512695311, 0.15053219604492188, 0.14835232543945312, 0.14807533264160155, 0.14945794677734375, 0.15097030639648437, 0.15024630737304687, 0.15054847717285155, 0.15010610961914062, 0.16076780700683593, 0.14740089416503907, 0.14671401977539061, 0.14782861328125, 0.14761964416503906, 0.14693263244628907, 
0.15456646728515624, 0.14924986267089843, 0.14748463439941406, 0.1476848907470703, 0.14725619506835938, 0.1476259765625, 0.150761474609375, 0.15083059692382814, 0.15019865417480469, 0.14743916320800782, 0.14850502014160155, 0.14685606384277344, 0.1481031036376953, 0.15020448303222655, 0.15069325256347657, 0.14898255920410156, 0.14895872497558593, 0.14736367797851563, 0.14943238830566405, 0.14852549743652343, 0.14992790222167968, 0.15109738159179686, 0.14875238037109376, 0.15001190185546875, 0.14777958679199218, 0.14798439025878907, 0.14986239624023437, 0.150108154296875, 0.15065087890625, 0.15007334899902344, 0.1506570281982422, 0.146808837890625, 0.14803558349609375, 0.15090074157714845, 0.14996255493164062, 0.15091116333007812, 0.14929408264160157, 0.15004978942871094, 0.1475747833251953, 0.14901043701171876, 0.15021868896484375, 0.15079200744628907, 0.15038284301757812, 0.15061750793457032, 0.1501292419433594, 0.14777548217773437, 0.1487298583984375, 0.15075155639648438, 0.15004383850097655, 0.15093618774414064, 0.1493943328857422, 0.15111062622070312, 0.14844927978515626, 0.14958706665039062, 0.1490252227783203, 0.15041990661621094, 0.15023049926757812, 0.16084786987304686, 0.14708531188964843, 0.14729420471191407, 0.1472041015625, 0.14789836120605468, 0.14821334838867187, 0.15471075439453125, 0.14891180419921876, 0.14951423645019532, 0.1473597412109375, 0.1472368621826172, 0.14933811950683593, 0.1500037078857422, 0.15201895141601562, 0.14942562866210937, 0.14781494140625, 0.14778770446777345, 0.1471426544189453, 0.14931974792480468, 0.15128684997558595, 0.15012550354003906, 0.1489644775390625, 0.14848924255371093, 0.15055241394042967, 0.1466798095703125, 0.14954920959472656, 0.1494034881591797, 0.14997914123535155, 0.14966700744628905, 0.14913548278808594, 0.1509383087158203, 0.1468538818359375, 0.14921929931640626, 0.15028755187988282, 0.14996156311035155, 0.1506570281982422, 0.149494873046875, 0.14963600158691406, 0.14705459594726564, 0.1498091583251953, 0.1506078643798828, 0.15050460815429687, 0.15080703735351564, 0.14874774169921876, 0.15000665283203124, 0.14741299438476563, 0.1508922576904297, 0.15007096862792968, 0.15040982055664062, 0.1504500732421875, 0.1491005401611328, 0.15146783447265624, 0.1486801300048828, 0.14930415344238282, 0.14956480407714845, 0.149826171875, 0.15015936279296874, 0.1498927001953125, 0.1508356170654297, 0.14828282165527343, 0.15065965270996093, 0.14974566650390625, 0.1500958709716797, 0.16015455627441405, 0.14722047424316406, 0.1471056365966797, 0.14801705932617187, 0.14729434204101563, 0.14731805419921876, 0.15568162536621094, 0.14911628723144532, 0.149712646484375, 0.14771865844726562, 0.14823785400390624, 0.14828834533691407, 0.14956544494628907, 0.15234197998046875, 0.14890652465820312, 0.15017575073242187, 0.14777507019042968, 0.14834722900390626, 0.14940780639648438, 0.15077491760253905, 0.15159794616699218, 0.14888531494140625, 0.15043545532226563, 0.14744342041015626, 0.14846243286132813, 0.14993318176269532, 0.15106343078613282, 0.14993408203125, 0.1500010528564453, 0.15119017028808593, 0.1474722900390625, 0.14792819213867187, 0.15009065246582032, 0.15064012145996095, 0.15029231262207032, 0.14970742797851563, 0.15063449096679687, 0.14765875244140625, 0.14886912536621094, 0.1502019500732422, 0.14997123718261718, 0.15062754821777344, 0.14943125915527344, 0.15080441284179688, 0.14766079711914062, 0.14990121459960937, 0.1492706298828125, 0.1506385955810547, 0.1490403594970703, 0.15020492553710937, 0.15011561584472657, 0.1482425994873047, 
0.15192256164550783, 0.14913020324707033, 0.15151046752929687, 0.1490946807861328, 0.1501510772705078, 0.14844912719726563, 0.15026435852050782, 0.14982144165039063, 0.150687744140625, 0.15027200317382813, 0.1495224304199219, 0.16215461730957031, 0.14909593200683594, 0.14641552734375, 0.14799728393554687, 0.148748291015625, 0.14641273498535157, 0.15466966247558595, 0.15010838317871095, 0.1508659210205078, 0.14722377014160157, 0.14810723876953125, 0.14874911499023438, 0.14871888732910157, 0.15131024169921875, 0.15025234985351563, 0.14909645080566405, 0.1474453430175781, 0.14836585998535157, 0.15063027954101563, 0.1488504638671875, 0.15085591125488282, 0.14958796691894533, 0.14915330505371094, 0.1477145233154297, 0.14880960083007813, 0.1509470977783203, 0.14991241455078125, 0.15116697692871095, 0.1496303405761719, 0.14852117919921876, 0.14806851196289061, 0.14984422302246095, 0.15040921020507814, 0.1501880340576172, 0.15113731384277343, 0.14910768127441407, 0.1507368927001953, 0.14773881530761718, 0.14952566528320313, 0.15034538269042969, 0.14996719360351562, 0.1510386199951172, 0.14994178771972655, 0.15044451904296874, 0.14890188598632811, 0.1501407012939453, 0.14945263671875, 0.15112594604492188, 0.1498853759765625, 0.14992166137695312, 0.14828556823730468, 0.15008111572265626, 0.15005941772460937, 0.15062956237792968, 0.15191737365722657, 0.14974156188964843, 0.15141888427734376, 0.1486929931640625, 0.14989859008789064, 0.14984463500976564, 0.15096421813964844, 0.1495653076171875, 0.15069970703125, 0.16067295837402343, 0.14796627807617188, 0.14791661071777343, 0.14806002807617188, 0.1470492858886719, 0.1481134033203125, 0.15501519775390624, 0.14963095092773437, 0.14951423645019532, 0.14811651611328125, 0.14753590393066407, 0.14831663513183593, 0.15114828491210938, 0.1519048614501953, 0.14974310302734375, 0.150442138671875, 0.14739039611816407, 0.14907830810546874, 0.14803286743164062, 0.15137033081054688, 0.150249755859375, 0.14963101196289064, 0.15034739685058593, 0.14729049682617187, 0.14910415649414063, 0.1489058837890625, 0.1513846435546875, 0.1504950714111328, 0.15006121826171875, 0.15045018005371094, 0.14760089111328126, 0.14965402221679688, 0.1491005401611328, 0.15047488403320314, 0.14957554626464845, 0.15042684936523437, 0.14875631713867188, 0.14877996826171874, 0.14998733520507812, 0.1498787841796875, 0.1510726776123047, 0.15014874267578124, 0.15019013977050782, 0.14839849853515624, 0.1491394500732422, 0.14930943298339844, 0.15014230346679688, 0.15135174560546874, 0.15020828247070311, 0.15068351745605468, 0.14807212829589844, 0.15022988891601563, 0.14904226684570313, 0.15133139038085938, 0.14960220336914062, 0.15011856079101563, 0.14935888671875, 0.15014912414550782, 0.15050880432128907, 0.15042636108398438, 0.1516477508544922, 0.14942665100097657, 0.15204296875, 0.15941836547851562, 0.14711958312988282, 0.14898348999023436, 0.14644291687011718, 0.14857395935058593, 0.150712158203125, 0.15332415771484376, 0.1503272705078125, 0.1482845458984375, 0.14819622802734375, 0.14686968994140626, 0.1490458221435547, 0.15335789489746093, 0.1511367950439453, 0.14981111145019532, 0.1480067901611328, 0.148172607421875, 0.14755247497558593, 0.1504085693359375, 0.15230435180664062, 0.15046861267089845, 0.14987826538085938, 0.14792950439453126, 0.14946517944335938, 0.14776316833496095, 0.15155807495117188, 0.15067916870117187, 0.15004083251953124, 0.14994435119628907, 0.14808493041992188, 0.15060992431640624, 0.14790640258789062, 0.1508717498779297, 0.1494342041015625, 0.15003692626953125, 
0.14953082275390625, 0.14907379150390626, 0.1506793212890625, 0.14891030883789064, 0.15103581237792968, 0.14979647827148437, 0.15000198364257813, 0.14881794738769533, 0.14942233276367187, 0.14994432067871094, 0.14954086303710937, 0.1516195831298828, 0.14893875122070313, 0.1517690887451172, 0.1495572509765625, 0.15134515380859376, 0.14895225524902345, 0.1500618896484375, 0.1498787841796875, 0.15085494995117188, 0.1491871337890625, 0.15019334411621094, 0.14959100341796874, 0.1497266845703125, 0.15141737365722657, 0.15013186645507812, 0.1515262451171875, 0.1500913848876953, 0.1605960998535156, 0.14707347106933594, 0.14708358764648438, 0.1492139892578125, 0.14697305297851562, 0.14864979553222657, 0.15545330810546876, 0.1491721954345703, 0.14867709350585936, 0.1474534454345703, 0.1493731231689453, 0.14692965698242189, 0.1516198425292969, 0.15215664672851562, 0.14939459228515625, 0.14927548217773437, 0.14761984252929689, 0.14804374694824218, 0.1492532501220703, 0.15129078674316407, 0.15103794860839845, 0.1492500457763672, 0.15040080261230468, 0.14732514953613282, 0.14898367309570312, 0.15053651428222656, 0.1512807312011719, 0.15014749145507814, 0.14991725158691407, 0.14970489501953124, 0.14796624755859375, 0.15015965270996093, 0.15045529174804687, 0.15064166259765624, 0.15012454223632812, 0.1496268768310547, 0.14853529357910156, 0.14912821960449218, 0.15132931518554688, 0.15042604064941406, 0.15148646545410155, 0.1499279327392578, 0.14963711547851563, 0.14816029357910157, 0.14991792297363282, 0.1504965057373047, 0.1503445129394531, 0.1514126739501953, 0.14908210754394532, 0.1507060546875, 0.1488605194091797, 0.1499202880859375, 0.15023513793945312, 0.15087359619140625, 0.14966534423828126, 0.14958688354492186, 0.15028355407714844, 0.14994505310058595, 0.15148252868652343, 0.14961442565917968, 0.15116192626953126, 0.14994309997558594, 0.15027827453613282]",tokens/s,6.68746961495727,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,826.908672,554.631168,0.0,159.383552,142.313472,s,1,7.770806640625,7.770806640625,0.0,7.770806640625,7.770806640625,7.770806640625,7.770806640625,[7.770806640625],,kWh,2.044441207081036e-05,2.2443550499971597e-06,7.322505857998429e-06,3.001127297880595e-05,,MB,1169.485824,630.12864,0.0,222.298112,185.324544,s,16,0.20787631988525393,0.012992269992828369,0.00010642136649962598,0.012965983867645263,0.01313753604888916,0.01321061611175537,0.013223422050476075,"[0.013057024002075195, 0.012937984466552734, 0.012915424346923828, 0.012964415550231933, 0.013205280303955078, 0.012908672332763672, 0.012927007675170899, 0.01290614414215088, 0.01287775993347168, 0.012967552185058594, 0.012977215766906738, 0.01322662353515625, 0.013063615798950195, 0.012843647956848144, 0.013028160095214843, 
0.013069791793823242]",tokens/s,19704.024018998218,kWh,3.869097831898266e-07,4.2669298453619785e-08,2.1264954465961984e-07,6.422286263030662e-07,tokens/kWh,398611942.0955151,MB,1203.05664,632.225792,0.0,224.395264,185.327104,s,16,10.13089453125,0.633180908203125,0.002444518329385662,0.6324399108886718,0.6361362304687499,0.6373408050537109,0.6391272918701172,"[0.6326646728515625, 0.6314248657226562, 0.63187353515625, 0.6320014038085937, 0.6395739135742188, 0.6317808227539062, 0.6325137329101562, 0.6321677856445312, 0.6285827026367188, 0.632541259765625, 0.635676025390625, 0.636596435546875, 0.63510498046875, 0.6323660888671875, 0.6337172241210938, 0.63230908203125]",tokens/s,99.49763043043228,kWh,1.8280941349102588e-05,2.0160788581939183e-06,7.300429382464556e-06,2.7597449589761054e-05,tokens/kWh,2282819.6422677287,,s,1008,10.12196175956727,0.010041628729729425,0.00013469295719927956,0.010021120071411132,0.01016208667755127,0.010236100769042968,0.010545873079299927,"[0.010080096244812011, 0.010078240394592285, 0.010184703826904297, 0.01003276824951172, 0.010035584449768067, 0.010023039817810058, 0.009983872413635254, 0.010057408332824707, 0.00999782371520996, 0.009972512245178223, 0.009994272232055665, 0.010015744209289551, 0.0099901762008667, 0.009982943534851074, 0.010076416015625, 0.010059295654296875, 0.010041567802429199, 0.010051039695739747, 0.010069791793823241, 0.01007487964630127, 0.010076160430908204, 0.010049535751342773, 0.010023967742919922, 0.009998815536499023, 0.010030912399291993, 0.009947392463684082, 0.009940640449523926, 0.010007552146911621, 0.009925567626953125, 0.010027104377746583, 0.010471648216247558, 0.010013216018676758, 0.010033023834228516, 0.010035296440124512, 0.0100797119140625, 0.009951168060302735, 0.0100131196975708, 0.00998198413848877, 0.01001471996307373, 0.009976320266723633, 0.010020511627197266, 0.010004351615905762, 0.010023296356201172, 0.010024991989135743, 0.010053152084350587, 0.00999135971069336, 0.010028160095214844, 0.010000255584716797, 0.010061823844909668, 0.010028703689575195, 0.01005350399017334, 0.010074591636657715, 0.010034527778625488, 0.010099360466003418, 0.010057472229003906, 0.010082559585571289, 0.010045439720153808, 0.010000672340393067, 0.010023903846740723, 0.009988224029541016, 0.010031200408935547, 0.009931167602539063, 0.01003974437713623, 0.01005827236175537, 0.01026198387145996, 0.010103679656982422, 0.009968799591064453, 0.01021020793914795, 0.009917887687683105, 0.00992454433441162, 0.009894016265869141, 0.009880000114440918, 0.010145279884338379, 0.009979488372802735, 0.00996835231781006, 0.010484031677246094, 0.010042431831359864, 0.010009471893310546, 0.009977855682373048, 0.010002367973327637, 0.009854047775268555, 0.00990499210357666, 0.00983084774017334, 0.009921631813049316, 0.009884384155273438, 0.010014304161071777, 0.00992899227142334, 0.009907999992370605, 0.009842368125915527, 0.009896479606628417, 0.009881600379943848, 0.009955072402954101, 0.010100992202758789, 0.009965824127197266, 0.009908991813659668, 0.010023712158203125, 0.009987808227539063, 0.009957375526428223, 0.009959936141967773, 0.009960607528686523, 0.009955519676208497, 0.010175135612487793, 0.010004320144653321, 0.010037247657775878, 0.009946399688720704, 0.010049823760986329, 0.010068256378173828, 0.010084192276000976, 0.009973440170288086, 0.010070367813110352, 0.009991871833801269, 0.010067744255065918, 0.010121376037597656, 0.010181440353393554, 0.010033151626586915, 0.010397695541381835, 0.009973759651184083, 0.010012127876281739, 
0.010014528274536133, 0.010066847801208496, 0.009985856056213379, 0.010016672134399414, 0.010029215812683106, 0.010004511833190918, 0.010025152206420898, 0.010077407836914062, 0.00999014377593994, 0.00997811222076416, 0.010060832023620606, 0.010040032386779784, 0.009973471641540527, 0.010031167984008788, 0.009971936225891114, 0.010061280250549316, 0.010215007781982421, 0.010039615631103515, 0.010074751853942871, 0.010033087730407715, 0.010063712120056151, 0.009977215766906739, 0.009930784225463868, 0.009915200233459473, 0.009983424186706544, 0.009973823547363281, 0.010095135688781738, 0.00998192024230957, 0.009953280448913575, 0.009934847831726074, 0.010031455993652344, 0.009957023620605469, 0.010072352409362793, 0.009940320014953614, 0.009972448348999024, 0.009899680137634276, 0.010077695846557617, 0.009957247734069825, 0.009969311714172363, 0.009978560447692872, 0.010075519561767578, 0.00997283172607422, 0.010050880432128905, 0.010131967544555665, 0.010129216194152833, 0.01005615997314453, 0.010149503707885742, 0.010113120079040527, 0.009996288299560547, 0.009961471557617188, 0.010024959564208985, 0.009958815574645997, 0.010046112060546874, 0.009981760025024414, 0.010281087875366211, 0.009984000205993653, 0.010051584243774414, 0.009936800003051758, 0.009896032333374024, 0.009928607940673828, 0.009949567794799805, 0.00992579174041748, 0.009960000038146972, 0.009922335624694825, 0.009986559867858886, 0.009882719993591308, 0.010039903640747071, 0.009917792320251464, 0.01088582420349121, 0.009995488166809082, 0.009978655815124512, 0.010388768196105957, 0.010091487884521484, 0.009969056129455567, 0.009969504356384277, 0.009972703933715821, 0.01012492847442627, 0.0107357759475708, 0.010024959564208985, 0.010000384330749512, 0.01013766384124756, 0.010038944244384765, 0.010061183929443359, 0.00996390438079834, 0.00999385643005371, 0.009986080169677735, 0.00997871971130371, 0.010040384292602539, 0.010044384002685546, 0.010020928382873535, 0.010071680068969727, 0.009944864273071289, 0.010028639793395995, 0.009942975997924805, 0.009963871955871582, 0.009929375648498535, 0.01044809627532959, 0.009967519760131835, 0.009957759857177735, 0.009953056335449219, 0.009978591918945312, 0.01004150390625, 0.010051039695739747, 0.009952735900878907, 0.009947648048400879, 0.010096735954284668, 0.009927167892456054, 0.00993222427368164, 0.009914175987243652, 0.009861503601074219, 0.009957247734069825, 0.00986348819732666, 0.009975808143615723, 0.009926655769348144, 0.009931008338928222, 0.010016287803649902, 0.009920736312866212, 0.009869152069091797, 0.009939104080200196, 0.009762816429138184, 0.009883392333984374, 0.009894495964050292, 0.00993017578125, 0.009898207664489745, 0.009979904174804688, 0.010231807708740234, 0.010201087951660156, 0.010425760269165038, 0.01040777587890625, 0.010033920288085937, 0.00999014377593994, 0.009942784309387206, 0.010053536415100098, 0.009961215972900391, 0.010162176132202149, 0.010028415679931641, 0.01015449619293213, 0.010082079887390136, 0.01011081600189209, 0.010142208099365235, 0.010115424156188964, 0.010049344062805176, 0.010077695846557617, 0.010057727813720703, 0.010262880325317383, 0.010105119705200196, 0.010146656036376954, 0.010034048080444335, 0.010962016105651855, 0.010491999626159668, 0.010020704269409179, 0.010036319732666015, 0.010016639709472657, 0.010117119789123535, 0.010100735664367675, 0.01019264030456543, 0.010218784332275391, 0.010041567802429199, 0.010033920288085937, 0.010037247657775878, 0.009959615707397462, 0.009999648094177246, 
0.009953824043273926, 0.009971487998962402, 0.009912320137023926, 0.009957152366638184, 0.009943488121032715, 0.009891424179077148, 0.009933216094970703, 0.010217472076416016, 0.009850655555725098, 0.009901887893676757, 0.009992159843444825, 0.009971263885498047, 0.010804096221923827, 0.00999830436706543, 0.010704671859741211, 0.011363615989685058, 0.010138591766357423, 0.010112128257751464, 0.010101696014404297, 0.010153951644897461, 0.01015993595123291, 0.010092576026916504, 0.010076319694519043, 0.010106528282165527, 0.01003551959991455, 0.01006345558166504, 0.010547264099121094, 0.010184191703796386, 0.010191712379455566, 0.010370047569274902, 0.010287455558776856, 0.009998592376708984, 0.010023327827453613, 0.010027008056640625, 0.010217472076416016, 0.010015904426574707, 0.01003996753692627, 0.009907327651977538, 0.009960320472717285, 0.009910271644592286, 0.009970911979675293, 0.009878399848937989, 0.00998799991607666, 0.009897983551025391, 0.009994048118591309, 0.00987564754486084, 0.010032832145690918, 0.01001318359375, 0.009981599807739258, 0.00994320011138916, 0.010028160095214844, 0.009929439544677735, 0.009971232414245605, 0.009943679809570312, 0.00996735954284668, 0.009912575721740723, 0.009993247985839844, 0.009975775718688965, 0.009976832389831543, 0.010002207756042481, 0.0101112003326416, 0.010013824462890624, 0.010042655944824218, 0.010014143943786621, 0.010026623725891113, 0.010125856399536132, 0.010012672424316407, 0.009951295852661133, 0.009940896034240723, 0.010027071952819825, 0.010067296028137208, 0.010041600227355956, 0.010082752227783202, 0.010090687751770019, 0.010233792304992675, 0.010153696060180665, 0.0101561279296875, 0.010079327583312989, 0.010116000175476075, 0.010032159805297852, 0.01005078411102295, 0.010077280044555664, 0.009931039810180665, 0.009963071823120118, 0.010002400398254395, 0.010072928428649902, 0.009998527526855469, 0.010325599670410156, 0.010276800155639648, 0.01004588794708252, 0.010190400123596191, 0.010073535919189453, 0.009984864234924316, 0.009973759651184083, 0.009971551895141602, 0.010009920120239258, 0.00993065643310547, 0.009923232078552247, 0.009963711738586426, 0.010039520263671876, 0.009981375694274902, 0.009996159553527832, 0.010037952423095704, 0.00997935962677002, 0.01031164836883545, 0.009951807975769044, 0.009991488456726073, 0.009922271728515624, 0.010007519721984864, 0.010045439720153808, 0.009954463958740235, 0.00986832046508789, 0.010155808448791505, 0.009941023826599121, 0.010065600395202637, 0.010195584297180175, 0.009957056045532226, 0.009932767868041993, 0.009913503646850586, 0.010164128303527833, 0.009930015563964844, 0.01005945587158203, 0.010005824089050293, 0.010312383651733398, 0.010337599754333496, 0.01001542377471924, 0.010156031608581542, 0.009947135925292968, 0.009904128074645996, 0.01002195167541504, 0.009986111640930176, 0.010068863868713378, 0.009973471641540527, 0.009944831848144531, 0.009996064186096191, 0.009890527725219726, 0.009960576057434082, 0.010148127555847168, 0.01001302433013916, 0.01020751953125, 0.00994918441772461, 0.01009171199798584, 0.00997049617767334, 0.009989888191223144, 0.010010880470275879, 0.010034912109375, 0.010064064025878906, 0.01009059238433838, 0.010081376075744629, 0.010260928153991699, 0.009970144271850586, 0.009979647636413574, 0.009938976287841798, 0.010422495841979981, 0.009987615585327148, 0.00998243236541748, 0.009946911811828614, 0.009948384284973145, 0.009976287841796876, 0.01002131175994873, 0.009973695755004883, 0.010036160469055175, 0.01002297592163086, 
0.010003552436828614, 0.009898816108703613, 0.00994211196899414, 0.009949503898620605, 0.009945631980895996, 0.009950528144836426, 0.00997862434387207, 0.00994262409210205, 0.009959263801574707, 0.009937472343444824, 0.00997760009765625, 0.009988351821899414, 0.009968704223632812, 0.010005375862121583, 0.010073408126831055, 0.010031904220581055, 0.010045408248901367, 0.010005696296691895, 0.010152768135070801, 0.010227711677551269, 0.010043392181396485, 0.009963520050048828, 0.010045439720153808, 0.01001478385925293, 0.01004047966003418, 0.01063811206817627, 0.010142848014831543, 0.009988991737365723, 0.010065919876098632, 0.009861120223999023, 0.009873408317565918, 0.010079936027526855, 0.009865632057189941, 0.010037280082702637, 0.009916288375854492, 0.010100735664367675, 0.009934271812438965, 0.009968192100524902, 0.009924448013305665, 0.010090368270874023, 0.009965855598449707, 0.009942367553710937, 0.009921183586120605, 0.009960576057434082, 0.009941472053527831, 0.010010111808776855, 0.009994720458984375, 0.01010483169555664, 0.010020511627197266, 0.010066368103027344, 0.010026816368103028, 0.010039839744567871, 0.009991711616516113, 0.009992735862731934, 0.010043328285217284, 0.01016204833984375, 0.010082752227783202, 0.010082240104675294, 0.010195775985717774, 0.010172863960266114, 0.010243935585021973, 0.009994239807128906, 0.00994934368133545, 0.009987168312072754, 0.00993177604675293, 0.009983903884887695, 0.009971712112426758, 0.009894975662231445, 0.009872063636779786, 0.009883904457092285, 0.00992255973815918, 0.009830400466918946, 0.009822208404541016, 0.009836640357971192, 0.009872960090637208, 0.009902432441711425, 0.009945088386535645, 0.009817664146423339, 0.009761504173278809, 0.009874272346496581, 0.009923456192016601, 0.010098079681396484, 0.010019424438476563, 0.010088447570800782, 0.010010623931884765, 0.010063743591308594, 0.009932928085327149, 0.010174400329589843, 0.010009696006774902, 0.010039360046386718, 0.01002940845489502, 0.009999103546142578, 0.010015680313110351, 0.010034048080444335, 0.00991641616821289, 0.010033151626586915, 0.009930751800537109, 0.009967616081237793, 0.009881440162658692, 0.00999020767211914, 0.009944864273071289, 0.010117631912231445, 0.009971520423889161, 0.01001471996307373, 0.009981951713562011, 0.009973055839538574, 0.009964223861694335, 0.009997568130493163, 0.009949952125549317, 0.009959551811218261, 0.00993062400817871, 0.010055423736572266, 0.010010368347167968, 0.010009183883666992, 0.009985823631286622, 0.010030847549438476, 0.009940447807312012, 0.009996576309204102, 0.009986687660217285, 0.010166272163391114, 0.009943039894104003, 0.009969663619995118, 0.009906304359436036, 0.010002304077148437, 0.010103167533874512, 0.01006156826019287, 0.010104512214660645, 0.010009183883666992, 0.009942015647888184, 0.009925344467163085, 0.00984505558013916, 0.010026111602783203, 0.010039872169494628, 0.009992128372192382, 0.01002735996246338, 0.010075167655944824, 0.01009545612335205, 0.010280320167541504, 0.010119839668273926, 0.010053440093994141, 0.010160127639770507, 0.01014134407043457, 0.01001308822631836, 0.009975744247436523, 0.010032992362976074, 0.009994751930236816, 0.010031840324401856, 0.01002947235107422, 0.009986240386962891, 0.010057567596435547, 0.009968128204345703, 0.01005350399017334, 0.010026752471923828, 0.010038911819458008, 0.010035072326660156, 0.009997280120849609, 0.010198495864868163, 0.010084511756896973, 0.010324511528015137, 0.010237343788146972, 0.010080927848815919, 0.010016448020935058, 
0.010022496223449707, 0.0100763521194458, 0.009992416381835938, 0.01002729606628418, 0.009965279579162597, 0.009897983551025391, 0.01000160026550293, 0.009961919784545898, 0.009924032211303712, 0.010001343727111816, 0.009928159713745117, 0.010045408248901367, 0.009955743789672851, 0.010006591796875, 0.00999456024169922, 0.010012063980102539, 0.010055295944213867, 0.010087167739868164, 0.009945088386535645, 0.01000483226776123, 0.009989791870117188, 0.009995936393737793, 0.009963808059692382, 0.009982015609741211, 0.009946528434753419, 0.009978303909301758, 0.00991049575805664, 0.00998076820373535, 0.009935711860656738, 0.009977696418762207, 0.009968000411987305, 0.010034175872802734, 0.010059935569763184, 0.010045087814331055, 0.010028127670288087, 0.009989343643188476, 0.009996928215026856, 0.010046496391296386, 0.01006486415863037, 0.010028415679931641, 0.010846847534179688, 0.01026598358154297, 0.010695199966430664, 0.010376992225646973, 0.01004371166229248, 0.01003321647644043, 0.01005356788635254, 0.010080256462097169, 0.010036704063415527, 0.010222111701965332, 0.010050975799560546, 0.01002064037322998, 0.010039775848388673, 0.01032636833190918, 0.010078207969665527, 0.010004799842834472, 0.010021984100341797, 0.010033760070800781, 0.010006272315979003, 0.010115679740905761, 0.010014592170715332, 0.010008352279663086, 0.009995840072631835, 0.009974368095397949, 0.010028960227966309, 0.01001260757446289, 0.010067968368530274, 0.010042880058288574, 0.009986111640930176, 0.010016192436218261, 0.010210304260253907, 0.01002905559539795, 0.010029312133789062, 0.010309439659118652, 0.010041279792785645, 0.010245311737060546, 0.010175135612487793, 0.010059904098510743, 0.009950336456298827, 0.00997868824005127, 0.010008671760559081, 0.01002086353302002, 0.010079680442810058, 0.010224191665649415, 0.010054752349853516, 0.010021439552307129, 0.010060128211975097, 0.010079327583312989, 0.010355392456054687, 0.01003228759765625, 0.01010159969329834, 0.010022303581237794, 0.010035455703735352, 0.01010927963256836, 0.010483712196350097, 0.010182656288146973, 0.010124320030212403, 0.010240192413330079, 0.010119199752807617, 0.010058496475219727, 0.01008249568939209, 0.010073920249938965, 0.010106176376342773, 0.010014623641967773, 0.010002559661865234, 0.010005151748657227, 0.009969663619995118, 0.010117119789123535, 0.010184639930725098, 0.009947199821472168, 0.010011712074279784, 0.009950143814086914, 0.01003929615020752, 0.010008895874023437, 0.00998367977142334, 0.009973183631896972, 0.01004963207244873, 0.010002528190612793, 0.01018051242828369, 0.010019200325012207, 0.010055007934570313, 0.010027071952819825, 0.010056672096252441, 0.009998047828674316, 0.010051839828491211, 0.010033920288085937, 0.009987263679504395, 0.010045248031616211, 0.010117119789123535, 0.010012800216674805, 0.010004096031188965, 0.010051072120666504, 0.010062591552734375, 0.010049152374267577, 0.01010643196105957, 0.010076191902160645, 0.010017024040222169, 0.010080063819885254, 0.010181344032287598, 0.010194944381713868, 0.010133503913879394, 0.010147232055664063, 0.010216032028198242, 0.010190848350524903, 0.010223615646362304, 0.01031116771697998, 0.01033785629272461, 0.01013856029510498, 0.010230048179626465, 0.010174176216125489, 0.010123583793640136, 0.010527392387390137, 0.009928192138671875, 0.010109439849853515, 0.010034912109375, 0.010055968284606934, 0.01012940788269043, 0.01002620792388916, 0.009893856048583984, 0.010000608444213866, 0.010023520469665528, 0.00996339225769043, 0.009951359748840332, 
0.00999014377593994, 0.009928383827209473, 0.010048992156982422, 0.009942879676818847, 0.010118144035339355, 0.009989983558654785, 0.009996607780456543, 0.010164287567138672, 0.01005292797088623, 0.010065631866455078, 0.01009712028503418, 0.010016256332397461, 0.010029855728149415, 0.010143744468688964, 0.010186079978942872, 0.010082207679748535, 0.010056351661682129, 0.010106975555419923, 0.009985983848571777, 0.010120320320129394, 0.010018912315368653, 0.010069952011108399, 0.010095199584960938, 0.010172032356262206, 0.01015379238128662, 0.010133888244628906, 0.010035103797912597, 0.010191455841064453, 0.010051584243774414, 0.010122624397277832, 0.010416511535644532, 0.01023203182220459, 0.010033184051513672, 0.010090496063232422, 0.010160415649414063, 0.01011616039276123, 0.010209664344787598, 0.010174752235412598, 0.010128640174865722, 0.010099455833435059, 0.010125632286071778, 0.0099999361038208, 0.010042559623718262, 0.009911231994628907, 0.010011903762817383, 0.009921279907226563, 0.01002905559539795, 0.010064031600952148, 0.010034784317016602, 0.009982208251953124, 0.00998528003692627, 0.009955936431884766, 0.009906623840332031, 0.009952608108520508, 0.009917119979858398, 0.009972736358642579, 0.009852928161621094, 0.009888704299926759, 0.0098405122756958, 0.009862239837646485, 0.009911264419555664, 0.00994707202911377, 0.00999619197845459, 0.009973983764648437, 0.009992128372192382, 0.009981696128845214, 0.010051136016845702, 0.009999008178710937, 0.010153280258178712, 0.009998911857604981, 0.010057696342468262, 0.010016608238220214, 0.010153471946716308, 0.010100640296936036, 0.010015680313110351, 0.010069536209106446, 0.010011103630065918, 0.010104384422302246, 0.009994688034057618, 0.010106880187988282, 0.010007935523986817, 0.010048192024230957, 0.010110912322998046, 0.010067968368530274, 0.011309056282043458, 0.010070015907287597, 0.01004537582397461, 0.00998851203918457, 0.01005123233795166, 0.01004047966003418, 0.01012617588043213, 0.010128576278686524, 0.010193504333496094, 0.010010848045349121, 0.010043199539184571, 0.010125151634216309, 0.010019071578979492, 0.00997100830078125, 0.010070816040039062, 0.009989312171936035, 0.009963711738586426, 0.00990886402130127, 0.009926848411560059, 0.009840703964233399, 0.009862912178039552, 0.010123264312744141, 0.00998566436767578, 0.009941375732421875, 0.009975808143615723, 0.009930751800537109, 0.010059935569763184, 0.009982848167419433, 0.010136287689208984, 0.009953248023986816, 0.010096544265747071, 0.010003647804260253, 0.009958304405212403, 0.01002291202545166, 0.009955679893493653, 0.009985376358032226, 0.010051456451416016, 0.010141887664794923, 0.010014752388000488, 0.010047712326049806, 0.010134688377380371, 0.010056544303894042, 0.010078207969665527, 0.009991583824157714, 0.010320063591003417, 0.010178879737854004, 0.009965663909912109, 0.010120448112487792, 0.010245887756347657, 0.010159104347229005, 0.010109184265136718, 0.009997695922851562, 0.009984640121459961, 0.010018560409545898, 0.010073823928833008, 0.010084639549255371, 0.00994092845916748, 0.010018336296081542, 0.009988639831542969, 0.010086239814758301, 0.010076319694519043, 0.010077183723449706, 0.010150912284851075, 0.010126784324645996, 0.010179136276245117, 0.010120287895202636, 0.010009183883666992, 0.009928607940673828, 0.010049951553344727, 0.01006710433959961, 0.01010364818572998, 0.010023263931274414, 0.009954879760742188, 0.009846879959106445, 0.009904128074645996, 0.009946816444396973, 0.010084671974182129, 0.010080351829528808, 
0.010025983810424804, 0.010062623977661132, 0.010006655693054199, 0.0100164155960083, 0.010115424156188964, 0.010250240325927735, 0.010010751724243164, 0.009980095863342284, 0.009989215850830077, 0.01005241584777832, 0.010112735748291015, 0.009948287963867188, 0.010039263725280762, 0.009937888145446777, 0.010053312301635742, 0.010089632034301757, 0.010132160186767578, 0.01011680030822754, 0.009981823921203612, 0.010037792205810548, 0.010039199829101562, 0.01005568027496338, 0.009953056335449219, 0.010023200035095214, 0.009908160209655762, 0.010036383628845215, 0.009964384078979493, 0.00998140811920166, 0.009945055961608888, 0.009968192100524902, 0.00995680046081543, 0.01001529598236084, 0.00994707202911377, 0.010065919876098632, 0.010040639877319336, 0.010058367729187011, 0.010064288139343262, 0.01003433609008789, 0.0101212797164917, 0.010105183601379394, 0.010104031562805175, 0.010088895797729492, 0.01001699161529541, 0.009986623764038086, 0.010045023918151855, 0.009945216178894043, 0.00999014377593994, 0.010057888031005859, 0.009989919662475586, 0.010064288139343262, 0.010005472183227539, 0.010085056304931641, 0.009932064056396484, 0.009891648292541504, 0.009973055839538574, 0.0100098237991333, 0.010073856353759765, 0.009986687660217285, 0.010029088020324707, 0.010014687538146973, 0.010035039901733399, 0.010034943580627441, 0.0100797119140625, 0.009981023788452148, 0.00995894432067871, 0.010339936256408692, 0.010226752281188965, 0.009983776092529296, 0.010042495727539062, 0.010005248069763184, 0.010038463592529297, 0.01008518409729004, 0.010026975631713867, 0.009981951713562011, 0.009946880340576171, 0.010042816162109376, 0.010007391929626465, 0.010006303787231446]",tokens/s,99.58543846969587,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1149.775872,2341.404672,0.0,1946.157056,1819.994112,s,1,10.570611328125,10.570611328125,0.0,10.570611328125,10.570611328125,10.570611328125,10.570611328125,[10.570611328125],,kWh,0.00010066060923333566,1.109633666215753e-05,3.623419565399952e-05,0.0001479911415494927,,MB,1378.922496,2582.577152,0.0,2174.746624,2099.202048,s,10,3.934668365478516,0.3934668365478516,0.0016876900708838235,0.3932472686767578,0.39559535522460937,0.39606524047851566,0.39644114868164065,"[0.3930092468261719, 0.39351248168945313, 0.3919287414550781, 0.3916224365234375, 0.393379638671875, 0.3931148986816406, 0.390976806640625, 0.39509805297851563, 0.3965351257324219, 0.3954909362792969]",tokens/s,650.6266252222415,kWh,1.1802210804999958e-05,1.3015767721691397e-06,7.819250699840074e-06,2.0923038277009173e-05,tokens/kWh,12235316.716946414,MB,1401.004032,2582.577152,0.0,2174.746624,2099.204608,s,10,29.687070068359375,2.9687070068359374,0.010928839747835784,2.9662825927734375,2.9838070068359377,2.9861058959960936,2.9879450073242184,"[2.9577314453125, 2.96910888671875, 2.983296142578125, 2.965572265625, 2.96279736328125, 2.979611572265625, 
2.96007080078125, 2.966992919921875, 2.98840478515625, 2.95348388671875]",tokens/s,21.22135995735925,kWh,8.53399655324997e-05,9.413203049478244e-06,5.445724356576024e-05,0.0001492104121477382,tokens/kWh,422222.54528471915,,s,630,29.68362404251098,0.04711686355954125,0.0009809421800245752,0.04696473693847656,0.047550915908813475,0.04844057292938232,0.05204850360870361,"[0.05248166275024414, 0.046940319061279295, 0.04614553451538086, 0.04617379379272461, 0.0466357421875, 0.04670431900024414, 0.04694630432128906, 0.047247390747070316, 0.04696636962890625, 0.04696649551391602, 0.047030910491943356, 0.04713593673706055, 0.04674851226806641, 0.04682060623168945, 0.04716207885742187, 0.04681532669067383, 0.046973953247070314, 0.04722988891601562, 0.04713011169433594, 0.047695487976074216, 0.04701887893676758, 0.04705260848999023, 0.04649593734741211, 0.04603859329223633, 0.0465013427734375, 0.04679164886474609, 0.046723072052001956, 0.046614048004150394, 0.04717820739746094, 0.047179840087890626, 0.04708550262451172, 0.04699241638183594, 0.04692076873779297, 0.04689907073974609, 0.04699955368041992, 0.04674687957763672, 0.04654159927368164, 0.04635958480834961, 0.04656643295288086, 0.04690892791748047, 0.04752646255493164, 0.04671696090698242, 0.04640956878662109, 0.04680006408691406, 0.04652918243408203, 0.04664336013793945, 0.046831329345703124, 0.046958881378173827, 0.04688896179199219, 0.04691763305664062, 0.047185726165771484, 0.047114177703857424, 0.04652671813964844, 0.04639145660400391, 0.0466143684387207, 0.04676108932495117, 0.04679520034790039, 0.046881214141845706, 0.046727199554443356, 0.0471421127319336, 0.0471354866027832, 0.04719615936279297, 0.047118335723876956, 0.05119385528564453, 0.047333377838134766, 0.04698025512695313, 0.047397727966308596, 0.047459583282470706, 0.047155967712402345, 0.04710521697998047, 0.04695257568359375, 0.04697103881835937, 0.04705539321899414, 0.04701593780517578, 0.04692083358764648, 0.04674137496948242, 0.046823486328125, 0.046468032836914065, 0.046252033233642575, 0.046634239196777345, 0.04696044921875, 0.04653801727294922, 0.04685583877563477, 0.046854145050048826, 0.04723295974731445, 0.047054912567138674, 0.04676812744140625, 0.047016063690185544, 0.04744998550415039, 0.04726169586181641, 0.04655107116699219, 0.04678857421875, 0.046839134216308594, 0.04659987258911133, 0.04663395309448242, 0.046878719329833986, 0.04678656005859375, 0.04672716903686523, 0.046837760925292966, 0.046712833404541014, 0.046653438568115234, 0.04665280151367188, 0.04681299209594727, 0.04720844650268555, 0.04684064102172852, 0.04702588653564453, 0.04714329528808594, 0.0468070068359375, 0.04658988952636719, 0.047118080139160155, 0.04722918319702148, 0.04689891052246094, 0.04688924789428711, 0.04853926467895508, 0.04890566253662109, 0.04722787094116211, 0.0473026237487793, 0.04737843322753906, 0.04706918334960938, 0.047263744354248044, 0.047513599395751956, 0.04732067108154297, 0.04730511856079102, 0.04740304183959961, 0.04802864074707031, 0.04776038360595703, 0.05205545425415039, 0.04724396896362305, 0.0469851188659668, 0.047242305755615235, 0.05152617645263672, 0.04748534393310547, 0.04731606292724609, 0.047167617797851565, 0.046813983917236325, 0.04711423873901367, 0.04710575866699219, 0.046589344024658204, 0.04668499374389649, 0.04753180694580078, 0.04716783905029297, 0.046362560272216795, 0.04744339370727539, 0.048580192565917966, 0.04732723236083984, 0.04635116958618164, 0.0464180793762207, 0.046862335205078126, 0.04690739059448242, 0.04680607986450196, 
0.047015007019042966, 0.047457183837890625, 0.047051712036132814, 0.04692172622680664, 0.04653590393066406, 0.046467872619628904, 0.04656947326660156, 0.04699347305297852, 0.046951904296875, 0.046848320007324216, 0.04699267196655273, 0.04722163009643555, 0.04694563293457031, 0.04710671997070313, 0.04755046463012695, 0.04776931381225586, 0.04729270553588867, 0.047254753112792966, 0.04724777603149414, 0.04730099105834961, 0.04736614227294922, 0.04757017517089844, 0.04754044723510742, 0.04737488174438476, 0.05087846374511719, 0.04738252639770508, 0.04746854400634765, 0.04723487854003906, 0.047526046752929686, 0.04743990325927734, 0.047156448364257815, 0.046988063812255856, 0.0474337272644043, 0.04717891311645508, 0.04705366516113281, 0.04675180816650391, 0.04652995300292969, 0.047434272766113283, 0.04810927963256836, 0.0519417610168457, 0.047077728271484376, 0.04640172958374023, 0.04697068786621094, 0.04640563201904297, 0.04666511917114258, 0.04692643356323242, 0.047323135375976565, 0.04672707366943359, 0.05037884902954102, 0.047062366485595704, 0.04695926284790039, 0.046682113647460936, 0.04672700881958008, 0.046612640380859376, 0.046860286712646484, 0.046976192474365235, 0.047475582122802736, 0.04689891052246094, 0.046827743530273434, 0.04683545684814453, 0.04740940856933594, 0.046435680389404294, 0.04655756759643555, 0.04797468948364258, 0.04718988800048828, 0.047042686462402346, 0.04729446411132812, 0.04701308822631836, 0.04726419067382812, 0.04722518539428711, 0.04721049499511719, 0.046994815826416014, 0.04709235382080078, 0.04700774383544922, 0.04717772674560547, 0.04729836654663086, 0.047163585662841796, 0.047393089294433595, 0.04703308868408203, 0.04678934478759766, 0.046782176971435545, 0.0461030387878418, 0.04570111846923828, 0.04666572952270508, 0.04707657623291016, 0.046848800659179686, 0.04717772674560547, 0.04702825546264648, 0.04698313522338867, 0.04721664047241211, 0.04693932723999023, 0.046906177520751956, 0.04700569534301758, 0.047011455535888674, 0.04687295913696289, 0.04681318283081055, 0.04668726348876953, 0.047362430572509766, 0.047032318115234374, 0.04681584167480469, 0.04639539337158203, 0.046521888732910154, 0.052090816497802735, 0.047023456573486326, 0.0470252799987793, 0.04678224182128906, 0.047027713775634764, 0.04681974411010742, 0.04650425720214844, 0.04647404861450195, 0.04705788803100586, 0.047065086364746093, 0.0470838394165039, 0.04694393539428711, 0.046927040100097656, 0.04692620849609375, 0.046993854522705075, 0.0478636474609375, 0.04695811080932617, 0.04698380661010742, 0.046935680389404294, 0.04707148742675781, 0.048269535064697264, 0.04701788711547852, 0.04703871917724609, 0.04935027313232422, 0.047102046966552735, 0.04690969467163086, 0.049872703552246093, 0.04675785446166992, 0.04678451156616211, 0.046744800567626955, 0.04685084915161133, 0.04711004638671875, 0.046526561737060546, 0.04653068923950195, 0.04658396911621094, 0.04631836700439453, 0.04609763336181641, 0.04689619064331055, 0.04666198348999023, 0.04640377426147461, 0.0462825927734375, 0.046561054229736325, 0.04679942321777344, 0.046724960327148436, 0.046813278198242186, 0.0465530891418457, 0.04698726272583008, 0.04655251312255859, 0.04691939163208008, 0.04930441665649414, 0.046367870330810544, 0.0464925422668457, 0.04700889587402344, 0.04682022476196289, 0.046712478637695315, 0.04658432006835937, 0.04642595291137695, 0.04627046585083008, 0.046652511596679686, 0.0467130241394043, 0.04651459121704102, 0.04667228698730469, 0.04731027221679687, 0.05203148651123047, 0.04748259353637695, 
0.0474664306640625, 0.04748118209838867, 0.0474370231628418, 0.04721744155883789, 0.04723878479003906, 0.04727641677856445, 0.047108097076416014, 0.047607425689697266, 0.047212928771972654, 0.04712857437133789, 0.04734931182861328, 0.04725600051879883, 0.04710604858398437, 0.04874764633178711, 0.047149791717529296, 0.04674166488647461, 0.047110145568847656, 0.04689465713500977, 0.04671088027954102, 0.0466701774597168, 0.046854145050048826, 0.04757708740234375, 0.047376094818115236, 0.046981151580810544, 0.047042816162109376, 0.047185855865478514, 0.04701113510131836, 0.046719745635986326, 0.047052639007568356, 0.046887073516845706, 0.04691766357421875, 0.04685942459106445, 0.046804031372070315, 0.0470648307800293, 0.04731600189208984, 0.04691360092163086, 0.04677545547485352, 0.046917152404785153, 0.046606559753417966, 0.04649903869628906, 0.04651273727416992, 0.046966815948486326, 0.046833824157714844, 0.04678403091430664, 0.050544097900390626, 0.04812076950073242, 0.046798912048339844, 0.046757118225097656, 0.04678937530517578, 0.04738848114013672, 0.046780513763427733, 0.046769248962402345, 0.046973953247070314, 0.047132225036621095, 0.04701638412475586, 0.04723632049560547, 0.04749596786499023, 0.04763033676147461, 0.04981145477294922, 0.047511550903320314, 0.04756480026245117, 0.05374806213378906, 0.04743132781982422, 0.046841537475585934, 0.046467742919921874, 0.046785984039306644, 0.04676665496826172, 0.04664947128295899, 0.04670451354980469, 0.046129505157470704, 0.045872798919677736, 0.0458666877746582, 0.045984416961669924, 0.04607926559448242, 0.046168449401855466, 0.04676809692382813, 0.04669164657592773, 0.04755497741699219, 0.0464851188659668, 0.0468583984375, 0.04662326431274414, 0.046589088439941403, 0.046736190795898434, 0.04656342315673828, 0.04636774444580078, 0.046054336547851564, 0.04612035369873047, 0.04657241439819336, 0.04688633728027344, 0.04741763305664062, 0.047080799102783205, 0.04632387161254883, 0.046236480712890625, 0.046231136322021485, 0.04623369598388672, 0.045965312957763675, 0.046077728271484375, 0.04657183837890625, 0.047379680633544925, 0.04693471908569336, 0.046792606353759765, 0.046717025756835937, 0.04666195297241211, 0.04678623962402344, 0.04709344100952149, 0.04747296142578125, 0.047105377197265624, 0.04714460754394531, 0.047134944915771484, 0.047311649322509766, 0.04735795211791992, 0.04759535980224609, 0.0473540153503418, 0.04722892761230469, 0.04726169586181641, 0.04774092864990234, 0.04715315246582031, 0.047282176971435545, 0.04721811294555664, 0.04754691314697266, 0.047261726379394534, 0.04726784133911133, 0.047925247192382815, 0.05047615814208985, 0.05258422470092773, 0.0472119026184082, 0.046723072052001956, 0.04672556686401367, 0.04663132858276367, 0.0466431999206543, 0.047155231475830076, 0.04653871917724609, 0.047209854125976565, 0.04678057479858398, 0.046779071807861325, 0.04696041488647461, 0.04686438369750977, 0.0465715217590332, 0.046798686981201175, 0.04723318481445313, 0.04667350387573242, 0.047483295440673826, 0.04695004653930664, 0.046919456481933595, 0.047196704864501955, 0.047443073272705076, 0.047104705810546876, 0.04738848114013672, 0.04690972900390625, 0.04676976013183594, 0.04638364791870117, 0.0468372802734375, 0.04676860809326172, 0.047099807739257815, 0.046782302856445315, 0.046978912353515624, 0.04682393646240234, 0.04689014434814453, 0.046940929412841795, 0.04723507308959961, 0.0484117431640625, 0.04756889724731445, 0.048480800628662106, 0.04772288131713867, 0.0470077133178711, 0.046964767456054685, 
0.047066783905029295, 0.0470654411315918, 0.04745625686645508, 0.04692921447753906, 0.04741596984863281, 0.047718433380126955, 0.04805017471313477, 0.04666777420043945, 0.04663827133178711, 0.04704134368896484, 0.04676982498168945, 0.04672867202758789, 0.04672175979614258, 0.04639670562744141, 0.046395713806152344, 0.04677795028686523, 0.04653395080566406, 0.046636703491210935, 0.04673891067504883, 0.04682601547241211, 0.046878719329833986, 0.052262527465820316, 0.04715161514282227, 0.046774208068847654, 0.046585792541503905, 0.046685440063476566, 0.04742118453979492, 0.04679372787475586, 0.0474337272644043, 0.04696473693847656, 0.04713059234619141, 0.04717366409301758, 0.047026176452636716, 0.04701747131347656, 0.04879206466674805, 0.04696672058105469, 0.046911006927490236, 0.046769695281982424, 0.04701219177246094, 0.04735836791992187, 0.047255809783935544, 0.04729651260375976, 0.04780441665649414, 0.04739788818359375, 0.04729753494262695, 0.047032257080078126, 0.04716508865356445, 0.04742595291137695, 0.0472470703125, 0.047683616638183594, 0.047370494842529295, 0.04734566497802734, 0.04728387069702149, 0.047216991424560546, 0.04717071914672852, 0.04703641510009766, 0.046832065582275394, 0.047122463226318356, 0.047392223358154295, 0.04678054428100586, 0.04671529769897461, 0.04674803161621094, 0.046729217529296874, 0.04656332778930664, 0.04707468795776367, 0.049135967254638674, 0.04753164672851563, 0.047030017852783206, 0.0466124496459961, 0.04683491134643555, 0.04706684875488281, 0.04672284698486328, 0.04640534210205078, 0.04668467330932617, 0.04686963272094727, 0.04700249481201172, 0.048842273712158206, 0.049482208251953125, 0.04899571228027344, 0.056950752258300784, 0.046863262176513674, 0.04687868881225586, 0.04673865509033203, 0.046268672943115235, 0.05170819091796875, 0.04694220733642578, 0.04696473693847656, 0.04748905563354492, 0.046804958343505856, 0.04755660629272461, 0.0470302734375, 0.047087551116943356, 0.046927295684814456, 0.04697971343994141, 0.04682451248168945, 0.04729087829589844, 0.047129024505615236, 0.047474750518798826, 0.04761171340942383, 0.04760614395141602, 0.04747647857666016, 0.04699926376342774, 0.04710758590698242, 0.04726486587524414, 0.04809286499023437, 0.04634339141845703, 0.04640784072875977, 0.0485873908996582, 0.04838329696655273, 0.04685068893432617, 0.04702828979492187, 0.04659404754638672, 0.04664748764038086, 0.046190399169921875, 0.045610240936279293, 0.04597366333007812, 0.045754974365234374, 0.045856769561767576, 0.04640972900390625, 0.04729446411132812, 0.04628662490844727, 0.04619494247436524, 0.04605948638916016, 0.046002334594726565, 0.04631766510009765, 0.046163711547851566, 0.04627257537841797, 0.04619286346435547, 0.046154655456542966, 0.04670547103881836, 0.046499839782714845, 0.04665507125854492, 0.0462391357421875, 0.04629782485961914, 0.04709711837768555, 0.046672832489013674, 0.04636064147949219, 0.046640159606933594, 0.046588897705078125, 0.046448192596435546, 0.04644524765014649, 0.04771846389770508, 0.04655440139770508, 0.04644467163085937, 0.04846416091918945, 0.046925376892089844, 0.046509696960449216]",tokens/s,21.223823583594587,,,True 
4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1102.036992,4937.678848,0.0,4542.431232,4484.571136,s,1,14.752904296875,14.752904296875,0.0,14.752904296875,14.752904296875,14.752904296875,14.752904296875,[14.752904296875],,kWh,0.0002231884544166668,2.4612022554655027e-05,8.0394786538e-05,0.0003281952635093218,,MB,1298.18624,5480.841216,0.0,5073.010688,4884.617216,s,10,10.971497924804687,1.0971497924804687,0.007856330733512247,1.0982994384765625,1.1034736572265624,1.10629560546875,1.1085531640625,"[1.0782109375, 1.0902459716796875, 1.0958052978515624, 1.0969268798828125, 1.0970880126953124, 1.100737548828125, 1.0995108642578124, 1.10100830078125, 1.1028465576171875, 1.1091175537109375]",tokens/s,233.33185837936279,kWh,3.1934041897916597e-05,3.521803565735267e-06,2.1167794712000044e-05,5.66236401756519e-05,tokens/kWh,4521079.874163224,MB,1318.592512,5495.52128,0.0,5087.690752,4884.619776,s,10,49.122622070312495,4.91226220703125,0.020120577195886623,4.919377685546875,4.93226650390625,4.9333185546875,4.9341601953125,"[4.8706455078125, 4.886169921875, 4.8998837890625, 4.907328125, 4.91805517578125, 4.92144091796875, 4.9207001953125, 4.9319951171875, 4.93203271484375, 4.93437060546875]",tokens/s,12.825048286271013,kWh,0.00014475006764749977,1.5967055393106905e-05,9.617016026939989e-05,0.00025688728331000655,tokens/kWh,245243.7473285621,,s,630,49.1183184890747,0.0779655849032932,0.00191787837552134,0.07762366485595704,0.07906547698974609,0.07959547348022461,0.09099831825256348,"[0.08926860809326172, 0.07618364715576172, 0.07637757110595703, 0.0760898895263672, 0.07598899078369141, 0.07650016021728516, 0.0764277114868164, 0.07645017242431641, 0.07608319854736328, 0.07603814697265625, 0.07644694519042969, 0.0776957778930664, 0.0802877426147461, 0.07865948486328125, 0.0773755874633789, 0.07660543823242187, 0.07668531036376953, 0.07667097473144531, 0.07665827178955079, 0.07654236602783203, 0.07594393920898437, 0.07612556457519531, 0.07653011322021484, 0.07711676788330078, 0.07854892730712891, 0.07858882904052734, 0.07787519836425781, 0.07693312072753906, 0.07710047912597656, 0.07736377716064453, 0.07745126342773437, 0.07710227203369141, 0.07757497406005859, 0.07623474884033203, 0.07672831726074218, 0.07711539459228516, 0.0778317108154297, 0.07806761932373046, 0.07797747039794922, 0.07793888092041015, 0.07683942413330078, 0.0768358383178711, 0.07762544250488282, 0.07743926239013672, 0.0769276123046875, 0.07739110565185547, 0.07685401916503906, 0.0763719711303711, 0.07696521759033204, 0.07787382507324218, 0.0776703338623047, 0.07796537780761718, 0.07730518341064453, 0.07733100891113281, 0.07732755279541016, 0.07707536315917969, 0.07677500915527344, 0.0769184341430664, 0.0767004165649414, 0.07674009704589843, 0.07696588897705078, 0.07736511993408203, 0.07774578857421875, 0.09197212982177734, 0.07646002960205078, 0.07630172729492188, 0.07632752227783203, 0.07714816284179687, 
0.07644147491455078, 0.07614415740966797, 0.07655689239501953, 0.07641228485107422, 0.07637017822265625, 0.07609334564208985, 0.07731043243408203, 0.08061673736572265, 0.07902486419677734, 0.07734272003173828, 0.07655980682373047, 0.07709126281738281, 0.07649292755126953, 0.07644979095458984, 0.07663206481933593, 0.07655219268798828, 0.07667027282714843, 0.07679849243164062, 0.07713603210449219, 0.07841734313964843, 0.07906899261474609, 0.07840025329589843, 0.07749222564697265, 0.07771929931640625, 0.07771897888183593, 0.07740089416503906, 0.07656038665771485, 0.07656028747558594, 0.07682441711425782, 0.07719347381591797, 0.07731571197509765, 0.07730006408691406, 0.07851990509033203, 0.07791661071777344, 0.07788854217529297, 0.0776402587890625, 0.07729353332519531, 0.07730425262451172, 0.07720294189453125, 0.07689881896972656, 0.07710720062255859, 0.07744822692871094, 0.0770522232055664, 0.07733110046386718, 0.07738105773925781, 0.0776033935546875, 0.07803823852539063, 0.07773670196533203, 0.07800220489501954, 0.07737289428710938, 0.07760457611083985, 0.0771099853515625, 0.07712895965576172, 0.07769379425048828, 0.07776860809326172, 0.07750870513916015, 0.07782195281982422, 0.07843628692626953, 0.092042236328125, 0.07608697509765625, 0.0767286376953125, 0.07707215881347657, 0.07717046356201172, 0.07669190216064453, 0.07661158752441406, 0.07637606048583985, 0.07637811279296874, 0.07637171173095703, 0.07679615783691406, 0.07832780456542969, 0.08089600372314452, 0.0783994903564453, 0.07728073883056641, 0.07711519622802734, 0.07762403106689453, 0.0775125732421875, 0.07758246612548828, 0.07658649444580078, 0.07698246765136718, 0.07689193725585937, 0.07671247863769531, 0.07782195281982422, 0.07968768310546875, 0.07895200347900391, 0.07807743835449218, 0.07727619171142579, 0.07718083190917968, 0.07720719909667968, 0.07717839813232422, 0.07722038269042969, 0.07691497802734375, 0.07660543823242187, 0.07693238067626954, 0.07689830780029297, 0.07791827392578125, 0.07859677124023437, 0.07882956695556641, 0.07828880310058593, 0.07732233428955078, 0.07731779479980469, 0.07743289947509766, 0.07728975677490234, 0.077338623046875, 0.07740006256103515, 0.07728323364257812, 0.07668335723876953, 0.07728128051757813, 0.07766630554199219, 0.07834009552001953, 0.07837625885009766, 0.07833622741699219, 0.07825804901123047, 0.07922748565673828, 0.0785401611328125, 0.07723481750488281, 0.0774655990600586, 0.07801606750488281, 0.07741689300537109, 0.0774483871459961, 0.07741232299804687, 0.07856204986572266, 0.09206169891357421, 0.07652620697021484, 0.07662220764160156, 0.07711743927001953, 0.07656845092773437, 0.07678534698486328, 0.0768802261352539, 0.07632809448242188, 0.07729203033447266, 0.0775766372680664, 0.07745536041259765, 0.0794717788696289, 0.08120406341552734, 0.07950540924072266, 0.07737881469726562, 0.07766502380371093, 0.07713504028320313, 0.07701776123046875, 0.07668508911132813, 0.07671437072753906, 0.07650713348388671, 0.07691852569580078, 0.07714998626708984, 0.07788591766357422, 0.07924486541748046, 0.07934732818603515, 0.07828562927246094, 0.07804041290283203, 0.07734732818603515, 0.07729901123046876, 0.07688278198242188, 0.0772908477783203, 0.0772061767578125, 0.07741795349121093, 0.07681814575195313, 0.07719526672363282, 0.07790815734863281, 0.07861721801757812, 0.07816553497314453, 0.07808048248291016, 0.07791001892089844, 0.07780294036865235, 0.076761474609375, 0.07750879669189453, 0.07747984313964844, 0.07775206756591797, 0.07756835174560547, 0.07798989105224609, 0.07786495971679687, 
0.07807180786132813, 0.0781843490600586, 0.07848342132568359, 0.07806578826904297, 0.07846502685546874, 0.07796736145019531, 0.07741622161865234, 0.07772783660888671, 0.07749644470214843, 0.07735091400146485, 0.07727513885498047, 0.07805133056640624, 0.078166015625, 0.07796326446533203, 0.09175663757324219, 0.07707814025878906, 0.07720508575439453, 0.07673117065429688, 0.07681228637695313, 0.07694236755371094, 0.07776764678955078, 0.0769781723022461, 0.07710720062255859, 0.07697974395751953, 0.07688649749755859, 0.07944316864013672, 0.08178562927246094, 0.07993516540527344, 0.07816838073730469, 0.07787696075439453, 0.07652745819091797, 0.07671443176269531, 0.07724345397949219, 0.07688896179199219, 0.07722528076171875, 0.07712025451660157, 0.07678880310058593, 0.0779435806274414, 0.07948041534423828, 0.0797927017211914, 0.07873945617675782, 0.07797760009765625, 0.07792182159423829, 0.07735472106933594, 0.07799996948242187, 0.07688278198242188, 0.07699593353271485, 0.07731887817382813, 0.07758233642578125, 0.07751884460449218, 0.07847321319580078, 0.07891490936279297, 0.07905347442626953, 0.07871878051757812, 0.07794297790527344, 0.07775437164306641, 0.07776380920410156, 0.07752489471435547, 0.07765900421142578, 0.0776817626953125, 0.07773481750488281, 0.07696694183349609, 0.07780976104736329, 0.07812287902832031, 0.07902719879150391, 0.07883366394042969, 0.07807590484619141, 0.07790991973876953, 0.07793465423583984, 0.07771324920654298, 0.07755385589599609, 0.07749385833740234, 0.07769129943847657, 0.0780021743774414, 0.07740825653076172, 0.0783022689819336, 0.07807686614990235, 0.09157756805419921, 0.07674755096435547, 0.07706134033203126, 0.0774254379272461, 0.07752294158935547, 0.0775167999267578, 0.07747993469238282, 0.07675494384765626, 0.07712944030761719, 0.077034912109375, 0.07707491302490234, 0.07888713836669922, 0.08155561828613281, 0.07898934173583984, 0.07811481475830079, 0.07720655822753907, 0.07674562835693359, 0.07738579559326172, 0.077412353515625, 0.0774031982421875, 0.07759548950195312, 0.07727523040771485, 0.0768443832397461, 0.07761920166015625, 0.0790512924194336, 0.0791942367553711, 0.07896028900146485, 0.07807561492919922, 0.07797209930419922, 0.07724441528320312, 0.07771955108642578, 0.07745686340332031, 0.0777072982788086, 0.07750502777099609, 0.07761510467529296, 0.07803084564208984, 0.07868621063232421, 0.07910134124755859, 0.07940771484375, 0.07859200286865234, 0.07772978973388672, 0.07841382598876953, 0.07781990051269531, 0.07744921875, 0.07751475524902343, 0.07738162994384766, 0.07727718353271484, 0.07759200286865234, 0.07782457733154297, 0.07827193450927734, 0.0786396484375, 0.0785117416381836, 0.0781884765625, 0.07800985717773437, 0.07801718139648438, 0.07830764770507813, 0.07728460693359375, 0.07777561950683594, 0.07762329864501953, 0.077795166015625, 0.07847283172607422, 0.0780475845336914, 0.078388671875, 0.09062461090087891, 0.07655619049072265, 0.07697417449951172, 0.07689523315429687, 0.07720857238769531, 0.07717068481445312, 0.0769986572265625, 0.07686895751953125, 0.07673875427246094, 0.07722441864013672, 0.07722598266601563, 0.07866162872314453, 0.081421630859375, 0.0789590072631836, 0.07837926483154296, 0.07691600036621093, 0.07720809936523437, 0.07711561584472656, 0.07709001922607422, 0.07722882843017578, 0.07731401824951172, 0.07707855987548828, 0.07702323150634766, 0.07796883392333985, 0.0794691162109375, 0.07957030487060547, 0.07913475036621094, 0.07806012725830078, 0.07855104064941407, 0.07740825653076172, 0.07765606689453125, 
0.07764947509765625, 0.07771590423583985, 0.07726802825927734, 0.07759149169921875, 0.07855478668212891, 0.07839510345458985, 0.07922259521484375, 0.0793004150390625, 0.07834111785888671, 0.07801225280761719, 0.0772460174560547, 0.07767414093017579, 0.07779782104492187, 0.07761309051513672, 0.07767884826660157, 0.0779716796875, 0.07753024291992187, 0.0775604476928711, 0.07833190155029297, 0.07873356628417968, 0.07873741149902344, 0.07877632141113282, 0.07797760009765625, 0.07793869018554687, 0.07817420959472657, 0.07765321350097656, 0.07743363189697265, 0.07776598358154296, 0.07809295654296874, 0.07781990051269531, 0.0782349090576172, 0.0788116455078125, 0.09117900848388671, 0.07717887878417969, 0.07776870727539062, 0.07668688201904297, 0.0770687713623047, 0.07717810821533203, 0.07724518585205079, 0.0772332763671875, 0.07712448120117188, 0.07714611053466797, 0.07718911743164063, 0.07860137939453125, 0.08075312042236328, 0.07922112274169922, 0.07856502532958984, 0.0777977294921875, 0.07735295867919922, 0.07741439819335938, 0.07767596435546875, 0.07731053161621093, 0.07733180999755859, 0.07748265838623047, 0.07766630554199219, 0.07856495666503906, 0.07945369720458985, 0.08012054443359375, 0.08000531005859375, 0.07848908996582031, 0.07777919769287109, 0.0775068130493164, 0.07752499389648437, 0.07764157104492188, 0.07760460662841796, 0.07734518432617188, 0.07810457611083985, 0.07764527893066406, 0.07818886566162109, 0.0790195541381836, 0.07927670288085938, 0.0784444808959961, 0.07830742645263672, 0.07792623901367188, 0.07821920013427734, 0.07787337493896485, 0.07816365051269532, 0.07733280181884766, 0.07772774505615235, 0.0778193588256836, 0.07777677154541016, 0.07863158416748046, 0.07970816040039062, 0.07904051208496093, 0.07840502166748047, 0.07804576110839843, 0.07797721862792968, 0.0774862060546875, 0.07818825531005859, 0.07796988677978516, 0.07776060485839843, 0.07846915435791016, 0.0781061782836914, 0.07818672180175781, 0.07955059051513672, 0.09101660919189453, 0.07695804595947266, 0.07690470123291016, 0.07700431823730469, 0.07696022033691406, 0.07730912017822265, 0.07718370819091797, 0.07712969970703125, 0.07693529510498047, 0.07701203155517578, 0.07717369842529297, 0.07928012847900391, 0.08156972503662109, 0.0796160659790039, 0.07813085174560547, 0.07764342498779297, 0.07760867309570313, 0.07835132598876954, 0.07768019104003906, 0.07756435394287109, 0.07740812683105469, 0.07753126525878906, 0.07769075012207032, 0.07851004791259765, 0.07996022033691406, 0.07982806396484375, 0.07891651153564454, 0.07889715576171875, 0.07735664367675782, 0.07743917083740234, 0.07787542724609375, 0.07728128051757813, 0.07728742218017579, 0.07717462158203126, 0.07823580932617187, 0.07755331420898437, 0.07847357177734375, 0.07914495849609375, 0.07906508636474609, 0.07862857818603515, 0.07816127777099609, 0.07790054321289062, 0.07786653137207031, 0.07795571136474609, 0.07787519836425781, 0.07731814575195313, 0.07716031646728516, 0.0777196502685547, 0.07826214599609375, 0.07891340637207031, 0.07919792175292968, 0.07927865600585937, 0.07844022369384765, 0.07841404724121094, 0.0778239974975586, 0.0783994903564453, 0.07819468688964844, 0.07852403259277344, 0.07787741088867188, 0.07743682861328124, 0.07816633605957031, 0.07863645172119141, 0.07878307342529296, 0.09095353698730468, 0.0773571548461914, 0.07703804779052735, 0.07774944305419922, 0.07724729919433594, 0.07715020751953125, 0.0771973114013672, 0.07709490966796875, 0.07734886169433594, 0.07758544158935547, 0.07741673278808593, 0.0791497573852539, 
0.08200364685058593, 0.07928864288330079, 0.07853670501708984, 0.07763353729248047, 0.07752649688720703, 0.07790201568603515, 0.07745366668701172, 0.07719116973876954, 0.07711478424072266, 0.07716844940185547, 0.07740700531005859, 0.0783073272705078, 0.08032665252685547, 0.07952588653564453, 0.07868364715576172, 0.07845734405517578, 0.07784786987304687, 0.07737149047851563, 0.07749858856201172, 0.07753971099853516, 0.07734681701660157, 0.07713177490234376, 0.07712153625488281, 0.07828070068359375, 0.08018329620361328, 0.07970406341552734, 0.07879065704345703, 0.078323486328125, 0.07814575958251953, 0.07799193572998046, 0.07723612976074219, 0.07786016082763672, 0.07831222534179688, 0.0776785888671875, 0.0776325454711914, 0.07782498931884765, 0.07845069122314453, 0.07909580993652343, 0.07940882873535156, 0.07926758575439453, 0.07881171417236328, 0.07850371551513671, 0.07770941162109375, 0.07810265350341797, 0.07804313659667969, 0.07804108428955078, 0.07742668914794922, 0.07783424377441406, 0.07784006500244141, 0.07860665893554687, 0.07916134643554687]",tokens/s,12.826171973703245,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1047.486464,1393.491968,0.0,998.244352,995.459072,s,1,9.1995439453125,9.1995439453125,0.0,9.1995439453125,9.1995439453125,9.1995439453125,9.1995439453125,[9.1995439453125],,kWh,6.138048507500286e-05,6.759885228847262e-06,2.232835119599791e-05,9.046872149984803e-05,,MB,1316.298752,1619.984384,0.0,1212.153856,1174.001664,s,10,2.065621322631836,0.2065621322631836,0.001057923379171417,0.20703142547607423,0.20757976837158204,0.20789478073120116,0.20814679061889646,"[0.20546205139160156, 0.2072406005859375, 0.207006591796875, 0.2082097930908203, 0.20497737121582033, 0.20498719787597655, 0.2070726776123047, 0.20705625915527343, 0.20609901428222657, 0.207509765625]",tokens/s,1239.3365482586466,kWh,6.249252624379373e-06,6.890893309256835e-07,4.169678276638249e-06,1.1108020231943305e-05,tokens/kWh,23046411.030457206,MB,1338.712064,1619.984384,0.0,1212.153856,1174.004224,s,10,22.765936279296877,2.2765936279296874,0.010787243324537987,2.280182861328125,2.28728349609375,2.2916496826171873,2.2951426318359376,"[2.286313232421875, 2.257575927734375, 2.296015869140625, 2.26831201171875, 2.279771240234375, 2.28084423828125, 2.280594482421875, 2.27190283203125, 2.263395263671875, 2.281211181640625]",tokens/s,27.67292292621042,kWh,6.532558710478703e-05,7.2054362435162614e-06,3.535490889856203e-05,0.00010788593224686533,tokens/kWh,583950.0914340061,,s,630,22.761943065643333,0.03613006835816398,0.0007618873217216914,0.03608678245544433,0.0365433364868164,0.036807624435424804,0.038606347618103026,"[0.03631513595581055, 0.03638886260986328, 0.03630489730834961, 0.03641753768920898, 0.03643932723999024, 0.03671318435668945, 0.03632534408569336, 0.03666128158569336, 0.03630819320678711, 0.03644054412841797, 0.03646905517578125, 0.03628851318359375, 
0.03624284744262695, 0.03624406433105469, 0.036173824310302735, 0.03623321533203125, 0.036154464721679686, 0.0362259521484375, 0.03627798461914063, 0.03643830490112305, 0.036062782287597656, 0.035994049072265624, 0.036268096923828125, 0.03680006408691406, 0.0360816650390625, 0.03593801498413086, 0.03613056182861328, 0.03600476837158203, 0.03606118392944336, 0.03620249557495117, 0.036171775817871094, 0.03632467269897461, 0.03680716705322266, 0.03615094375610352, 0.03638457489013672, 0.03647151947021485, 0.03633148956298828, 0.03638070297241211, 0.036269855499267575, 0.03617337417602539, 0.036276897430419924, 0.036345855712890625, 0.036216705322265626, 0.03619782257080078, 0.03608000183105469, 0.03618572616577148, 0.03661606216430664, 0.03612713623046875, 0.03635036849975586, 0.03609395217895508, 0.03621683120727539, 0.03639839935302734, 0.036049633026123046, 0.0360807991027832, 0.03611334228515625, 0.036359870910644534, 0.0361064338684082, 0.03632332611083984, 0.03639449691772461, 0.03618790435791015, 0.03641622543334961, 0.036286495208740235, 0.03637452697753906, 0.036089855194091795, 0.03622611236572266, 0.03609417724609375, 0.03588927841186523, 0.03541183853149414, 0.035952865600585936, 0.036131328582763675, 0.03614672088623047, 0.036176353454589846, 0.03618815994262695, 0.03607932662963867, 0.03604246520996094, 0.0361060791015625, 0.036063968658447264, 0.036073471069335936, 0.03620364761352539, 0.03607846450805664, 0.036116481781005856, 0.03645183944702148, 0.036114944458007815, 0.03611580657958984, 0.03637315368652344, 0.036345855712890625, 0.03615913772583008, 0.036267616271972655, 0.036692737579345706, 0.03616358566284179, 0.03586867141723633, 0.03549507141113281, 0.035915775299072264, 0.03579375839233399, 0.03560451126098633, 0.03560444641113281, 0.03557891082763672, 0.035503070831298826, 0.035604480743408204, 0.03596492767333984, 0.03577974319458008, 0.03558281707763672, 0.03577446365356445, 0.03547663879394531, 0.03570979309082031, 0.035563518524169925, 0.03548364639282227, 0.03538739013671875, 0.03550931167602539, 0.035275104522705075, 0.03582419204711914, 0.03569052886962891, 0.035741504669189454, 0.03582585525512695, 0.035827713012695314, 0.03561593627929688, 0.035447616577148434, 0.03531478500366211, 0.03510774230957031, 0.03537510299682617, 0.03563430404663086, 0.035447486877441405, 0.035483230590820314, 0.03567196655273437, 0.03564358520507813, 0.03540838241577148, 0.036004417419433596, 0.035833854675292966, 0.03548160171508789, 0.035438591003417966, 0.03548364639282227, 0.035315711975097655, 0.03712575912475586, 0.03573183822631836, 0.03578019332885742, 0.036105758666992185, 0.03603340911865234, 0.03621478271484375, 0.03589529418945313, 0.03587891387939453, 0.035776512145996094, 0.038744224548339846, 0.043791969299316405, 0.0459975357055664, 0.03665795135498047, 0.03726889419555664, 0.03670412826538086, 0.037017887115478515, 0.03653023910522461, 0.0365428466796875, 0.0364620475769043, 0.036416030883789065, 0.03625369644165039, 0.03636627197265625, 0.03639302444458008, 0.03640444946289063, 0.0364095344543457, 0.03645091247558594, 0.0363392333984375, 0.036831710815429686, 0.036397056579589845, 0.03664896011352539, 0.03649331283569336, 0.03610009765625, 0.035910816192626954, 0.035939167022705075, 0.035846080780029294, 0.03612377548217773, 0.03573356628417969, 0.035773311614990234, 0.03599788665771484, 0.03612854385375976, 0.03589532852172852, 0.0359444465637207, 0.03590348815917969, 0.03618406295776367, 0.0360263671875, 0.03596083068847656, 0.03633273696899414, 
0.035928768157958986, 0.03603263854980469, 0.0359659538269043, 0.03593667221069336, 0.03576073455810547, 0.03557331085205078, 0.03573907089233398, 0.03591993713378906, 0.035851200103759764, 0.03592547225952149, 0.03603308868408203, 0.03596083068847656, 0.03570217514038086, 0.03559513473510742, 0.03530902481079102, 0.03533849716186523, 0.035514240264892576, 0.036141151428222655, 0.036136768341064454, 0.03607686233520508, 0.036037151336669924, 0.03591551971435547, 0.03571161651611328, 0.03590758514404297, 0.03583590316772461, 0.03603046417236328, 0.036050945281982424, 0.03593625640869141, 0.03629040145874023, 0.03593977737426758, 0.036444896697998046, 0.036087806701660154, 0.03611795043945312, 0.03611091232299805, 0.0362630729675293, 0.0359637451171875, 0.03611603164672852, 0.03605686569213867, 0.03604665756225586, 0.03621897506713867, 0.03638502502441406, 0.03612416076660156, 0.0359444465637207, 0.03610726547241211, 0.0363675537109375, 0.03611936187744141, 0.036797534942626955, 0.036170654296875, 0.03596822357177734, 0.03600259017944336, 0.035985408782958986, 0.0361085433959961, 0.03600950241088867, 0.03635398483276367, 0.03862748718261719, 0.03642176055908203, 0.035934207916259765, 0.03549184036254883, 0.035489791870117186, 0.03578265762329102, 0.03605500793457031, 0.036081695556640626, 0.03584342575073242, 0.03549801635742188, 0.035299457550048825, 0.035197311401367185, 0.03527692794799805, 0.035186622619628904, 0.035397022247314454, 0.03545353698730469, 0.03559596633911133, 0.03563484954833984, 0.03836928176879883, 0.03598332977294922, 0.03587376022338867, 0.035811328887939455, 0.035837310791015625, 0.03582169723510742, 0.03607807922363281, 0.03552175903320313, 0.03606403350830078, 0.03562905502319336, 0.0360079345703125, 0.03608575820922852, 0.03604044723510742, 0.03588940811157226, 0.036046527862548826, 0.036182334899902344, 0.03597721481323242, 0.03600352096557617, 0.03594249725341797, 0.03581267166137695, 0.035754913330078124, 0.0355404167175293, 0.03668751907348633, 0.035715744018554686, 0.035975425720214844, 0.036018177032470705, 0.036208641052246096, 0.03609600067138672, 0.035905025482177735, 0.03604326248168945, 0.036239360809326174, 0.036308609008789065, 0.03619190216064453, 0.03636912155151367, 0.036675582885742186, 0.03707411193847656, 0.03697747039794922, 0.03709952163696289, 0.03701929473876953, 0.036439937591552736, 0.03645196914672852, 0.036380992889404294, 0.03628441619873047, 0.03621532821655273, 0.03634902572631836, 0.03626691055297852, 0.03624905776977539, 0.03640169525146485, 0.036239360809326174, 0.036224414825439456, 0.036799072265625, 0.03633135986328125, 0.036369728088378905, 0.036363105773925784, 0.036359169006347655, 0.036365310668945314, 0.036428958892822265, 0.03614761734008789, 0.03631507110595703, 0.03589580917358398, 0.03579904174804688, 0.03594649505615234, 0.03587481689453125, 0.03630617523193359, 0.03628630447387695, 0.03569084930419922, 0.03552854537963867, 0.03561033630371094, 0.04100960159301758, 0.035587425231933596, 0.03542287826538086, 0.03538236618041992, 0.03623004913330078, 0.03603235244750977, 0.03594460678100586, 0.03570073699951172, 0.03851878356933594, 0.03585971069335937, 0.035526496887207035, 0.03513817596435547, 0.03518288040161133, 0.03566947174072266, 0.035435039520263674, 0.035604480743408204, 0.03538905715942383, 0.03527923202514648, 0.03544678497314453, 0.035794944763183595, 0.03634918212890625, 0.03610265731811523, 0.036398944854736326, 0.03721420669555664, 0.03613737487792969, 0.035872608184814456, 0.036019935607910156, 
0.03630329513549805, 0.03643734359741211, 0.03562972640991211, 0.03586492919921875, 0.03560524749755859, 0.03623004913330078, 0.0363765754699707, 0.03624755096435547, 0.03659161758422851, 0.03575603103637695, 0.03544416046142578, 0.03628268814086914, 0.03634991836547852, 0.036260128021240234, 0.03649331283569336, 0.03633724975585938, 0.036647327423095705, 0.03644163131713867, 0.03692303848266602, 0.0366569938659668, 0.03649212646484375, 0.03632326507568359, 0.03637267303466797, 0.03637990570068359, 0.03635891342163086, 0.03686604690551758, 0.036517887115478515, 0.03700735855102539, 0.03647283172607422, 0.036547744750976566, 0.036413345336914066, 0.0363570556640625, 0.03855459213256836, 0.036674560546875, 0.036335166931152345, 0.036018623352050784, 0.035694591522216795, 0.03568025588989258, 0.0360079345703125, 0.03698908615112305, 0.03581353759765625, 0.03591446304321289, 0.036236255645751954, 0.036239360809326174, 0.03592192077636719, 0.036044769287109375, 0.03585436630249023, 0.03557580947875977, 0.03546908950805664, 0.035727039337158206, 0.04241667175292969, 0.03923932647705078, 0.03579702377319336, 0.03566828918457031, 0.03592752075195312, 0.03591222381591797, 0.03601747131347656, 0.035754688262939455, 0.03562656021118164, 0.03557011032104492, 0.03562496185302735, 0.035624286651611325, 0.035830432891845704, 0.03587071990966797, 0.035880863189697264, 0.03617728042602539, 0.03585004806518555, 0.036109214782714845, 0.0357498893737793, 0.035703968048095704, 0.0362402229309082, 0.03618201446533203, 0.03595017623901367, 0.035665599822998044, 0.03557449722290039, 0.03554508972167969, 0.03590566253662109, 0.03616748809814453, 0.035856063842773435, 0.03563763046264649, 0.03566947174072266, 0.035846687316894534, 0.0364738883972168, 0.036293598175048826, 0.036238624572753904, 0.03631135940551758, 0.036729248046875, 0.03618764877319336, 0.0363524169921875, 0.03630259323120117, 0.036331871032714846, 0.036274177551269535, 0.036173473358154296, 0.03651824188232422, 0.03658863830566406, 0.036259166717529295, 0.03645708847045898, 0.03639292907714844, 0.036344894409179686, 0.035871742248535156, 0.03554099273681641, 0.03529523086547852, 0.03530137634277344, 0.035619998931884764, 0.03618697738647461, 0.03595468902587891, 0.036224384307861325, 0.03605295944213867, 0.03575260925292969, 0.03583590316772461, 0.035676063537597655, 0.035468929290771486, 0.03545471954345703, 0.03521200180053711, 0.035398815155029295, 0.036025150299072266, 0.0357540168762207, 0.036066753387451175, 0.03582534408569336, 0.03592057418823242, 0.03614739227294922, 0.03609190368652344, 0.03606902313232422, 0.03583158493041992, 0.03618649673461914, 0.03573574447631836, 0.03577967834472656, 0.03621571350097656, 0.03596492767333984, 0.035764320373535156, 0.03631504058837891, 0.036435966491699216, 0.0372262077331543, 0.03676803207397461, 0.036029792785644534, 0.03587676620483399, 0.035832576751708985, 0.03578060913085938, 0.036122623443603515, 0.035899391174316404, 0.03649846267700195, 0.03775968170166016, 0.036117889404296874, 0.03606780624389649, 0.03690335845947266, 0.03605692672729492, 0.03600751876831055, 0.03616128158569336, 0.03617670440673828, 0.036083713531494144, 0.03609132766723633, 0.03614777755737305, 0.036261886596679685, 0.03626598358154297, 0.036222976684570314, 0.03626355361938476, 0.03615071868896484, 0.03620345687866211, 0.03631382369995117, 0.03618953704833985, 0.03646716690063476, 0.03631052780151367, 0.036324031829833986, 0.03648716735839844, 0.036190208435058595, 0.0365588493347168, 0.03680799865722656, 
0.036307647705078126, 0.03648876953125, 0.036630977630615236, 0.03636633682250977, 0.03693772888183594, 0.036206592559814454, 0.03641753768920898, 0.03631039810180664, 0.03772684860229492, 0.03576617431640625, 0.035458656311035154, 0.03545753479003906, 0.03533824157714844, 0.03544863891601562, 0.03528278350830078, 0.03553519821166992, 0.035683551788330076, 0.03586947250366211, 0.03572447967529297, 0.03551110458374023, 0.035422206878662106, 0.03552249526977539, 0.035272640228271486, 0.035461246490478514, 0.03572323226928711, 0.036388607025146485, 0.036030750274658206, 0.036111679077148434, 0.03584451293945313, 0.03639865493774414, 0.03620060729980469, 0.036041278839111325, 0.03581542587280274, 0.035672065734863284, 0.035432449340820314, 0.03598745727539063, 0.036111934661865235, 0.03669561767578125, 0.035289985656738285, 0.035278335571289066, 0.03565824127197265, 0.03575807952880859, 0.03544582366943359, 0.03556857681274414, 0.03561417770385742, 0.03551286315917969, 0.03536076736450195, 0.03554099273681641, 0.03572531127929687, 0.03574915313720703, 0.035482303619384765, 0.03573276901245117, 0.03549078369140625, 0.035643199920654296, 0.03655267333984375, 0.03637152099609375, 0.0363570556640625, 0.03666329574584961, 0.03645587158203125, 0.03635843276977539, 0.03630313491821289, 0.03613654327392578, 0.036192798614501955, 0.036124542236328126, 0.036534271240234374, 0.0364150390625, 0.03649491119384766, 0.03656179046630859, 0.036296703338623046, 0.036347904205322266, 0.03629171371459961, 0.03660070419311524, 0.036474815368652345, 0.036467937469482424, 0.035936351776123046, 0.036082176208496096, 0.036319488525390624, 0.03579084777832031, 0.0355676155090332, 0.03625574493408203, 0.036536319732666016, 0.03630080032348633, 0.036446208953857424, 0.03794851303100586, 0.035977886199951174, 0.03591932678222656, 0.03552259063720703, 0.035584766387939455, 0.03611033630371094, 0.0355810546875, 0.03575616073608399, 0.03627494430541992, 0.0366196174621582, 0.036238304138183595, 0.03626361465454102, 0.03598057556152344, 0.03578339385986328, 0.035741119384765624, 0.03582624053955078, 0.035706241607666014, 0.03554982376098633, 0.03544063949584961, 0.03544073486328125, 0.035727264404296875, 0.03570406341552734, 0.03633023834228516, 0.03594623947143555, 0.03628249740600586, 0.0366429443359375, 0.03623321533203125, 0.036459617614746094, 0.03705948638916016, 0.03685171127319336, 0.03671244812011719, 0.03612089538574219, 0.03596681594848633, 0.03631497573852539]",tokens/s,27.67777769161179,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2525.478912,4618.911744,0.0,4223.664128,4030.321664,s,1,12.98856640625,12.98856640625,0.0,12.98856640625,12.98856640625,12.98856640625,12.98856640625,[12.98856640625],,kWh,0.0001635869273792044,1.8036465604369006e-05,5.936421415800308e-05,0.00024098760714157648,,MB,2266.554368,5021.564928,0.0,4613.7344,4385.21344,s,10,4.838052032470704,0.4838052032470704,0.001823344870428049,0.4842698822021484,0.4858554473876953,0.486376789855957,0.4867938638305664,"[0.48689813232421875, 0.4818007507324219, 0.48216598510742187, 0.48156146240234377, 0.48153952026367186, 0.48395285034179686, 0.4857395935058594, 0.4845869140625, 0.4848028259277344, 0.4850039978027344]",tokens/s,529.1385836321101,kWh,1.4523641770832303e-05,1.6012413217024642e-06,9.68122467618851e-06,2.5806107768723277e-05,tokens/kWh,9920132.175463874,MB,2295.25504,5023.66208,0.0,4615.831552,4385.216,s,10,44.64654589843751,4.464654589843751,0.01035591943382472,4.462802734375,4.478136865234375,4.480356762695312,4.482132680664062,"[4.48257666015625, 4.465416015625, 4.467009765625, 4.4586259765625, 4.47295849609375, 4.44545654296875, 4.460189453125, 4.4776435546875, 4.45950244140625, 4.4571669921875]",tokens/s,14.110834048240404,kWh,0.00013087549349458487,1.4436641335989185e-05,7.927101315221339e-05,0.00022458314798278747,tokens/kWh,280519.71203479817,,s,630,44.64342021179197,0.0708625717647492,0.0007126091269215054,0.07069612884521484,0.07140072250366211,0.07209344902038574,0.074042163772583,"[0.07142793273925781, 0.07068627166748047, 0.07074208068847657, 0.07168669128417969, 0.0706246109008789, 0.07109014129638672, 0.0752339859008789, 0.07027507019042968, 0.07079116821289062, 0.07076169586181641, 0.07063565063476562, 0.07032486724853515, 0.07041161346435547, 0.07123628997802735, 0.0705820770263672, 0.07047596740722656, 0.0706984634399414, 0.07108668518066406, 0.07110444641113281, 0.07131459045410156, 0.07075721740722657, 0.07086080169677735, 0.07076019287109375, 0.07072089385986328, 0.07092339324951172, 0.07071241760253906, 0.07120355224609375, 0.07083615875244141, 0.07259750366210938, 0.07069081878662109, 0.07055506896972656, 0.07256326293945313, 0.07136460876464844, 0.070866943359375, 0.07139881896972657, 0.07094313812255859, 0.07096339416503906, 0.07097708892822266, 0.07068252563476562, 0.07061968231201173, 0.07880403137207032, 0.07280287933349609, 0.07096768188476563, 0.07074742126464843, 0.07064217376708984, 0.07061894226074218, 0.07052944183349609, 0.07043891143798828, 0.07056179046630859, 0.07122537231445313, 0.07080143737792968, 0.07058767700195312, 0.0709126739501953, 0.07315894317626953, 0.071231201171875, 0.07085260772705078, 0.07083766174316407, 0.07084095764160156, 0.07102051544189453, 0.07075043487548828, 0.07055648040771484, 0.0705914535522461, 0.0705987548828125, 0.07146530914306641, 0.07075555419921875, 0.07068144226074219, 0.07077279663085938, 0.07155506896972656, 0.07107174682617187, 0.07114752197265625, 0.07154278564453125, 0.0708619842529297, 0.07033344268798829, 0.0704776611328125, 0.07071481323242187, 0.07077740478515625, 0.07086835479736328, 0.07092444610595704, 0.07070563507080078, 0.07065727996826172, 0.07051945495605469, 0.0708109130859375, 0.07114806365966797, 0.07220867156982422, 0.07109356689453125, 0.07085126495361328, 0.0708505630493164, 0.07080941009521484, 0.07172223663330078, 0.0710973129272461, 0.07069283294677735, 0.07064780426025391, 0.07106124877929687, 0.07033452606201172, 0.07056329345703125, 0.07033277130126953, 0.07063088226318359, 
0.07098655700683594, 0.07056092834472656, 0.07041734313964844, 0.07046710205078124, 0.07069487762451172, 0.07391693115234375, 0.07084416198730469, 0.07051494598388672, 0.07085244750976563, 0.07066336059570312, 0.07011631774902344, 0.07033184051513672, 0.07031561279296875, 0.07028630065917969, 0.07051673889160157, 0.0705692138671875, 0.07028607940673828, 0.07030374145507813, 0.07029759979248047, 0.0704368667602539, 0.07041228485107422, 0.07086080169677735, 0.0708853759765625, 0.0708016357421875, 0.07030691528320313, 0.07021762847900391, 0.07097142028808594, 0.07409331512451171, 0.07249305725097656, 0.07143424224853516, 0.07074610900878907, 0.07087049865722657, 0.07055209350585938, 0.07052214050292968, 0.07068959808349609, 0.0706869125366211, 0.07087484741210938, 0.07137184143066407, 0.0708371810913086, 0.07038288116455078, 0.07062995147705078, 0.07150406646728516, 0.07058739471435548, 0.07209193420410157, 0.07091065979003906, 0.07134815979003906, 0.07040582275390625, 0.07039955139160156, 0.07054589080810547, 0.07055753326416016, 0.07064832305908203, 0.07089971160888672, 0.07028096008300781, 0.07053670501708985, 0.07074073791503906, 0.0708116455078125, 0.07074179077148438, 0.07035107421875, 0.07220223999023437, 0.0708136978149414, 0.07097548675537109, 0.07091404724121093, 0.0726178207397461, 0.07068073272705078, 0.07080095672607421, 0.07090774536132813, 0.07076310729980469, 0.07130726623535157, 0.07053311920166015, 0.07114777374267578, 0.07155072021484375, 0.07188668823242188, 0.07122102355957032, 0.07095948791503906, 0.07075635528564453, 0.07047372436523437, 0.07044873809814453, 0.07059292602539062, 0.0711905288696289, 0.07155046081542969, 0.07074428558349609, 0.07069725036621094, 0.07039081573486328, 0.07107683563232423, 0.07158112335205079, 0.07081017303466797, 0.0704901123046875, 0.07084646606445312, 0.0709959716796875, 0.07096524810791016, 0.07038982391357422, 0.07045113372802735, 0.07112054443359375, 0.07032115173339844, 0.07032627105712891, 0.0706388168334961, 0.07073792266845703, 0.07060275268554687, 0.07030659484863282, 0.0710770263671875, 0.0704623031616211, 0.07123558044433594, 0.07064575958251954, 0.07049420928955077, 0.07053276824951171, 0.07053135681152344, 0.0706785888671875, 0.07062493133544921, 0.07032252502441406, 0.07049436950683594, 0.07055753326416016, 0.0704040298461914, 0.07036524963378907, 0.07252582550048828, 0.07120706939697266, 0.07076592254638672, 0.07050495910644532, 0.07052288055419922, 0.0708152618408203, 0.0709513931274414, 0.07067852783203125, 0.07056787109375, 0.07051884460449219, 0.07078912353515625, 0.07112908935546874, 0.07061404418945312, 0.070600830078125, 0.07135727691650391, 0.07078272247314453, 0.0709145278930664, 0.07082351684570312, 0.07047801971435547, 0.07072940826416016, 0.07049782562255859, 0.07066413116455078, 0.07090873718261718, 0.07086083221435546, 0.070552734375, 0.07077721405029297, 0.07060527801513672, 0.0705249252319336, 0.07071903991699219, 0.07097395324707031, 0.07241091156005859, 0.07059085083007813, 0.07046121978759766, 0.07081910705566406, 0.0711605453491211, 0.07110246276855468, 0.07098178863525391, 0.07060377502441406, 0.0712364501953125, 0.07089084625244141, 0.07063961791992188, 0.07057625579833984, 0.07134169769287109, 0.07097318267822265, 0.07084662628173828, 0.07036799621582031, 0.0706409912109375, 0.07095958709716797, 0.07059571075439453, 0.07065885162353516, 0.07106320190429688, 0.07083830261230468, 0.07051251220703125, 0.0706739501953125, 0.07026166534423828, 0.07069302368164063, 0.07066607666015624, 0.07089356994628906, 
0.07053517150878906, 0.07059455871582031, 0.07077683258056641, 0.071005859375, 0.07069452667236328, 0.07059324645996094, 0.07118643188476563, 0.07061238098144532, 0.07102845001220703, 0.07067273712158204, 0.07048451232910156, 0.07065705871582031, 0.07072866821289063, 0.07075750732421875, 0.07068899536132812, 0.07239103698730469, 0.07071769714355469, 0.07071699523925781, 0.07351356506347656, 0.07090089416503906, 0.0708831024169922, 0.07106861114501953, 0.07084207916259766, 0.07068841552734376, 0.07121369934082031, 0.07088925170898437, 0.07464572906494141, 0.07521279907226562, 0.07100550079345704, 0.0706116485595703, 0.07071539306640626, 0.07072096252441407, 0.0705910415649414, 0.07098566436767578, 0.07142940521240235, 0.07209468841552734, 0.07101200103759765, 0.07069500732421875, 0.07025465393066406, 0.07114137268066406, 0.0709591064453125, 0.07066012573242188, 0.07045718383789062, 0.07040217590332032, 0.07075820922851563, 0.0708834228515625, 0.07056966400146485, 0.07142809295654297, 0.07046546936035156, 0.07078713226318359, 0.07031193542480468, 0.07018291473388671, 0.07042457580566407, 0.07035660552978516, 0.07053526306152344, 0.07038745880126954, 0.07057472229003907, 0.07076649475097656, 0.07033360290527343, 0.07034966278076171, 0.07026592254638672, 0.07045590209960938, 0.07009510040283203, 0.07058441925048828, 0.07030979156494141, 0.07018422698974609, 0.07020626831054687, 0.07141903686523438, 0.07040496063232422, 0.07144019317626953, 0.07343325042724609, 0.07129817962646484, 0.07028419494628907, 0.07038710021972656, 0.0702529296875, 0.07038899230957031, 0.07034764862060547, 0.07023216247558593, 0.07021692657470703, 0.07054006195068359, 0.07048191833496094, 0.0703272933959961, 0.07059353637695312, 0.0702784652709961, 0.07064765167236328, 0.07167062377929688, 0.070723388671875, 0.07070333099365235, 0.07077232360839844, 0.07072377777099609, 0.07058614349365235, 0.07063414764404297, 0.07063836669921875, 0.07028835296630859, 0.07028498840332031, 0.07024793243408203, 0.07062201690673828, 0.07011532592773438, 0.07038140869140624, 0.07075555419921875, 0.07050937652587891, 0.07034687805175781, 0.07022755432128906, 0.07036067199707031, 0.07034349060058594, 0.07044096374511719, 0.0705650863647461, 0.07081858825683594, 0.07020706939697266, 0.07021158599853515, 0.07096236419677734, 0.0704145278930664, 0.0713158721923828, 0.07061129760742188, 0.07017664337158203, 0.07040831756591796, 0.07153024291992187, 0.07133004760742187, 0.07097510528564453, 0.07037289428710937, 0.07066687774658204, 0.07092610931396484, 0.07070870208740235, 0.07027196502685547, 0.07040169525146485, 0.07048636627197266, 0.07035903930664063, 0.07049791717529297, 0.07046086120605469, 0.0705230712890625, 0.07023423767089844, 0.07049696350097656, 0.07059859466552734, 0.07045085144042969, 0.07089353942871093, 0.07105369567871093, 0.07047577667236328, 0.0702586898803711, 0.07043251037597656, 0.0703911361694336, 0.07066242980957031, 0.07089421081542968, 0.07199932861328125, 0.07080131530761719, 0.07094092559814454, 0.07093043518066407, 0.0730808334350586, 0.07124787139892579, 0.07081983947753906, 0.07071129608154297, 0.07064335632324219, 0.07046556854248047, 0.07041804504394532, 0.07070381164550782, 0.07068582153320313, 0.07333567810058594, 0.07108812713623047, 0.07037920379638672, 0.07080172729492187, 0.07075218963623046, 0.07120902252197266, 0.07094477081298828, 0.07118768310546875, 0.07095999908447266, 0.07060057830810547, 0.07037750244140625, 0.07032572937011719, 0.070506591796875, 0.07099231719970703, 0.07102198028564453, 
0.07057437133789063, 0.07057395172119141, 0.07054771423339844, 0.07175580596923828, 0.07081254577636718, 0.07087935638427735, 0.07074301147460937, 0.07067945861816406, 0.0707460479736328, 0.07059811401367187, 0.07039475250244141, 0.07159164428710937, 0.07074816131591796, 0.07076777648925782, 0.07099273681640625, 0.07157759857177734, 0.07054541015625, 0.07107164764404297, 0.07052297973632812, 0.07049420928955077, 0.07063756561279297, 0.07048191833496094, 0.07070515441894532, 0.07049954986572265, 0.0703497314453125, 0.07041792297363281, 0.07064614105224609, 0.0743605728149414, 0.07126246643066406, 0.07112713623046875, 0.07099785614013672, 0.07064985656738282, 0.07053932952880859, 0.07277072143554687, 0.07059542083740235, 0.07107750701904297, 0.07070105743408203, 0.07057849884033203, 0.07053523254394531, 0.07065599822998046, 0.07082598114013672, 0.07135542297363281, 0.07117922973632812, 0.07072358703613281, 0.0710257568359375, 0.07160924530029297, 0.07080726623535157, 0.07448194885253906, 0.07059619140625, 0.07071135711669922, 0.07073545837402344, 0.07062937927246093, 0.07088998413085937, 0.07075225830078125, 0.07106735992431641, 0.07153862762451171, 0.07262064361572265, 0.07231488037109375, 0.07083952331542968, 0.07068956756591797, 0.0714734115600586, 0.0712475814819336, 0.07144064331054688, 0.07090889739990235, 0.07123753356933593, 0.07112745666503906, 0.0716693115234375, 0.07091785430908203, 0.0713551025390625, 0.07081484985351562, 0.07135935974121094, 0.07079936218261719, 0.07024639892578124, 0.07066445159912109, 0.0706618881225586, 0.07078688049316406, 0.07049791717529297, 0.07132399749755859, 0.07137423706054688, 0.07059334564208984, 0.07080499267578125, 0.07037776184082031, 0.07055506896972656, 0.07070374298095704, 0.07093673706054687, 0.07076863861083985, 0.07032217407226563, 0.07019945526123048, 0.0703936996459961, 0.07029145812988281, 0.07045132446289062, 0.07063948822021485, 0.07061199951171875, 0.07039228820800782, 0.07030630493164063, 0.0704307861328125, 0.07062448120117187, 0.07021180725097656, 0.07095321655273437, 0.0707499237060547, 0.07210646057128907, 0.07132371520996093, 0.07019036865234375, 0.07308096313476563, 0.07116041564941407, 0.0705249252319336, 0.07141785430908203, 0.07107788848876953, 0.07069200134277344, 0.0709142074584961, 0.0705072021484375, 0.07089065551757813, 0.07252003479003906, 0.07098201751708984, 0.07065312194824219, 0.07062016296386718, 0.0703917465209961, 0.0703543701171875, 0.07040211486816406, 0.07037798309326172, 0.07085670471191406, 0.07096934509277343, 0.07034880065917969, 0.07049945831298828, 0.07056988525390626, 0.07042499542236329, 0.07063021087646484, 0.07052877044677734, 0.07037522888183594, 0.07129193878173828, 0.07080854034423828, 0.07057631683349609, 0.07086214447021484, 0.07057593536376953, 0.0708795166015625, 0.07116841888427734, 0.07114137268066406, 0.07068450927734375, 0.0706786880493164, 0.07074400329589844, 0.07097058868408203, 0.07106598663330078, 0.07151663970947265, 0.07091404724121093, 0.0706447982788086, 0.0706118392944336, 0.0704983673095703, 0.07052288055419922, 0.07061504364013672, 0.0703446044921875, 0.07058236694335937, 0.07039180755615235, 0.07038771057128906, 0.0713084487915039, 0.07264137268066406, 0.0722059555053711, 0.07157183837890625, 0.07116387176513672, 0.07062531280517578, 0.07033241271972657, 0.07024540710449219, 0.07049292755126953, 0.07048623657226563, 0.07038307189941406, 0.07030223846435547, 0.07042412567138671, 0.07016480255126953, 0.07041648101806641, 0.07060688018798827, 0.07093673706054687, 
0.0708544921875, 0.07056934356689454, 0.0703348159790039, 0.07058460998535156, 0.07039900970458984, 0.07093756866455078, 0.0707747802734375, 0.07093193817138672, 0.07078092956542968, 0.07055142211914063, 0.070771484375, 0.07063056182861328, 0.0706645736694336, 0.07052323150634765, 0.07096729278564454, 0.0705261459350586, 0.07032710266113282, 0.0704901123046875, 0.07070671844482422, 0.07069318389892579, 0.07048572540283203, 0.07055609893798828]",tokens/s,14.111822011199616,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,994.603008,896.466944,0.0,501.219328,495.906816,s,1,8.4486875,8.4486875,0.0,8.4486875,8.4486875,8.4486875,8.4486875,[8.4486875],,kWh,3.8634957929139084e-05,4.2523124273827515e-06,1.3758344339989681e-05,5.6645614696511517e-05,,MB,1280.659456,1060.0448,0.0,652.214272,602.88,s,10,0.6205229492187501,0.06205229492187501,0.00027595415758395323,0.06209208106994629,0.062417222595214844,0.06243381195068359,0.062447083435058594,"[0.06241353607177735, 0.06221529769897461, 0.0617476806640625, 0.06200137710571289, 0.06179667282104492, 0.06245040130615234, 0.062107616424560544, 0.06153657531738281, 0.06217724609375, 0.062076545715332034]",tokens/s,4125.552492817691,kWh,1.9457863596111968e-06,2.1457113460483855e-07,1.2915362184133065e-06,3.4518937126293417e-06,tokens/kWh,74162190.76021384,MB,1305.849856,1072.627712,0.0,664.797184,611.073536,s,10,22.119763916015625,2.2119763916015627,0.00939409823685884,2.211623046875,2.2202459716796876,2.2256539916992186,2.229980407714844,"[2.219044189453125, 2.212201416015625, 2.21731494140625, 2.19991943359375, 2.215291748046875, 2.195977294921875, 2.23106201171875, 2.211044677734375, 2.20692236328125, 2.21098583984375]",tokens/s,28.481316635746452,kWh,6.477474127580723e-05,7.144476916506505e-06,2.7728792553385804e-05,9.964801074569954e-05,tokens/kWh,632225.3653489903,,s,630,22.11338684844971,0.03510061404515827,0.0005865970056677282,0.03507145500183105,0.03545301055908203,0.03568929996490478,0.038359202995300296,"[0.0351126708984375, 0.03557414245605469, 0.03589945602416992, 0.03536259078979492, 0.035674175262451174, 0.035448833465576174, 0.03554924774169922, 0.035574081420898435, 0.03562662506103516, 0.03541100692749023, 0.03543145751953125, 0.035452831268310545, 0.03558758544921875, 0.035260128021240233, 0.035433246612548826, 0.03536041641235352, 0.03590339279174805, 0.03526726531982422, 0.03520223999023438, 0.03492534255981445, 0.03495948791503906, 0.03528476715087891, 0.03514761734008789, 0.03509475326538086, 0.035184417724609375, 0.03508652877807617, 0.035114334106445315, 0.03512985610961914, 0.034936511993408206, 0.034758975982666016, 0.03501670455932617, 0.034860607147216796, 0.03490822219848633, 0.034926239013671874, 0.03498262405395508, 0.03831635284423828, 0.03570156860351562, 0.03544521713256836, 0.03524240112304688, 0.035202465057373046, 0.03534864044189453, 0.035238048553466794, 0.03510025787353516, 
0.0351157112121582, 0.03524790573120117, 0.035093055725097654, 0.034854560852050784, 0.034721790313720705, 0.03489708709716797, 0.03475270462036133, 0.03469724655151367, 0.03503779220581055, 0.035046527862548825, 0.03531158447265625, 0.035201152801513674, 0.03507238388061523, 0.03494134521484375, 0.03484595108032226, 0.03459968185424805, 0.03454576110839844, 0.03463270568847656, 0.03485580825805664, 0.034942657470703124, 0.03491551971435547, 0.03513631820678711, 0.03532185745239258, 0.03533004760742187, 0.03557894515991211, 0.03553580856323242, 0.035280895233154294, 0.03533004760742187, 0.03533811187744141, 0.035356895446777344, 0.0352845458984375, 0.035283294677734375, 0.03526812744140625, 0.03526294326782226, 0.035264511108398434, 0.03535676956176758, 0.03535795211791992, 0.035447391510009765, 0.03543584060668945, 0.03534924697875977, 0.03537926483154297, 0.035393470764160155, 0.03552851104736328, 0.03523731231689453, 0.035364864349365234, 0.03544140625, 0.03542547225952149, 0.03530124664306641, 0.03548067092895508, 0.03467875289916992, 0.03591350555419922, 0.034665664672851565, 0.034560928344726564, 0.034730144500732425, 0.03486294555664062, 0.035142814636230466, 0.035093345642089845, 0.034975200653076174, 0.03497366333007813, 0.03481011199951172, 0.035618942260742185, 0.03539737701416015, 0.035146175384521486, 0.035182144165039064, 0.035130142211914066, 0.034631328582763674, 0.03448524856567383, 0.0349725456237793, 0.03505779266357422, 0.035116737365722656, 0.03496172714233398, 0.035009727478027344, 0.0348782730102539, 0.03472592163085937, 0.03425088119506836, 0.03435625457763672, 0.034087390899658204, 0.034748767852783205, 0.03445145416259766, 0.035674304962158204, 0.03441424179077148, 0.03482598495483399, 0.03504579162597656, 0.03451145553588867, 0.03463987350463867, 0.03446988677978516, 0.03454771041870117, 0.03462368011474609, 0.034899776458740234, 0.0344923210144043, 0.03473622512817383, 0.03471308898925781, 0.03475868988037109, 0.034950847625732424, 0.03530831909179687, 0.034971649169921876, 0.03509689712524414, 0.03894240188598633, 0.035402751922607424, 0.03493180847167969, 0.03510236740112305, 0.03494732666015625, 0.03525379180908203, 0.035011039733886716, 0.03528051376342774, 0.034959743499755856, 0.03500032043457031, 0.034936832427978515, 0.03508243179321289, 0.03505078506469726, 0.035050079345703124, 0.03512928009033203, 0.03934620666503906, 0.035218463897705075, 0.03531846237182617, 0.038376705169677734, 0.03860201644897461, 0.03553875350952149, 0.03518422317504883, 0.03522592163085937, 0.035053569793701174, 0.03519184112548828, 0.03528799819946289, 0.03520719909667969, 0.035098430633544925, 0.0349796142578125, 0.03542406463623047, 0.03494911956787109, 0.03481270217895508, 0.034587871551513674, 0.03449484634399414, 0.03468230438232422, 0.03518726348876953, 0.03486742401123047, 0.034609119415283204, 0.03445955276489258, 0.03461676788330078, 0.03499305725097656, 0.03498783874511719, 0.035041343688964846, 0.03525027084350586, 0.035006271362304685, 0.03481190490722656, 0.0345824966430664, 0.034512096405029294, 0.03441132736206055, 0.03445977783203125, 0.03475715255737305, 0.034645694732666016, 0.03471769714355469, 0.03506774520874024, 0.03495100784301758, 0.03495910263061523, 0.035109344482421874, 0.03479951858520508, 0.03456982421875, 0.034552417755126956, 0.03465574264526367, 0.03469372940063477, 0.03514767837524414, 0.03476275253295898, 0.0346512336730957, 0.03462236785888672, 0.034586814880371096, 0.034790431976318356, 0.034775489807128905, 0.034812255859375, 
0.03512934494018555, 0.03502035140991211, 0.03484102249145508, 0.03490329742431641, 0.03491712188720703, 0.03474959945678711, 0.034628257751464844, 0.034568382263183595, 0.03497356796264649, 0.03511939239501953, 0.03506159973144531, 0.03589862442016602, 0.03515228652954101, 0.03505920028686523, 0.035070526123046876, 0.035327617645263674, 0.03505014419555664, 0.035021984100341796, 0.03507843017578125, 0.0350555191040039, 0.03513116836547851, 0.03546406555175781, 0.03517200088500977, 0.0350211181640625, 0.03512944030761719, 0.035026016235351565, 0.0350126724243164, 0.03526531219482422, 0.03521305465698242, 0.035190078735351564, 0.035068641662597655, 0.035168479919433594, 0.03513350296020508, 0.03452627182006836, 0.034570175170898436, 0.03462665557861328, 0.034717536926269534, 0.03456998443603516, 0.03460742568969727, 0.03470739364624023, 0.03466649627685547, 0.034603073120117185, 0.034716350555419925, 0.03494400024414063, 0.03891279983520508, 0.03493478393554687, 0.03469209671020508, 0.03595110321044922, 0.03456460952758789, 0.034613536834716796, 0.03448393630981445, 0.03455590438842773, 0.03481808090209961, 0.03498137664794922, 0.03517817687988281, 0.03507689666748047, 0.03498937606811523, 0.03521782302856445, 0.035049758911132815, 0.03480947113037109, 0.0345615348815918, 0.03470630264282227, 0.03473231887817383, 0.03489555358886719, 0.03483820724487305, 0.03485935974121094, 0.03474774551391602, 0.03495951843261719, 0.03492505645751953, 0.03485696029663086, 0.03551174545288086, 0.03498995208740235, 0.034903934478759766, 0.03457228851318359, 0.03510150527954101, 0.03473180770874024, 0.035160289764404294, 0.035043582916259766, 0.034860256195068356, 0.03465097427368164, 0.034743999481201174, 0.03484262466430664, 0.03502460861206055, 0.03516969680786133, 0.035246177673339846, 0.03530217742919922, 0.03531283187866211, 0.035334014892578126, 0.0352239990234375, 0.03513600158691406, 0.035176448822021485, 0.035323680877685545, 0.03515846252441406, 0.035171329498291014, 0.0352694091796875, 0.03531897735595703, 0.035238304138183595, 0.03524240112304688, 0.035272705078125, 0.03530319976806641, 0.035215263366699216, 0.03525254440307617, 0.035176448822021485, 0.03514777755737305, 0.039908702850341794, 0.03512070465087891, 0.035621086120605466, 0.035105182647705076, 0.035385665893554685, 0.03503897476196289, 0.0352891845703125, 0.034912094116210934, 0.03475251388549805, 0.034680831909179685, 0.03449856185913086, 0.03427123260498047, 0.03423040008544922, 0.034242496490478516, 0.03425001525878906, 0.034401248931884766, 0.03459471893310547, 0.034770942687988284, 0.03512022399902344, 0.03484332656860351, 0.034988033294677735, 0.035339679718017575, 0.035270942687988284, 0.035227649688720705, 0.03452131271362305, 0.03538761520385742, 0.0344290885925293, 0.034864192962646486, 0.034149024963378904, 0.033836544036865236, 0.034018878936767576, 0.03563375854492187, 0.03482659149169922, 0.03513126373291016, 0.035118366241455076, 0.035028961181640624, 0.03463894271850586, 0.034204608917236326, 0.03512815856933594, 0.03405209732055664, 0.03398451232910156, 0.033941505432128906, 0.03383327865600586, 0.033780960083007815, 0.03409945678710938, 0.0343361930847168, 0.03452617645263672, 0.036985694885253904, 0.03516336059570312, 0.03522124862670899, 0.03542015838623047, 0.03493199920654297, 0.03482313537597656, 0.0347562255859375, 0.03516950225830078, 0.035353504180908206, 0.036014110565185546, 0.03511088180541992, 0.035005950927734376, 0.035023040771484375, 0.03509859085083008, 0.03525651168823242, 0.03530153656005859, 
0.03522895812988281, 0.035417377471923826, 0.03597769546508789, 0.03557708740234375, 0.03543308639526367, 0.03523417663574219, 0.03529523086547852, 0.03521763229370117, 0.03518032073974609, 0.0352749137878418, 0.035273662567138674, 0.035289825439453124, 0.03546121597290039, 0.03527478408813477, 0.035501953125, 0.03529987335205078, 0.03545462417602539, 0.03551395034790039, 0.03515750503540039, 0.036289440155029294, 0.035757568359375, 0.03534819030761719, 0.03638761520385742, 0.035377086639404295, 0.03550419235229492, 0.03552793502807617, 0.035506271362304685, 0.035407711029052734, 0.03537977600097656, 0.03527091217041016, 0.03586294555664062, 0.035288673400878906, 0.034944286346435545, 0.03502892684936523, 0.03506201553344727, 0.035164703369140626, 0.035224990844726564, 0.035289024353027346, 0.03541788864135742, 0.03537395095825195, 0.03600588989257812, 0.037115169525146485, 0.03641593551635742, 0.03547571182250977, 0.03584617614746094, 0.03475251388549805, 0.03469327926635742, 0.034996063232421874, 0.03527078247070312, 0.035211135864257816, 0.03514134216308594, 0.03482870483398438, 0.035030143737792965, 0.03531372833251953, 0.03510752105712891, 0.03518259048461914, 0.034960670471191405, 0.03517897415161133, 0.0349516487121582, 0.03689039993286133, 0.036117568969726566, 0.035199935913085935, 0.03480780792236328, 0.03471152114868164, 0.034812641143798825, 0.03526566314697266, 0.0352039680480957, 0.03511920166015625, 0.03495091247558594, 0.03519657516479492, 0.03501311874389648, 0.03514940643310547, 0.0351932487487793, 0.03530137634277344, 0.03512662506103516, 0.03570684814453125, 0.03523551940917969, 0.035953662872314454, 0.03540108871459961, 0.03538950347900391, 0.03565011215209961, 0.03521331024169922, 0.035186817169189456, 0.03515955352783203, 0.035153854370117185, 0.03523952102661133, 0.0352182731628418, 0.035211231231689455, 0.03513142395019531, 0.0352973747253418, 0.0353155517578125, 0.03511299133300781, 0.035284000396728514, 0.03498073577880859, 0.034983745574951174, 0.03483004760742187, 0.034754337310791014, 0.0354947509765625, 0.03483606338500977, 0.03491068649291992, 0.03502012634277344, 0.034820350646972656, 0.034831871032714845, 0.035324737548828124, 0.03503104019165039, 0.03504563140869141, 0.03499184036254883, 0.03493686294555664, 0.034783550262451175, 0.03476886367797852, 0.03474812698364258, 0.03490345764160156, 0.03504188919067383, 0.03487644958496094, 0.03490208053588867, 0.034764801025390625, 0.034828510284423825, 0.03478316879272461, 0.034921150207519534, 0.03489324951171875, 0.03473916625976563, 0.03475027084350586, 0.0350022087097168, 0.03523583984375, 0.03505900955200195, 0.03497644805908203, 0.035415103912353516, 0.03449555206298828, 0.034870208740234374, 0.035170143127441406, 0.03528086471557617, 0.03479692840576172, 0.03482259368896484, 0.034840545654296874, 0.03468124771118164, 0.03472771072387695, 0.03478345489501953, 0.03480166244506836, 0.03495913696289062, 0.035126945495605466, 0.03502748870849609, 0.0353485107421875, 0.03482771301269531, 0.03468860626220703, 0.034732257843017575, 0.03509913635253906, 0.035129791259765626, 0.03503494262695313, 0.0350797119140625, 0.0350684814453125, 0.03504710388183594, 0.03512937545776367, 0.03523356628417969, 0.03533635330200195, 0.035358814239501955, 0.03732863998413086, 0.03542422485351562, 0.035332542419433594, 0.03531980895996094, 0.035259807586669925, 0.03497654342651367, 0.03517987060546875, 0.035127777099609375, 0.03513958358764648, 0.035108062744140626, 0.03527350234985351, 0.034952255249023435, 0.035164447784423826, 
0.0349211196899414, 0.03482009506225586, 0.03483238220214844, 0.034699264526367186, 0.03475225448608398, 0.03466880035400391, 0.034551807403564457, 0.035094528198242186, 0.03495734405517578, 0.03490963363647461, 0.034871871948242185, 0.03496239852905274, 0.034974624633789066, 0.035200191497802735, 0.034885921478271485, 0.03484735870361328, 0.03473574447631836, 0.034811550140380856, 0.03478790283203125, 0.035279006958007814, 0.03493478393554687, 0.034686782836914065, 0.0349733772277832, 0.03492963027954102, 0.03680444717407227, 0.03563520050048828, 0.03585833740234375, 0.03510847854614258, 0.034700000762939456, 0.03443891143798828, 0.03441254425048828, 0.03423027038574219, 0.03421404647827148, 0.03423385620117188, 0.03423862457275391, 0.034516769409179686, 0.03477148818969727, 0.034797569274902344, 0.0349716796875, 0.034996192932128904, 0.03537452697753906, 0.03494166564941406, 0.03495292663574219, 0.034698528289794923, 0.03468352127075195, 0.03482588958740234, 0.03849667358398438, 0.03511004638671875, 0.03492678451538086, 0.03505984115600586, 0.03507868957519531, 0.035030399322509766, 0.03509868621826172, 0.03539820861816406, 0.035446815490722657, 0.03536281585693359, 0.035631103515625, 0.035155998229980466, 0.03510879898071289, 0.03529264068603516, 0.035146495819091794, 0.035123008728027344, 0.035213409423828126, 0.035081344604492186, 0.03515235137939453, 0.03511072158813477, 0.035101184844970705, 0.03513267135620117, 0.035262622833251954, 0.03550252914428711, 0.03518457412719726, 0.03514380645751953, 0.0351802864074707, 0.03521577453613281, 0.035447742462158205, 0.03523625564575195, 0.03482479858398437, 0.03480166244506836, 0.034549983978271484, 0.03442374420166015, 0.03471651077270508, 0.03474991989135742, 0.03504105758666992, 0.035176223754882815, 0.0353109130859375]",tokens/s,28.489530089515306,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1036.218368,1613.692928,0.0,1218.445312,1206.173696,s,1,9.4240576171875,9.4240576171875,0.0,9.4240576171875,9.4240576171875,9.4240576171875,9.4240576171875,[9.4240576171875],,kWh,6.484195211666777e-05,7.142521866360762e-06,2.3296685304000087e-05,9.528115928702862e-05,,MB,1293.1072,1909.39136,0.0,1501.560832,1463.359488,s,10,1.9863009643554688,0.19863009643554688,0.0006513418414676444,0.19857725524902345,0.19956176300048828,0.1995944953918457,0.19962068130493166,"[0.19862535095214845, 0.19818499755859376, 0.19772694396972657, 0.19815501403808594, 0.198687744140625, 0.19785023498535156, 0.19852915954589845, 0.19962722778320313, 0.1995544891357422, 
0.19935980224609376]",tokens/s,1288.8278493237754,kWh,6.058258744047592e-06,6.68115788599339e-07,3.998846736489836e-06,1.0725221269136766e-05,tokens/kWh,23868971.425016064,MB,1315.4304,1909.39136,0.0,1501.560832,1463.362048,s,10,22.126185302734378,2.2126185302734376,0.014006153154463854,2.218113891601562,2.2226205810546875,2.2277609741210935,2.2318732885742185,"[2.221420654296875, 2.203199462890625, 2.187265625, 2.189372314453125, 2.221478271484375, 2.220020751953125, 2.2196083984375, 2.216619384765625, 2.2329013671875, 2.214299072265625]",tokens/s,28.473050884290657,kWh,6.384394182636952e-05,7.0419013335482064e-06,3.5243406879511035e-05,0.00010612925003942876,tokens/kWh,593615.8031512939,,s,630,22.122512981414793,0.03511509997049967,0.0005503760512095682,0.03513702392578125,0.03562043075561524,0.035890344810485836,0.03708655132293702,"[0.035834239959716796, 0.035659454345703126, 0.035242206573486326, 0.035146846771240234, 0.03531635284423828, 0.03540768051147461, 0.03556204986572266, 0.035209217071533204, 0.035073280334472656, 0.03533484649658203, 0.034916416168212894, 0.03506995010375977, 0.035235679626464844, 0.03523139190673828, 0.03513967895507813, 0.03523215866088867, 0.03520060729980469, 0.03481020736694336, 0.03487500762939453, 0.034996673583984374, 0.03522150421142578, 0.03492409515380859, 0.03467308807373047, 0.03489129638671875, 0.035133377075195316, 0.03483292770385742, 0.03468902587890625, 0.034856704711914065, 0.035118495941162106, 0.03517436981201172, 0.03491315078735351, 0.03473612976074219, 0.03485696029663086, 0.0348485107421875, 0.0351541748046875, 0.0352685432434082, 0.0347413444519043, 0.03473712158203125, 0.035237342834472656, 0.03513753509521484, 0.03538383865356445, 0.03544009780883789, 0.03530806350708008, 0.03572531127929687, 0.036751361846923826, 0.03548345565795898, 0.035423614501953124, 0.035881248474121094, 0.03550467300415039, 0.03538739013671875, 0.03532329559326172, 0.03539206314086914, 0.03564547348022461, 0.03535257720947266, 0.03560784149169922, 0.035512577056884764, 0.03548617553710937, 0.035487743377685545, 0.03549388885498047, 0.03547891235351563, 0.03547366333007813, 0.03540620803833008, 0.035372127532958986, 0.03523632049560547, 0.03520959854125977, 0.03506105422973633, 0.035186561584472656, 0.03567494583129883, 0.03514572906494141, 0.03512934494018555, 0.03498364639282227, 0.03507952117919922, 0.03519174575805664, 0.035020801544189455, 0.03487948989868164, 0.03496755218505859, 0.03484262466430664, 0.03511040115356445, 0.03519744110107422, 0.03541401672363281, 0.03523379135131836, 0.03517440032958984, 0.035170143127441406, 0.03481411361694336, 0.034744129180908204, 0.03488172912597656, 0.034576385498046876, 0.034557823181152345, 0.03424063873291015, 0.034154495239257815, 0.03441788864135742, 0.03438467025756836, 0.037029888153076174, 0.035211231231689455, 0.03552608108520508, 0.03515043258666992, 0.034770942687988284, 0.03414220809936523, 0.033941505432128906, 0.03404569625854492, 0.03401753616333008, 0.03417292785644531, 0.034430526733398435, 0.034678848266601565, 0.03433871841430664, 0.03454729461669922, 0.03484572982788086, 0.03494083023071289, 0.03496134567260742, 0.035099967956542966, 0.0349376335144043, 0.03477078247070312, 0.0350208625793457, 0.03505871963500977, 0.03474428939819336, 0.03481292724609375, 0.03486649703979492, 0.03511711883544922, 0.03524179077148438, 0.03532185745239258, 0.035574337005615235, 0.035531009674072266, 0.03546316909790039, 0.035846145629882815, 0.035606529235839846, 0.035343486785888674, 0.0354645767211914, 
0.03505420684814453, 0.035176448822021485, 0.03544268798828125, 0.03518873596191406, 0.03529520034790039, 0.03529321670532227, 0.03527679824829102, 0.03489712142944336, 0.03487619018554688, 0.034926273345947265, 0.03464172744750976, 0.03451894378662109, 0.03409366226196289, 0.03397359848022461, 0.034136447906494144, 0.03403744125366211, 0.03415884780883789, 0.034567806243896486, 0.03496623992919922, 0.03539731216430664, 0.03526278305053711, 0.03448982238769531, 0.03434921646118164, 0.03457267379760742, 0.03461119842529297, 0.03432598495483399, 0.0340968017578125, 0.0341635856628418, 0.03441254425048828, 0.0341319694519043, 0.03420687866210938, 0.034417503356933596, 0.034432926177978516, 0.03453785705566406, 0.03518230438232422, 0.03462963104248047, 0.034336769104003906, 0.03411465454101562, 0.03420662307739258, 0.03413174438476563, 0.03431212615966797, 0.03434883117675781, 0.0346690559387207, 0.03433865737915039, 0.0342935676574707, 0.034386238098144534, 0.034772544860839846, 0.0350700798034668, 0.03532015991210938, 0.035237823486328125, 0.03465017700195312, 0.03433865737915039, 0.03420284652709961, 0.0343623046875, 0.03435238265991211, 0.034241279602050784, 0.03429935836791992, 0.034777599334716795, 0.035188766479492185, 0.036036609649658206, 0.038499744415283206, 0.03520719909667969, 0.03517129516601562, 0.03504537582397461, 0.03523993682861328, 0.035055583953857425, 0.03514780807495117, 0.03512115097045899, 0.035160064697265625, 0.03531161499023437, 0.03544220733642578, 0.03553327941894531, 0.035563518524169925, 0.034909759521484375, 0.035103168487548825, 0.03527065658569336, 0.035200191497802735, 0.035218238830566406, 0.03514108657836914, 0.035113502502441406, 0.03550207901000976, 0.03462144088745117, 0.03440639877319336, 0.034659488677978516, 0.03475129699707031, 0.034773025512695316, 0.03483852767944336, 0.03457583999633789, 0.03449494552612305, 0.03449043273925781, 0.03453952026367187, 0.03450435256958008, 0.03427363204956055, 0.03486537551879883, 0.033971134185791015, 0.03411145782470703, 0.03395651245117187, 0.0341662712097168, 0.03451337432861328, 0.03471811294555664, 0.03532783889770508, 0.03506380844116211, 0.03426303863525391, 0.03401520156860351, 0.03400668716430664, 0.03406476974487305, 0.03417683029174805, 0.034801856994628906, 0.03445916748046875, 0.034283103942871096, 0.0340447998046875, 0.03425238418579102, 0.03481209564208984, 0.03537468719482422, 0.035100318908691405, 0.03515203094482422, 0.03454035186767578, 0.034122913360595704, 0.034229057312011715, 0.0340992317199707, 0.03402547073364258, 0.03421120071411133, 0.035737823486328125, 0.035210784912109376, 0.035257217407226565, 0.03504127883911133, 0.03499008178710938, 0.03491132736206055, 0.0348675537109375, 0.03491897583007812, 0.03445897674560547, 0.03441651153564453, 0.03712080001831055, 0.035399681091308595, 0.03523583984375, 0.036782081604003904, 0.037109695434570315, 0.03582287979125977, 0.03554563140869141, 0.035506431579589846, 0.035389438629150394, 0.035415294647216794, 0.035413791656494144, 0.03547548675537109, 0.03545622253417969, 0.035366622924804685, 0.035364479064941404, 0.03538275146484375, 0.03532217788696289, 0.03549193572998047, 0.035594753265380856, 0.03574745559692383, 0.03625203323364258, 0.03552774429321289, 0.03558060836791992, 0.03555763244628906, 0.03495305633544922, 0.03484457778930664, 0.0346769905090332, 0.034590721130371094, 0.03502284622192383, 0.034991584777832034, 0.03473174285888672, 0.03564764785766601, 0.035060192108154295, 0.03518483352661133, 0.03528195190429687, 
0.03533513641357422, 0.03527801513671875, 0.03480384063720703, 0.034746623992919924, 0.03530387115478516, 0.03487321472167969, 0.03476831817626953, 0.034695552825927733, 0.03476095962524414, 0.03472339248657227, 0.034872127532958985, 0.03513516616821289, 0.03524198532104492, 0.03591696166992187, 0.03546335983276367, 0.03511324691772461, 0.03474879837036133, 0.03508838272094727, 0.0347770881652832, 0.0350511360168457, 0.03505599975585937, 0.03571875381469727, 0.035061504364013674, 0.03500838470458984, 0.03453212738037109, 0.03480575942993164, 0.0349306869506836, 0.03484000015258789, 0.03533881759643555, 0.03509833526611328, 0.03489616012573242, 0.0349268798828125, 0.03509219360351563, 0.035198272705078124, 0.03533075332641602, 0.03537705612182617, 0.03622739028930664, 0.03563875198364258, 0.03515203094482422, 0.03519705581665039, 0.03530704116821289, 0.03549542236328125, 0.035326366424560544, 0.035379806518554685, 0.03522294235229492, 0.03531235122680664, 0.03549580764770508, 0.03590332794189453, 0.035435871124267576, 0.035369407653808596, 0.035387775421142575, 0.03563888168334961, 0.03552297592163086, 0.035501888275146484, 0.03539081573486328, 0.03534499359130859, 0.0353771858215332, 0.03547558212280273, 0.03528099060058594, 0.035393024444580076, 0.03529369735717774, 0.037386302947998044, 0.035485313415527346, 0.03571744155883789, 0.0357367057800293, 0.03525875091552735, 0.035153854370117185, 0.03526099014282227, 0.034840576171875, 0.0347147216796875, 0.03497804641723633, 0.0349697265625, 0.03470956802368164, 0.034603519439697264, 0.03449238586425781, 0.034506752014160154, 0.03481804656982422, 0.03508224105834961, 0.03503440093994141, 0.034878177642822264, 0.034979808807373045, 0.03486313629150391, 0.03493427276611328, 0.035059326171875, 0.035415935516357425, 0.03494051361083984, 0.03489440155029297, 0.035221790313720705, 0.03553091049194336, 0.035016254425048826, 0.0348205451965332, 0.03479904174804688, 0.034681407928466794, 0.03489382553100586, 0.03592182540893555, 0.0352789421081543, 0.03539948654174805, 0.03545721435546875, 0.035363998413085934, 0.0354920654296875, 0.03530307388305664, 0.0352655029296875, 0.03509360122680664, 0.035120033264160154, 0.03504451370239258, 0.03522022247314453, 0.03515135955810547, 0.035243648529052735, 0.03523273468017578, 0.03518668746948242, 0.035266559600830076, 0.034987648010253905, 0.03502726364135742, 0.03532396697998047, 0.03542809677124024, 0.035696384429931644, 0.03579248046875, 0.03556444931030273, 0.03528704071044922, 0.03539936065673828, 0.03528940963745117, 0.03530956649780274, 0.035356670379638674, 0.0354977912902832, 0.03547926330566406, 0.03548003387451172, 0.035350528717041016, 0.03551027297973633, 0.03543212890625, 0.035630943298339844, 0.035633121490478516, 0.035452735900878905, 0.035364639282226565, 0.03544566345214844, 0.03556265640258789, 0.03561510467529297, 0.0349920654296875, 0.03508892822265625, 0.034659713745117185, 0.034736766815185546, 0.03424415969848633, 0.03446623992919922, 0.03449856185913086, 0.03477459335327148, 0.0355615348815918, 0.03523827362060547, 0.03491779327392578, 0.03475545501708984, 0.03476070404052734, 0.03522355270385742, 0.0367646713256836, 0.03509891128540039, 0.03554377746582031, 0.034874366760253905, 0.0345241584777832, 0.03429580688476563, 0.034342910766601564, 0.0344567985534668, 0.0344699821472168, 0.03467129516601562, 0.03467193603515625, 0.034624191284179685, 0.03455385589599609, 0.034624671936035155, 0.034640609741210936, 0.03495686340332031, 0.03533062362670898, 0.03586969757080078, 
0.03521638488769531, 0.03489177703857422, 0.034862529754638674, 0.03449033737182617, 0.034226463317871096, 0.03459427261352539, 0.034336990356445315, 0.03495337677001953, 0.03497622299194336, 0.03616358566284179, 0.035243358612060544, 0.0354699821472168, 0.0353546257019043, 0.03530752182006836, 0.03498393630981445, 0.03502489471435547, 0.03458819198608398, 0.03580096054077148, 0.0356192626953125, 0.035313121795654295, 0.036536510467529294, 0.035461631774902344, 0.03524784088134766, 0.035247936248779296, 0.03529331207275391, 0.03537516784667969, 0.03537539291381836, 0.0355491828918457, 0.035776512145996094, 0.03547750473022461, 0.03540316772460937, 0.0353364143371582, 0.03523417663574219, 0.035383296966552735, 0.03549967956542969, 0.03543689727783203, 0.03537209701538086, 0.03613792037963867, 0.03610201644897461, 0.0354837760925293, 0.035532768249511716, 0.03546268844604492, 0.035348480224609374, 0.03739852905273437, 0.03537919998168945, 0.035896320343017575, 0.03534038543701172, 0.03503811264038086, 0.035034175872802734, 0.03514054489135742, 0.035964641571044925, 0.03623555374145508, 0.03521852874755859, 0.035772865295410156, 0.034969470977783204, 0.035022815704345706, 0.035076736450195316, 0.03474771118164063, 0.035490497589111325, 0.034887680053710936, 0.03568435287475586, 0.03553843307495117, 0.03615212631225586, 0.035347999572753905, 0.03517987060546875, 0.03491904067993164, 0.035001953125, 0.03480806350708008, 0.03579939270019531, 0.034927806854248046, 0.03495139312744141, 0.034834144592285156, 0.03512368011474609, 0.034861473083496096, 0.03504467010498047, 0.034966209411621096, 0.035074047088623043, 0.03502284622192383, 0.03560857772827149, 0.035279006958007814, 0.03615727996826172, 0.03494297790527344, 0.03499980926513672, 0.034943359375, 0.03521484756469727, 0.035172065734863284, 0.03604777526855469, 0.03783712005615234, 0.03567728042602539, 0.035101280212402344, 0.03517747116088867, 0.03513651275634765, 0.035272705078125, 0.03517011260986328, 0.03841574478149414, 0.035221790313720705, 0.036342304229736326, 0.03538307189941406, 0.035786975860595704, 0.03555942535400391, 0.03530947113037109, 0.035375198364257815, 0.035438591003417966, 0.03531980895996094, 0.03537100982666016, 0.03551855850219727, 0.035337791442871094, 0.03531955337524414, 0.035340545654296875, 0.03538985443115234, 0.035019935607910155, 0.03544460678100586, 0.034786270141601565, 0.03489980697631836, 0.03477065658569336, 0.03491680145263672, 0.034796897888183596, 0.034934944152832034, 0.034926975250244144, 0.03606560134887695, 0.034899776458740234, 0.0346943359375, 0.03472409439086914, 0.03470937728881836, 0.03452384185791016, 0.03446783828735352, 0.03472150421142578, 0.0349268798828125, 0.035143680572509765, 0.03602022552490235, 0.035323070526123046, 0.03536729431152344, 0.03506016159057617, 0.03491561508178711, 0.034767742156982424, 0.03481379318237305, 0.03486492919921875, 0.034930912017822266, 0.034842079162597656, 0.03534646224975586, 0.035000831604003906, 0.03500425720214844, 0.03494879913330078, 0.03503766250610352, 0.03518409729003906, 0.034957855224609376, 0.034961406707763674, 0.03590550231933594, 0.03496758270263672, 0.03478694534301758, 0.03472592163085937, 0.03471193695068359, 0.03536816024780273, 0.034773696899414064, 0.03620678329467773, 0.03587392044067383, 0.03559436798095703, 0.03541862487792969, 0.035883041381835935, 0.03528409576416015, 0.0352402572631836, 0.03512960052490234, 0.03550249481201172, 0.035054977416992185, 0.03525081634521485, 0.03535257720947266, 0.03579289627075195, 
0.03538460922241211]",tokens/s,28.477777390356394,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1183.674368,8106.47552,0.0,7711.227904,7603.953664,s,1,18.6817109375,18.6817109375,0.0,18.6817109375,18.6817109375,18.6817109375,18.6817109375,[18.6817109375],,kWh,0.00033669053976249566,3.7132231749903233e-05,0.00012545315591799833,0.0004992759274303972,,MB,1253.863424,9870.180352,0.0,9462.349824,8756.635648,s,10,16.76980871582031,1.6769808715820311,0.007806340836157971,1.6786487426757812,1.6843089477539064,1.6844797302246093,1.6846163562011718,"[1.657221923828125, 1.6715146484375, 1.6737459716796874, 1.6762406005859376, 1.681699951171875, 1.6785615234375, 1.68427099609375, 1.6787359619140625, 1.6846505126953124, 1.6831666259765625]",tokens/s,152.65528923922346,kWh,4.876559852499895e-05,5.377804635790192e-06,3.2354359216800466e-05,8.64977623775896e-05,tokens/kWh,2959614.1329353754,MB,1275.916288,9870.180352,0.0,9462.349824,8756.638208,s,10,85.37864160156249,8.53786416015625,0.02907519521229494,8.53781201171875,8.56888759765625,8.574248486328125,8.578537197265625,"[8.4894189453125, 8.5044462890625, 8.509966796875, 8.5240771484375, 8.54167578125, 8.5339482421875, 8.56262109375, 8.5676962890625, 8.565181640625, 8.579609375]",tokens/s,7.378894629643188,kWh,0.0002507171967387508,2.7656671500138203e-05,0.0001667083000331998,0.00044508216827208894,tokens/kWh,141546.89738431995,,s,630,85.37453198242181,0.13551513013082836,0.0017829660825156956,0.13535097503662108,0.1369326644897461,0.13733006439208986,0.14550091888427735,"[0.14503750610351562, 0.1334913330078125, 0.13315423583984376, 0.13340736389160157, 0.13329714965820313, 0.13323365783691407, 0.13481744384765626, 0.1373098907470703, 0.13452137756347657, 0.13359321594238283, 0.13402316284179688, 0.1335377960205078, 0.13362995910644532, 0.1343098907470703, 0.13658262634277343, 0.13452546691894532, 0.1343544921875, 0.13419155883789063, 0.13361497497558594, 0.13364083862304688, 0.13375709533691407, 0.136114013671875, 0.1344893798828125, 0.13402799987792968, 0.13494595336914061, 0.1335386505126953, 0.13431298828125, 0.13384332275390626, 0.13475202941894532, 0.1352056884765625, 0.13469255065917968, 0.13409837341308595, 0.13504396057128906, 0.13362789916992188, 0.1340968933105469, 0.13449830627441406, 0.13580224609375, 0.1355679931640625, 0.13494886779785156, 0.13395968627929689, 0.13430400085449218, 0.13443193054199218, 0.13516473388671876, 0.1347574005126953, 0.13545053100585938, 0.1344514617919922, 0.13368173217773438, 0.13453926086425783, 0.1356366424560547, 0.13461538696289063, 0.13555917358398437, 0.13463282775878907, 0.13620903015136718, 0.13417266845703124, 0.13549977111816405, 0.1350963134765625, 0.134944580078125, 0.13505964660644532, 0.13539564514160157, 0.13575538635253906, 0.1352458953857422, 0.13436671447753906, 0.13441424560546875, 0.14488575744628907, 0.13253984069824218, 0.13325164794921876, 
0.1340702667236328, 0.13340467834472655, 0.13323622131347657, 0.13425100708007812, 0.13889564514160158, 0.134712646484375, 0.1334024963378906, 0.1339449005126953, 0.1336658172607422, 0.1336524200439453, 0.13396995544433593, 0.1373159942626953, 0.1350575714111328, 0.13430764770507814, 0.13389596557617187, 0.13401350402832032, 0.13368896484375, 0.1335422668457031, 0.13611007690429688, 0.13640908813476563, 0.13427285766601563, 0.13401884460449218, 0.13508872985839843, 0.13504620361328126, 0.13383456420898437, 0.13468150329589842, 0.13643775939941405, 0.1355018310546875, 0.13456793212890625, 0.13526425170898437, 0.13365440368652343, 0.1347624969482422, 0.13535606384277343, 0.13669424438476563, 0.13499122619628906, 0.1353877716064453, 0.1350200958251953, 0.1350528564453125, 0.13455859375, 0.13430374145507812, 0.13581517028808593, 0.13485874938964842, 0.1353502655029297, 0.1351127014160156, 0.13484831237792969, 0.13406553649902345, 0.13567674255371093, 0.13539251708984376, 0.1359797821044922, 0.1345966033935547, 0.13577830505371094, 0.13502204895019532, 0.1343877716064453, 0.13564915466308594, 0.13477743530273437, 0.1358561248779297, 0.13440614318847657, 0.13477389526367187, 0.13546524047851563, 0.1355078125, 0.145870849609375, 0.13322239685058593, 0.13324070739746094, 0.13331263732910156, 0.13322650146484374, 0.13332275390625, 0.13514956665039063, 0.13808396911621093, 0.13474234008789063, 0.1348811492919922, 0.1334417266845703, 0.13365843200683594, 0.13427558898925782, 0.13500384521484374, 0.13699468994140626, 0.13470118713378906, 0.13458822631835937, 0.13401231384277343, 0.1332825927734375, 0.13429756164550782, 0.1338880310058594, 0.13574758911132812, 0.1359418487548828, 0.13554495239257813, 0.13464796447753907, 0.133998046875, 0.1344027862548828, 0.133899169921875, 0.13530767822265624, 0.1354874267578125, 0.13577069091796876, 0.13495706176757813, 0.13450650024414063, 0.13514051818847655, 0.13405270385742188, 0.13530316162109374, 0.13647021484375, 0.1350863952636719, 0.13499801635742187, 0.13406617736816406, 0.13445712280273436, 0.1345364532470703, 0.13512188720703125, 0.1357864990234375, 0.13500355529785157, 0.1350469512939453, 0.13492921447753906, 0.13460479736328124, 0.1352948760986328, 0.13447177124023438, 0.13629849243164063, 0.1360341796875, 0.13469503784179687, 0.13681990051269532, 0.13467936706542968, 0.13575733947753907, 0.1345970916748047, 0.13604249572753907, 0.1353502655029297, 0.13544972229003907, 0.1349866180419922, 0.13528268432617188, 0.1358233642578125, 0.1459324493408203, 0.13329661560058595, 0.13322650146484374, 0.13314387512207032, 0.13316365051269533, 0.1335665283203125, 0.13628005981445312, 0.13920637512207032, 0.13460304260253905, 0.1342270965576172, 0.13344650268554686, 0.1332791748046875, 0.13422239685058593, 0.13487872314453125, 0.13769520568847657, 0.13548597717285157, 0.13462937927246094, 0.13324208068847657, 0.1344438018798828, 0.1339842529296875, 0.1348239288330078, 0.13693084716796874, 0.1371345672607422, 0.13525814819335938, 0.13387977600097656, 0.13432797241210936, 0.13362985229492189, 0.13380038452148438, 0.1358473663330078, 0.13662879943847656, 0.13572096252441407, 0.1353236541748047, 0.1346007080078125, 0.1339330596923828, 0.13425654602050782, 0.13563818359375, 0.1359014434814453, 0.13708912658691405, 0.13549411010742188, 0.13508221435546874, 0.13429318237304688, 0.1345410919189453, 0.1359302978515625, 0.13624876403808595, 0.13499040222167968, 0.135888671875, 0.13586044311523438, 0.13530490112304688, 0.13391065979003905, 0.13595440673828124, 
0.13593212890625, 0.13600358581542968, 0.13556687927246094, 0.13495578002929687, 0.13535308837890625, 0.13442726135253907, 0.13619580078125, 0.13503961181640625, 0.13604454040527345, 0.13646553039550782, 0.13585087585449218, 0.1356216278076172, 0.13602706909179688, 0.1452974395751953, 0.13383775329589845, 0.13357466125488282, 0.13369139099121094, 0.13388121032714845, 0.13359907531738283, 0.13654095458984375, 0.1398190155029297, 0.13443685913085937, 0.13391871643066405, 0.13370994567871095, 0.13338201904296876, 0.1333634490966797, 0.13611958312988282, 0.13869052124023437, 0.13641612243652343, 0.1351619873046875, 0.13416435241699218, 0.1345108184814453, 0.13352540588378906, 0.1342319030761719, 0.1381643829345703, 0.13608982849121093, 0.13523741149902344, 0.13490322875976563, 0.13481837463378907, 0.13437132263183593, 0.13480140686035155, 0.13680230712890626, 0.1365166778564453, 0.1360987548828125, 0.13479437255859375, 0.13604135131835937, 0.1338040313720703, 0.13411705017089845, 0.13706681823730468, 0.13585594177246094, 0.13595242309570313, 0.13627769470214843, 0.13578073120117187, 0.13446102905273438, 0.13472982788085938, 0.1360993347167969, 0.13635401916503906, 0.13623773193359376, 0.13641731262207032, 0.1369490203857422, 0.13575241088867188, 0.13432217407226563, 0.13574925231933593, 0.13638217163085936, 0.13664527893066405, 0.13620223999023437, 0.1355091552734375, 0.1345211486816406, 0.1363113250732422, 0.1346068420410156, 0.13503427124023437, 0.1351399383544922, 0.13730816650390626, 0.1362960968017578, 0.13546678161621092, 0.1354040985107422, 0.14780621337890626, 0.13320838928222656, 0.13316432189941407, 0.13317161560058594, 0.133212158203125, 0.13333241271972657, 0.13649772644042968, 0.13985980224609376, 0.13513743591308594, 0.13435699462890624, 0.13371186828613282, 0.1334640655517578, 0.1334878387451172, 0.13588493347167968, 0.1373231658935547, 0.13557501220703125, 0.13497708129882813, 0.13426502990722655, 0.13324688720703126, 0.13393577575683593, 0.1358031005859375, 0.13588070678710937, 0.1371279296875, 0.13555302429199217, 0.13468812561035157, 0.13397782897949218, 0.13536349487304689, 0.13538304138183593, 0.13604591369628907, 0.13689248657226563, 0.13678236389160156, 0.13455162048339844, 0.13492544555664063, 0.1339646759033203, 0.13538713073730468, 0.13589442443847657, 0.1361634521484375, 0.13719920349121092, 0.13508082580566405, 0.13513113403320312, 0.13438473510742188, 0.13481053161621093, 0.13586534118652344, 0.13508819580078124, 0.13612742614746093, 0.13577830505371094, 0.13442658996582033, 0.13456591796875, 0.13475401306152343, 0.13650767517089843, 0.13672242736816406, 0.13512908935546875, 0.1371893768310547, 0.1351334686279297, 0.13545443725585937, 0.13485865783691406, 0.13498774719238282, 0.13648696899414062, 0.13594834899902344, 0.1353191680908203, 0.13643405151367188, 0.13543177795410155, 0.13473423767089843, 0.14608828735351562, 0.1337523193359375, 0.1340894775390625, 0.13474319458007813, 0.13424234008789063, 0.1340155487060547, 0.1370951690673828, 0.13830450439453126, 0.13548646545410156, 0.13473178100585936, 0.13463763427734374, 0.13404736328125, 0.13431634521484376, 0.1371249542236328, 0.13918505859375, 0.1361297607421875, 0.13467884826660156, 0.135004638671875, 0.13536869812011718, 0.13458009338378907, 0.13550563049316405, 0.13770515441894532, 0.13660643005371093, 0.13503231811523436, 0.135531005859375, 0.13522262573242189, 0.13402793884277345, 0.13648895263671876, 0.13669187927246093, 0.1369124755859375, 0.135180419921875, 0.1355076141357422, 0.1353955841064453, 
0.13501817321777343, 0.1361864013671875, 0.13557942199707032, 0.13668783569335938, 0.1360747528076172, 0.13549618530273438, 0.13612031555175783, 0.13466447448730468, 0.1353516845703125, 0.1362660827636719, 0.13597894287109374, 0.13594834899902344, 0.13562617492675783, 0.13677215576171875, 0.13488931274414062, 0.1359687042236328, 0.13596627807617187, 0.13632374572753905, 0.13536627197265624, 0.13524755859375, 0.1369647979736328, 0.1362821044921875, 0.1362530517578125, 0.13645558166503907, 0.13577279663085937, 0.1361862030029297, 0.1370091552734375, 0.13520889282226561, 0.13632313537597657, 0.1368046112060547, 0.1455840301513672, 0.1336012725830078, 0.13409280395507814, 0.13309336853027343, 0.13383648681640625, 0.13548985290527343, 0.13782591247558593, 0.13838128662109375, 0.13501072692871094, 0.13514483642578126, 0.13361602783203125, 0.13509039306640624, 0.1341881866455078, 0.1371300506591797, 0.1392893829345703, 0.13580650329589844, 0.1352073974609375, 0.13523269653320313, 0.13443267822265625, 0.13363189697265626, 0.13652479553222657, 0.13731021118164063, 0.1376922607421875, 0.13528883361816407, 0.13543721008300783, 0.1345840301513672, 0.13496726989746094, 0.13634959411621095, 0.13623741149902344, 0.13667129516601562, 0.13594009399414062, 0.134619140625, 0.13478501892089845, 0.13531706237792968, 0.13591127014160156, 0.13635382080078126, 0.13691542053222655, 0.13547938537597656, 0.13529405212402343, 0.1360086669921875, 0.13586119079589845, 0.13474620056152345, 0.13733570861816408, 0.13639488220214843, 0.13710322570800781, 0.13610598754882813, 0.135546875, 0.13673193359375, 0.13557411193847657, 0.1362248992919922, 0.13619200134277343, 0.13749261474609376, 0.1364253387451172, 0.13604249572753907, 0.1358028869628906, 0.13529866027832033, 0.13682730102539062, 0.13697433471679688, 0.13724172973632812, 0.1361580810546875, 0.1359862060546875, 0.13632406616210938, 0.1354764404296875, 0.145797119140625, 0.13538099670410156, 0.13376307678222657, 0.13361561584472656, 0.13489884948730468, 0.13364720153808593, 0.13726934814453126, 0.13888050842285157, 0.13574700927734376, 0.13483721923828124, 0.13424339294433593, 0.13479417419433593, 0.13563285827636717, 0.13633251953125, 0.13727008056640624, 0.1366585235595703, 0.1357619171142578, 0.13385093688964844, 0.13528643798828124, 0.13527340698242188, 0.13575814819335938, 0.13584317016601563, 0.13656857299804687, 0.13531607055664063, 0.1358356475830078, 0.13492617797851564, 0.1351273651123047, 0.13570236206054687, 0.1363066864013672, 0.13705421447753907, 0.13637632751464843, 0.13619319152832032, 0.13608636474609376, 0.13488517761230467, 0.13519891357421876, 0.1350672607421875, 0.136255615234375, 0.13580227661132813, 0.13620903015136718, 0.13567202758789063, 0.13520828247070313, 0.13586051940917968, 0.1360388488769531, 0.136406982421875, 0.13674085998535157, 0.13538453674316406, 0.13651766967773438, 0.13479974365234376, 0.1364026184082031, 0.13526060485839844, 0.13714019775390626, 0.13637420654296875, 0.1359228515625, 0.1363527069091797, 0.1353564147949219, 0.13555302429199217, 0.13667123413085938, 0.13603021240234375, 0.13699026489257812, 0.13567840576171875, 0.1362124786376953, 0.13600563049316405, 0.1367403564453125, 0.14699113464355468, 0.13387472534179687, 0.13391357421875, 0.13472767639160158, 0.1345425262451172, 0.13478175354003907, 0.13681869506835936, 0.13851852416992189, 0.13541171264648438, 0.13432421875, 0.13475978088378907, 0.13447251892089843, 0.13559178161621094, 0.1364357452392578, 0.13861474609375, 0.13665267944335938, 0.135243896484375, 
0.13519155883789064, 0.13405206298828126, 0.13541030883789062, 0.13636778259277343, 0.13727798461914062, 0.13643568420410157, 0.13672857666015625, 0.13542588806152345, 0.13488758850097657, 0.13487103271484374, 0.13561155700683594, 0.1368603515625, 0.13672486877441406, 0.1361784973144531, 0.13545916748046874, 0.13529356384277344, 0.13487309265136718, 0.13665823364257812, 0.13656544494628906, 0.13702554321289062, 0.13704937744140624, 0.13605142211914062, 0.13554278564453126, 0.13460887145996095, 0.13723651123046876, 0.1362879638671875, 0.1377421417236328, 0.1365768280029297, 0.13636265563964844, 0.13587455749511718, 0.13572402954101562, 0.13536358642578125, 0.1364910125732422, 0.13646847534179687, 0.13640106201171875, 0.1365215606689453, 0.13566278076171875, 0.13716966247558593, 0.13668768310546875, 0.13660563659667968, 0.13590739440917968, 0.1374720001220703, 0.13620428466796874, 0.13658067321777342, 0.1365852508544922, 0.13644023132324218]",tokens/s,7.379249822765804,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained 
dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 296, in _quantize w = self.data.contiguous().to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1150.042112,13202.55488,0.0,12807.307264,12661.927936,s,1,26.22058203125,26.22058203125,0.0,26.22058203125,26.22058203125,26.22058203125,26.22058203125,[26.22058203125],,kWh,0.0005574009664708247,6.147828936480916e-05,0.00020758127717600272,0.0008264605330116366,,MB,1272.885248,15727.525888,0.0,15319.69536,14320.027648,s,10,30.43433520507812,3.0434335205078122,0.007328731761490402,3.0446331787109377,3.051364599609375,3.053053637695313,3.0544048681640628,"[3.02811279296875, 3.0337109375, 3.041738525390625, 3.043341796875, 3.04512890625, 3.0509892578125, 3.0469658203125, 3.044137451171875, 3.045467041015625, 3.05474267578125]",tokens/s,84.11552224649385,kWh,8.847248944041761e-05,9.758437180565623e-06,5.87711859058003e-05,0.00015700211252678354,tokens/kWh,1630551.3083865547,MB,1295.986688,15727.525888,0.0,15319.69536,14320.030208,s,10,145.58787109375,14.558787109375,0.0227658984668569,14.569966308593749,14.579945214843749,14.580775341796874,14.581439443359375,"[14.5112666015625, 14.5271318359375, 14.542541015625, 14.5587841796875, 14.5797607421875, 14.56684765625, 14.57310546875, 14.5737431640625, 14.5730849609375, 
14.58160546875]",tokens/s,4.327283552311285,kWh,0.000425607064507916,4.6947708711063024e-05,0.0002830553097773997,0.0007556100829963785,tokens/kWh,83376.33578177377,,s,630,145.58305897521964,0.23108422059558686,0.002321878640323582,0.23113728332519531,0.23276569824218749,0.2345638153076172,0.2406544161987305,"[0.23835647583007813, 0.22681805419921874, 0.22691226196289063, 0.2283701171875, 0.23533395385742187, 0.22786253356933595, 0.228238525390625, 0.2280592041015625, 0.23383724975585937, 0.22964346313476564, 0.2274272003173828, 0.22796624755859374, 0.23188954162597655, 0.2315145263671875, 0.22883705139160157, 0.22799360656738282, 0.23043600463867187, 0.2315552978515625, 0.22978009033203126, 0.22779705810546874, 0.2293472900390625, 0.23161170959472657, 0.22977603149414064, 0.2288714599609375, 0.22848899841308593, 0.23233836364746094, 0.23008566284179688, 0.22982736206054688, 0.22842387390136717, 0.2310840301513672, 0.23034060668945314, 0.23051222229003906, 0.23003996276855468, 0.23001097106933593, 0.230152099609375, 0.23047775268554688, 0.22883743286132813, 0.22996163940429687, 0.2318726043701172, 0.2311658935546875, 0.2301805419921875, 0.22902003479003907, 0.23188803100585936, 0.23133680725097655, 0.23044709777832031, 0.22952960205078124, 0.23002316284179689, 0.23109207153320313, 0.23068893432617188, 0.23061923217773436, 0.23042652893066407, 0.23081983947753906, 0.23059455871582032, 0.2311700439453125, 0.22997555541992187, 0.2327039031982422, 0.23061325073242187, 0.2315813751220703, 0.23038018798828125, 0.23189830017089844, 0.23119955444335938, 0.2314895324707031, 0.23127352905273438, 0.24072761535644532, 0.22724415588378907, 0.22727130126953124, 0.22921420288085936, 0.2344837188720703, 0.22802841186523437, 0.22699005126953126, 0.22786051940917967, 0.2346762237548828, 0.22905209350585937, 0.22809432983398437, 0.2280998077392578, 0.2327410888671875, 0.23019699096679688, 0.2286261444091797, 0.22834629821777344, 0.23072163391113282, 0.23181033325195313, 0.22893344116210937, 0.22952438354492188, 0.22937744140625, 0.23119932556152345, 0.230582275390625, 0.2286510009765625, 0.22834803771972656, 0.2320623321533203, 0.23174195861816407, 0.22911517333984374, 0.22776089477539063, 0.23132566833496093, 0.23087513732910156, 0.23090585327148438, 0.22885171508789062, 0.2310360565185547, 0.23094537353515626, 0.23201405334472655, 0.22943133544921876, 0.22956646728515626, 0.23067237854003905, 0.2320762939453125, 0.23006105041503908, 0.23008869934082032, 0.23062442016601561, 0.23252806091308595, 0.22978778076171874, 0.23117446899414062, 0.23062550354003905, 0.23214285278320312, 0.231189697265625, 0.2314714813232422, 0.2318995819091797, 0.2301317138671875, 0.2313768310546875, 0.23168415832519532, 0.2309979248046875, 0.2303468475341797, 0.23194784545898436, 0.23193394470214843, 0.23069686889648439, 0.23109686279296876, 0.2314176025390625, 0.23261415100097657, 0.23161351013183593, 0.2402891845703125, 0.2272303009033203, 0.2266926727294922, 0.23014854431152343, 0.2351820831298828, 0.22882009887695312, 0.22699224853515626, 0.2292446746826172, 0.2346293487548828, 0.22941282653808595, 0.2284610595703125, 0.2281793212890625, 0.2329054718017578, 0.2312572784423828, 0.2282239990234375, 0.22829055786132812, 0.23113523864746094, 0.23235894775390625, 0.22947724914550782, 0.22758761596679689, 0.23072006225585937, 0.23408140563964844, 0.23163958740234375, 0.22788540649414063, 0.2301063690185547, 0.23228492736816406, 0.2299099578857422, 0.22972451782226563, 0.22952572631835938, 0.23166157531738282, 0.23132070922851564, 
0.2302320251464844, 0.22983917236328125, 0.2311603240966797, 0.23226786804199218, 0.23000210571289062, 0.23015890502929687, 0.2305834503173828, 0.23212646484375, 0.2303714599609375, 0.2305780487060547, 0.23006495666503907, 0.23231082153320312, 0.23118194580078125, 0.23163273620605468, 0.2303350067138672, 0.23126010131835936, 0.2312454376220703, 0.23151046752929688, 0.23053517150878905, 0.23174143981933593, 0.23089497375488283, 0.23117999267578124, 0.23116278076171876, 0.23116595458984374, 0.23181925964355468, 0.2312806396484375, 0.2321017303466797, 0.23123779296875, 0.2318602294921875, 0.23204454040527345, 0.23186431884765624, 0.2309345245361328, 0.23917977905273438, 0.22695677185058594, 0.22765171813964843, 0.22981581115722657, 0.2359817352294922, 0.22758195495605468, 0.2278943634033203, 0.22875155639648437, 0.23475273132324218, 0.22963340759277343, 0.22814373779296876, 0.22847897338867187, 0.23596237182617188, 0.2320299530029297, 0.2285810546875, 0.229042236328125, 0.23199385070800782, 0.23244744873046874, 0.2285019226074219, 0.22831936645507814, 0.23069900512695313, 0.23281869506835937, 0.23071273803710937, 0.22923234558105468, 0.23047666931152344, 0.2316104278564453, 0.23155091857910157, 0.22983065795898439, 0.2308590087890625, 0.23196237182617188, 0.231878662109375, 0.229718017578125, 0.23108607482910157, 0.23095295715332032, 0.23377920532226562, 0.2294847412109375, 0.2306414794921875, 0.22984037780761718, 0.23274044799804688, 0.2311519012451172, 0.23209024047851562, 0.2296584014892578, 0.23304214477539062, 0.23101408386230468, 0.2315676727294922, 0.2302423095703125, 0.23238041687011718, 0.23198707580566405, 0.23102886962890626, 0.2300185546875, 0.23139173889160156, 0.23266099548339844, 0.23224070739746094, 0.23099209594726564, 0.2305631103515625, 0.23196505737304687, 0.23195091247558594, 0.23223526000976563, 0.23117593383789062, 0.2316996154785156, 0.23258198547363282, 0.23231797790527345, 0.23074327087402344, 0.24260231018066405, 0.2279239044189453, 0.22738966369628907, 0.22891523742675782, 0.23686968994140625, 0.228669189453125, 0.22704742431640626, 0.22878207397460937, 0.23506246948242188, 0.23016099548339844, 0.22863690185546875, 0.22853176879882814, 0.2520203857421875, 0.22839736938476562, 0.2279838409423828, 0.22824140930175782, 0.235610107421875, 0.23063548278808593, 0.2279956817626953, 0.22952960205078124, 0.23299842834472656, 0.23343356323242187, 0.22948809814453125, 0.228315673828125, 0.23106739807128907, 0.23308248901367187, 0.23042317199707033, 0.22841958618164063, 0.2298440704345703, 0.23230557250976563, 0.23159193420410157, 0.22995907592773437, 0.23029525756835936, 0.23235775756835939, 0.23270297241210938, 0.23148115539550781, 0.22853036499023438, 0.2310102996826172, 0.23472848510742186, 0.23138198852539063, 0.2296351318359375, 0.23110304260253905, 0.23397824096679687, 0.23166371154785156, 0.2300654754638672, 0.2309412841796875, 0.23317503356933594, 0.23185350036621094, 0.23032890319824217, 0.2305146942138672, 0.23193299865722655, 0.23127955627441407, 0.23154893493652343, 0.23017062377929687, 0.23209368896484375, 0.2327941131591797, 0.2320343017578125, 0.23174552917480468, 0.2314915771484375, 0.23233737182617187, 0.23216336059570314, 0.2316247100830078, 0.230411865234375, 0.24189669799804686, 0.2276666564941406, 0.22706816101074218, 0.22999760437011718, 0.2378027801513672, 0.2287953338623047, 0.2277538604736328, 0.2277425994873047, 0.23395738220214843, 0.23059251403808595, 0.2288353271484375, 0.22753805541992186, 0.23353616333007812, 0.23206256103515624, 
0.22814076232910158, 0.22905743408203125, 0.23218995666503905, 0.23307264709472655, 0.23025013732910157, 0.229431640625, 0.23041023254394533, 0.23225138854980468, 0.23076377868652342, 0.22966160583496092, 0.2302278137207031, 0.23178445434570313, 0.23142323303222656, 0.23101504516601562, 0.23027235412597657, 0.2323948516845703, 0.23126416015625, 0.23066294860839845, 0.2297318115234375, 0.2320389404296875, 0.23205223083496093, 0.23097190856933594, 0.2301255645751953, 0.23092576599121092, 0.23207379150390625, 0.23197039794921875, 0.23092189025878906, 0.23119468688964845, 0.23222108459472657, 0.23083445739746095, 0.23018290710449218, 0.23117369079589845, 0.23164710998535157, 0.23220895385742188, 0.23132774353027344, 0.23069631958007814, 0.23220223999023437, 0.23308761596679686, 0.23153990173339845, 0.23136253356933595, 0.23234442138671876, 0.23235340881347658, 0.2316537628173828, 0.2305986633300781, 0.2311393280029297, 0.23211538696289064, 0.23184466552734376, 0.23144242858886718, 0.2328783416748047, 0.24208816528320312, 0.2270812530517578, 0.22740652465820313, 0.23039414978027345, 0.2371604461669922, 0.22813081359863283, 0.22680720520019532, 0.2290694122314453, 0.2351513671875, 0.23098162841796874, 0.22872679138183594, 0.22834141540527345, 0.23496124267578125, 0.23155247497558593, 0.22857171630859374, 0.22814512634277342, 0.23276339721679687, 0.23289447021484375, 0.22927667236328125, 0.22758892822265625, 0.23168223571777344, 0.2330166778564453, 0.231295654296875, 0.228853759765625, 0.23089663696289062, 0.2327163848876953, 0.2311771240234375, 0.23053517150878905, 0.23043072509765625, 0.23140956115722655, 0.23178250122070312, 0.23098355102539062, 0.22995484924316406, 0.23191180419921875, 0.2323563232421875, 0.23156880187988282, 0.23006694030761718, 0.231272216796875, 0.2319459228515625, 0.23123983764648437, 0.23023799133300782, 0.23155567932128907, 0.23136151123046875, 0.23105209350585937, 0.23086904907226563, 0.23168109130859374, 0.2320572509765625, 0.2326492462158203, 0.23074153137207032, 0.23278640747070312, 0.23191168212890625, 0.23217277526855468, 0.23090205383300783, 0.23240019226074218, 0.23139833068847657, 0.23203634643554688, 0.23110450744628908, 0.23226771545410158, 0.23191903686523438, 0.23274124145507813, 0.23236019897460938, 0.23208551025390625, 0.2321667785644531, 0.24207699584960937, 0.22715872192382813, 0.2275081329345703, 0.22964437866210938, 0.2374266815185547, 0.22879026794433593, 0.22808515930175782, 0.23059059143066407, 0.2354204864501953, 0.2306096649169922, 0.22705836486816405, 0.22832150268554688, 0.23337164306640626, 0.23184384155273438, 0.22901475524902343, 0.22871270751953124, 0.23194473266601562, 0.2326650848388672, 0.2299658203125, 0.22854981994628906, 0.23159628295898438, 0.232884033203125, 0.23078579711914063, 0.22942515563964844, 0.23056973266601563, 0.23205914306640624, 0.2313318328857422, 0.22966061401367188, 0.23109552001953124, 0.2320611572265625, 0.23294451904296876, 0.23039974975585936, 0.22974038696289062, 0.23187472534179687, 0.23260365295410157, 0.2311265869140625, 0.22987767028808595, 0.2312352294921875, 0.23187692260742188, 0.23252024841308594, 0.23005699157714843, 0.23168234252929687, 0.23155296325683594, 0.23232293701171874, 0.23193075561523438, 0.2306334686279297, 0.2316940155029297, 0.2321595458984375, 0.23238627624511718, 0.23157994079589844, 0.23101222229003907, 0.23256600952148437, 0.23174029541015626, 0.23175782775878906, 0.23198025512695314, 0.23242547607421876, 0.23129945373535157, 0.23195059204101562, 0.23054762268066406, 
0.23198310852050782, 0.23230998229980468, 0.2316295928955078, 0.2315898895263672, 0.24180738830566406, 0.22821888732910156, 0.22740777587890626, 0.22937770080566405, 0.23642544555664063, 0.22785638427734375, 0.22783818054199217, 0.22875135803222657, 0.23528652954101562, 0.2306759033203125, 0.22748150634765624, 0.22956112670898438, 0.23339990234375, 0.23139701843261717, 0.22971455383300782, 0.22833561706542968, 0.23189414978027345, 0.23156211853027345, 0.23042839050292968, 0.22777066040039062, 0.2309099578857422, 0.23235296630859376, 0.2310828094482422, 0.2294325408935547, 0.22992127990722655, 0.23389126586914064, 0.23158041381835937, 0.23022589111328126, 0.2302313537597656, 0.23354861450195313, 0.23140762329101563, 0.23023207092285156, 0.22970162963867188, 0.23234080505371094, 0.23223365783691408, 0.2310511932373047, 0.2308602294921875, 0.2318745880126953, 0.23238505554199218, 0.23134214782714843, 0.23003546142578124, 0.23168988037109375, 0.23195068359375, 0.2312539825439453, 0.2313338623046875, 0.23188636779785157, 0.2314686737060547, 0.23235472106933594, 0.23081369018554687, 0.23150784301757812, 0.23238368225097655, 0.23192649841308594, 0.23135459899902344, 0.23111807250976563, 0.23249411010742188, 0.2323289337158203, 0.2340240936279297, 0.23078988647460938, 0.23147120666503906, 0.23222610473632813, 0.23350093078613282, 0.23130361938476562, 0.23159504699707031, 0.24047520446777343, 0.2286473846435547, 0.22908767700195312, 0.2304389190673828, 0.23505101013183594, 0.22877993774414063, 0.2290134735107422, 0.23027850341796874, 0.23481832885742188, 0.23052450561523438, 0.22929830932617187, 0.2296746826171875, 0.23238902282714843, 0.23136592102050782, 0.2299155578613281, 0.22973440551757812, 0.2317884521484375, 0.23160432434082032, 0.23074610900878906, 0.230076416015625, 0.23059872436523438, 0.23225331115722656, 0.23163909912109376, 0.23001496887207032, 0.23086285400390624, 0.2309591064453125, 0.23105946350097656, 0.23099714660644532, 0.23141667175292968, 0.2306682891845703, 0.23140762329101563, 0.23135845947265626, 0.23256678771972655, 0.23079936218261718, 0.23238645935058594, 0.2306817626953125, 0.23233836364746094, 0.23072877502441405, 0.2319102325439453, 0.23057008361816406, 0.2316798095703125, 0.23185427856445312, 0.2319237060546875, 0.231468994140625, 0.23165373229980468, 0.2326580810546875, 0.23182188415527344, 0.23184165954589844, 0.23034275817871094, 0.23288426208496094, 0.23213658142089844, 0.23178866577148438, 0.2305576934814453, 0.23162384033203126, 0.23213104248046876, 0.23175308227539063, 0.231404541015625, 0.23193394470214843, 0.2330864715576172, 0.231938232421875, 0.2313691864013672, 0.2321017303466797, 0.23223091125488282]",tokens/s,4.327426586820344,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.307584,4683.923456,0.0,4288.67584,4213.842432,s,1,13.3825947265625,13.3825947265625,0.0,13.3825947265625,13.3825947265625,13.3825947265625,13.3825947265625,[13.3825947265625],,kWh,0.00018314163358749863,2.0194795041710082e-05,5.9438658661999924e-05,0.00026277508729120863,,MB,1203.580928,5107.54816,0.0,4699.717632,4535.245312,s,10,8.61562255859375,0.861562255859375,0.008665497944930763,0.8641178894042969,0.8676427673339844,0.8691550628662109,0.8703648992919921,"[0.8376939697265625, 0.8578470458984375, 0.8613602294921875, 0.86381201171875, 0.86074853515625, 0.8644237670898437, 0.8673067016601562, 0.866231689453125, 0.86553125, 0.8706673583984375]",tokens/s,297.13465075678124,kWh,2.512609172499987e-05,2.77097493549076e-06,1.671614763216668e-05,4.461321429265731e-05,tokens/kWh,5738210.170660891,MB,1254.387712,5115.936768,0.0,4708.10624,4535.247872,s,10,40.49503979492187,4.049503979492187,0.008818434365495443,4.05219580078125,4.058032958984375,4.060212646484375,4.0619563964843755,"[4.034101318359375, 4.038423095703125, 4.041629638671875, 4.04458203125, 4.0519599609375, 4.055001220703125, 4.052431640625, 4.062392333984375, 4.057548583984375, 4.056969970703125]",tokens/s,15.557460943130193,kWh,0.00011892334101375033,1.3116958640399224e-05,7.873059539183332e-05,0.00021077089504598282,tokens/kWh,298902.7492920956,,s,630,40.49235707473757,0.06427358265831357,0.0015613808815573744,0.06408670425415039,0.06476814651489259,0.06502831153869629,0.07511216720581056,"[0.07883795166015625, 0.06517282867431641, 0.06399033737182618, 0.06358208084106445, 0.06323929595947266, 0.06272499084472656, 0.06272784042358398, 0.06273904037475586, 0.06267811203002929, 0.0627658576965332, 0.06272927856445312, 0.06436297607421874, 0.0639450569152832, 0.06363328170776367, 0.06317478561401367, 0.06414745330810546, 0.06385391998291015, 0.06370131301879883, 0.06434236907958985, 0.06395510482788086, 0.06379913711547852, 0.06296960067749023, 0.06361520004272461, 0.06377679824829102, 0.06308883285522461, 0.06342943954467774, 0.06381792068481446, 0.0633469123840332, 0.06313430404663085, 0.06461849975585937, 0.06422099304199219, 0.0638199691772461, 0.0634093132019043, 0.06483977508544922, 0.06380006408691406, 0.06366617584228515, 0.06442111968994141, 0.06395775985717773, 0.06388051223754883, 0.0632364158630371, 0.06434371185302734, 0.0640823974609375, 0.0639526710510254, 0.06328163146972657, 0.06322134399414063, 0.06354927825927735, 0.06443417358398437, 0.06413369750976562, 0.0636313591003418, 0.06385459136962891, 0.06440464019775391, 0.0640021743774414, 0.06467219543457031, 0.06434342193603515, 0.06393334579467773, 0.06358425521850586, 0.0645481948852539, 0.06434678649902344, 0.06397747039794922, 0.06351433563232421, 0.06330352020263671, 0.06500188446044922, 0.06455219268798829, 0.07693769836425782, 0.06492147064208985, 0.06408739471435547, 0.06373033523559571, 0.06331763076782226, 0.06295158386230469, 0.06320095825195313, 0.06293056106567382, 0.06303382492065429, 0.06298790359497071, 0.06296275329589844, 0.06297983932495117, 0.06299609756469726, 0.0629961280822754, 0.0649551010131836, 0.06710681915283204, 0.06476179504394532, 0.06423149108886719, 0.06352719879150391, 0.06341603088378907, 0.06429417419433593, 0.06403663635253906, 0.06366227340698243, 0.06314006423950196, 0.06325827026367188, 0.06452877044677735, 0.06380323028564452, 0.06352758407592773, 0.06308438491821289, 0.06336959838867187, 0.06432745361328125, 0.0637583351135254, 0.06394204711914063, 
0.064340576171875, 0.06398566436767578, 0.06374582290649414, 0.06383023834228516, 0.06456524658203125, 0.06415564727783203, 0.06390364837646484, 0.06357772827148438, 0.06443465423583984, 0.06453775787353516, 0.06418716430664062, 0.06393376159667968, 0.06361983871459961, 0.06444236755371094, 0.0639365119934082, 0.06369270324707031, 0.0644486083984375, 0.06403260803222656, 0.06384630584716797, 0.06356198501586914, 0.06447872161865234, 0.06412544250488281, 0.0639766731262207, 0.06348880004882812, 0.06452355194091797, 0.0646595230102539, 0.06434883117675781, 0.06393241500854492, 0.0634511375427246, 0.06363478469848632, 0.07382377624511718, 0.06468851470947265, 0.06397673416137695, 0.06359932708740235, 0.06322995376586914, 0.06435558319091797, 0.06392460632324219, 0.06360486221313477, 0.06313008117675781, 0.0629634895324707, 0.06299238586425782, 0.06474748992919922, 0.06424578857421875, 0.0637279052734375, 0.06355116653442383, 0.0643318099975586, 0.06368255996704102, 0.06357820892333985, 0.06436831665039063, 0.06392380905151367, 0.06372825622558594, 0.06377676773071289, 0.06486016082763672, 0.0642779541015625, 0.06379987335205078, 0.06348185729980468, 0.06330275344848633, 0.06440335845947266, 0.06384873580932617, 0.06357270431518555, 0.06407901000976562, 0.06407657623291016, 0.06385446548461914, 0.06494636535644531, 0.06439247894287109, 0.06387171173095703, 0.0638130226135254, 0.06351878356933593, 0.06440809631347656, 0.06471065521240234, 0.06398099136352539, 0.0633554573059082, 0.06436857604980468, 0.06392975997924805, 0.06367299270629882, 0.06373750305175781, 0.0644529571533203, 0.06406940460205078, 0.06442620849609375, 0.06395814514160156, 0.06356579208374023, 0.06376335906982422, 0.06454271697998047, 0.06424937438964844, 0.06495894622802735, 0.06439437103271485, 0.06396540832519532, 0.06380147171020507, 0.06445625305175781, 0.06410105895996093, 0.06381084823608399, 0.06449443054199219, 0.06414556884765625, 0.07492995452880859, 0.06452464294433594, 0.06403494262695313, 0.06359116744995118, 0.06315097427368165, 0.06425202941894531, 0.06392575836181641, 0.06389503860473633, 0.0632490234375, 0.06422566223144531, 0.0636819839477539, 0.06346912002563476, 0.06346566390991211, 0.06417286682128906, 0.06402845001220703, 0.06360496139526367, 0.0644106216430664, 0.0642325439453125, 0.06332611083984375, 0.06400972747802734, 0.06429478454589843, 0.06395065689086914, 0.06383084869384766, 0.06354358291625976, 0.06425296020507812, 0.06387782287597656, 0.06363750457763671, 0.06427017974853516, 0.06387868881225586, 0.0638490867614746, 0.06392623901367188, 0.06436156463623047, 0.06398223876953125, 0.06379983901977539, 0.06418816375732422, 0.064505859375, 0.06404096221923829, 0.0638914566040039, 0.0645090560913086, 0.06422617340087891, 0.06386003112792969, 0.06355424118041993, 0.06439910125732422, 0.06388761520385743, 0.06350438308715821, 0.0647906265258789, 0.06441522979736328, 0.06397699356079102, 0.06372236633300782, 0.0644582061767578, 0.06425039672851562, 0.06454608154296874, 0.06416588592529297, 0.06391471862792969, 0.06410614776611329, 0.06426787567138671, 0.06443084716796875, 0.06365343856811523, 0.0645263671875, 0.06400144195556641, 0.06385532760620118, 0.06457782745361328, 0.06448099517822266, 0.07537216186523438, 0.06461004638671874, 0.06401206207275391, 0.06360969543457032, 0.06322550582885743, 0.06404905700683594, 0.06376902389526368, 0.06357606506347656, 0.06314422225952149, 0.0641632308959961, 0.0636473617553711, 0.06421984100341797, 0.06379110336303712, 0.06360892868041992, 0.06380944061279296, 
0.06426419067382813, 0.06401974487304687, 0.0645429458618164, 0.06417430114746094, 0.06360086441040039, 0.06428438568115234, 0.06390000152587891, 0.0641719970703125, 0.06391350555419922, 0.0637199363708496, 0.06364889526367187, 0.06416883087158202, 0.06385657501220703, 0.06374201583862305, 0.06441165161132813, 0.06381977462768555, 0.06365305709838867, 0.06437366485595702, 0.06487849426269532, 0.06428070068359375, 0.06387247848510742, 0.06446707153320312, 0.06407968139648437, 0.06493414306640626, 0.06615676879882812, 0.06381977462768555, 0.06358220672607422, 0.0634769287109375, 0.06500355529785157, 0.06440239715576172, 0.06374399948120117, 0.06419149017333985, 0.06447801971435548, 0.06456034851074219, 0.06441244506835937, 0.06415974426269531, 0.06456934356689453, 0.06422528076171875, 0.06504649353027343, 0.06456508636474609, 0.06413533020019531, 0.06357590484619141, 0.06401657867431641, 0.06507520294189453, 0.06457875061035157, 0.06402738952636719, 0.0638853759765625, 0.06462464141845703, 0.07551795196533204, 0.06456934356689453, 0.06394879913330079, 0.06416582489013672, 0.06370105743408203, 0.0635228157043457, 0.06346748733520508, 0.06398064041137695, 0.06359750366210938, 0.0632699203491211, 0.06420601654052735, 0.06385232162475586, 0.06363056182861328, 0.06320134353637695, 0.06418915557861328, 0.06505010986328125, 0.06477008056640625, 0.06423091125488281, 0.06377532958984375, 0.06418470764160156, 0.06426195526123046, 0.06387756729125976, 0.06366124725341797, 0.06336700820922851, 0.06432841491699219, 0.0644505615234375, 0.06410034942626953, 0.06381260681152344, 0.06378742218017579, 0.06438358306884766, 0.06542745971679688, 0.06431737518310547, 0.06476806640625, 0.06437593841552734, 0.06416063690185547, 0.06491526031494141, 0.06424508666992187, 0.06384316635131836, 0.06370297622680664, 0.06437689971923828, 0.0649583969116211, 0.0644109115600586, 0.06398236846923828, 0.06366761779785156, 0.06386368179321289, 0.06446665954589843, 0.06380953598022461, 0.06574845123291016, 0.06378675079345703, 0.06438790130615234, 0.06409827423095703, 0.06466873931884766, 0.0642011489868164, 0.06403727722167969, 0.06445065307617187, 0.06514073944091797, 0.06456934356689453, 0.06424761962890625, 0.06402272033691406, 0.0640610580444336, 0.0646123504638672, 0.06455567932128906, 0.06399151992797851, 0.07492991638183594, 0.06456793975830079, 0.06400204467773438, 0.06357196807861328, 0.06315827178955079, 0.06459552001953126, 0.06398611068725586, 0.06355699157714843, 0.0632856330871582, 0.06357958221435547, 0.06406790161132812, 0.06367283248901368, 0.0633449592590332, 0.06420652770996094, 0.06389126586914062, 0.06417635345458984, 0.06461436462402344, 0.0643399658203125, 0.06400819396972657, 0.06369459152221679, 0.0637768325805664, 0.0643094711303711, 0.06341363143920899, 0.06392483139038085, 0.0643276824951172, 0.0641630401611328, 0.06345721435546875, 0.06539759826660156, 0.06430105590820312, 0.06407955169677734, 0.06437715148925781, 0.0644382095336914, 0.06483542633056641, 0.06432790374755859, 0.06378438568115234, 0.06439993286132813, 0.06397235107421875, 0.0644775390625, 0.06417030334472656, 0.06380886459350586, 0.06344931030273437, 0.0644016342163086, 0.0639309425354004, 0.06461440277099609, 0.06449152374267578, 0.06452601623535156, 0.06423725128173828, 0.06501644897460937, 0.06437586975097656, 0.06398252868652343, 0.06384608078002929, 0.0640904312133789, 0.06449056243896484, 0.0640992660522461, 0.06395391845703124, 0.06415795135498047, 0.06456374359130859, 0.06418447875976563, 0.0645033950805664, 
0.06420291137695312, 0.0642174072265625, 0.06462834930419922, 0.0651960678100586, 0.07654399871826172, 0.06484716796875, 0.06434886169433594, 0.06389350509643554, 0.06374409484863282, 0.06655133056640625, 0.06360271835327148, 0.06421129608154297, 0.06356172943115235, 0.0636231689453125, 0.0635814094543457, 0.06414828491210937, 0.06371865463256836, 0.06331606292724609, 0.064837890625, 0.06497318267822266, 0.06433177947998046, 0.06402252960205078, 0.06434611511230469, 0.06422732543945313, 0.06344467163085937, 0.0643703384399414, 0.06394464111328126, 0.06436265563964844, 0.0640456314086914, 0.06382953643798828, 0.06429878234863282, 0.06384662246704101, 0.06363820648193359, 0.06449517059326172, 0.0648153305053711, 0.06443417358398437, 0.06454681396484375, 0.06426214599609376, 0.06384435272216797, 0.06695116424560547, 0.06386687850952148, 0.06350787353515625, 0.0654131851196289, 0.06441629028320313, 0.06393856048583985, 0.06378905487060547, 0.06389680099487305, 0.06436943817138673, 0.06390934371948243, 0.06397139358520508, 0.06449199676513671, 0.06437423706054687, 0.0645633316040039, 0.0645206069946289, 0.0644814682006836, 0.064325439453125, 0.06422105407714844, 0.06557884979248046, 0.06444866943359374, 0.06401017761230468, 0.06391212844848633, 0.06395699310302734, 0.0646123504638672, 0.0644874267578125, 0.06414950561523437, 0.06510963439941406, 0.06422470092773437, 0.07518659210205078, 0.06450534057617187, 0.06406195068359374, 0.06460415649414063, 0.06399987030029297, 0.06354137420654297, 0.06339567947387695, 0.06402655792236328, 0.06361110305786133, 0.06428262329101563, 0.06396723175048828, 0.06374195098876953, 0.06334678268432617, 0.06427638244628907, 0.0638397102355957, 0.06467763519287109, 0.06433052825927735, 0.064036865234375, 0.06418637084960938, 0.0637317771911621, 0.06471612548828125, 0.06420950317382812, 0.06381913757324219, 0.06381427383422851, 0.06424371337890625, 0.06479180908203125, 0.06419942474365234, 0.06384844970703125, 0.06359878540039063, 0.06426809692382812, 0.06408601379394531, 0.06441983795166016, 0.06430643463134765, 0.06385283279418945, 0.06432975769042969, 0.0639799690246582, 0.06444009399414062, 0.06412310028076172, 0.06381296157836915, 0.06418294525146484, 0.06475971221923828, 0.06495552062988282, 0.06438114929199219, 0.06378726577758789, 0.06422742462158203, 0.0644758071899414, 0.06495945739746094, 0.0643755874633789, 0.06413648223876953, 0.06489775848388672, 0.06409625244140625, 0.06494380950927735, 0.06434425354003906, 0.06388748931884766, 0.06459091186523437, 0.06418118286132812, 0.06499737548828124, 0.06451824188232422, 0.06576118469238282, 0.06392217636108398, 0.06448025512695313, 0.06399897766113281, 0.0642007064819336, 0.07526537322998048, 0.06485635375976563, 0.06396556854248046, 0.06350848007202148, 0.0634224624633789, 0.06410240173339844, 0.06363919830322265, 0.06352262496948242, 0.0630338897705078, 0.064104736328125, 0.06366547012329102, 0.06352278518676757, 0.06342873764038086, 0.06422278594970703, 0.06473554992675781, 0.06542176055908203, 0.06533734130859375, 0.06435750579833985, 0.06411148834228515, 0.06333235168457031, 0.06362863922119141, 0.06534210968017579, 0.06421702575683594, 0.06390313720703125, 0.06368483352661133, 0.06379286575317383, 0.0641370849609375, 0.06380563354492187, 0.06349177551269532, 0.06483197021484376, 0.06454118347167968, 0.0647759017944336, 0.06444624328613281, 0.06436637115478516, 0.06458211517333984, 0.06432998657226563, 0.06473113250732422, 0.06507315063476563, 0.06415315246582032, 0.06381232070922851, 0.06364950561523437, 
0.06436204528808594, 0.06400249481201171, 0.06389775848388672, 0.06392345428466797, 0.06458428955078124, 0.06442915344238281, 0.06457027435302734, 0.06465945434570312, 0.0645010528564453, 0.06423126220703125, 0.06476886749267578, 0.06454476928710938, 0.06442598724365234, 0.06395465469360352, 0.06388281631469726, 0.0648259506225586, 0.06449779510498047, 0.06388121414184571, 0.06503801727294922, 0.06453414154052735, 0.06417052459716797, 0.06414498901367187]",tokens/s,15.558491663925528,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2196.226048,7355.695104,0.0,6960.447488,6722.822144,s,1,15.48496875,15.48496875,0.0,15.48496875,15.48496875,15.48496875,15.48496875,[15.48496875],,kWh,0.00024491887688329823,2.700919066494004e-05,9.056562800799361e-05,0.0003624936955562319,,MB,1687.216128,7921.926144,0.0,7514.095616,7161.534464,s,10,10.654595092773437,1.0654595092773438,0.006423383328901411,1.06682958984375,1.0714741821289062,1.073470672607422,1.0750678649902345,"[1.05067626953125, 1.0610797119140625, 1.0617020263671875, 1.0637078857421876, 1.0675457763671874, 1.06951171875, 1.0677606201171874, 1.0661134033203126, 1.0754671630859376, 1.071030517578125]",tokens/s,240.2719181450959,kWh,3.107582351999781e-05,3.425319836929779e-06,2.056862756600053e-05,5.506977092292811e-05,tokens/kWh,4648648.3547984995,MB,1691.369472,7924.023296,0.0,7516.192768,7161.537024,s,10,51.312615722656254,5.131261572265625,0.02000570967956672,5.1339006347656255,5.1546525390625,5.1548863281249995,5.155073359375,"[5.08705126953125, 5.113611328125, 5.11806201171875, 5.12836181640625, 5.12734521484375, 5.139439453125, 5.1427421875, 5.14628173828125, 5.1546005859375, 5.1551201171875]",tokens/s,12.277682420345485,kWh,0.00015117442855958185,1.66774975152408e-05,0.00010051808041439957,0.0002683700064892223,tokens/kWh,234750.52530704497,,s,630,51.30943235778806,0.08144354342506047,0.0018556501328159932,0.08116019439697265,0.08238155364990235,0.08316942100524902,0.09381185348510744,"[0.09235491180419922, 0.07987139129638672, 0.07895305633544922, 0.0793702392578125, 0.07965609741210937, 0.07962505340576172, 0.08048127746582032, 0.08027760314941407, 0.08023337554931641, 0.08075183868408203, 0.08094796752929688, 0.08150627136230469, 0.08381804656982422, 0.08080230712890625, 0.08005366516113281, 0.07967327880859375, 0.08007746887207032, 0.0799683837890625, 0.08066035461425781, 0.07986790466308594, 0.07976134490966796, 0.0806052474975586, 0.08042192077636719, 0.0819163818359375, 0.08121395111083984, 0.080932861328125, 0.08070867156982423, 0.08046031951904296, 0.07979161834716797, 0.08015267181396485, 0.07988835144042969, 0.07966191864013672, 0.08017305755615234, 0.07999897766113281, 0.08042700958251953, 0.08049785614013671, 0.0828070068359375, 0.0808446044921875, 0.08102521514892579, 0.08054844665527344, 0.08053350067138672, 0.08042291259765624, 0.08017305755615234, 0.08003584289550782, 0.08043462371826172, 
0.0800118408203125, 0.07991085052490235, 0.08071379089355468, 0.08134182739257813, 0.08108048248291015, 0.08065071868896484, 0.08116838073730469, 0.08102285003662109, 0.08108863830566407, 0.08072566223144531, 0.08020003509521484, 0.08060066986083984, 0.08054134368896484, 0.08051789093017578, 0.08116429138183594, 0.08105574035644532, 0.08100863647460937, 0.08156364440917968, 0.09391452789306641, 0.08084342193603515, 0.08176016235351563, 0.08120652770996094, 0.08080470275878907, 0.08087551879882812, 0.0814571533203125, 0.08086729431152344, 0.08091785430908204, 0.08091923522949218, 0.08113152313232422, 0.08108771514892578, 0.0823897933959961, 0.08096768188476562, 0.08032611083984376, 0.0803476791381836, 0.08047206115722656, 0.08028160095214844, 0.0808686752319336, 0.08029840087890625, 0.0799315185546875, 0.08054595184326171, 0.08061488342285156, 0.08148226928710937, 0.0811171875, 0.08043929290771484, 0.08063926696777343, 0.08138758087158203, 0.08060380554199219, 0.08026316833496094, 0.08073983764648437, 0.08050265502929688, 0.07991753387451171, 0.08049833679199218, 0.08124591827392579, 0.08107087707519531, 0.08148377227783203, 0.08137856292724609, 0.08154390716552734, 0.08059241485595703, 0.08011971282958984, 0.08076758575439454, 0.08063906860351562, 0.08079043579101562, 0.08068505859375, 0.0810040283203125, 0.080961181640625, 0.08051388549804687, 0.08084230041503906, 0.0816409912109375, 0.0813636474609375, 0.08124444580078125, 0.08128915405273437, 0.08102662658691406, 0.08159241485595703, 0.0823193588256836, 0.08141859436035156, 0.08181145477294922, 0.0808938217163086, 0.08155919647216797, 0.08099273681640624, 0.0807383041381836, 0.08136089324951172, 0.09320713806152343, 0.07985715484619141, 0.07958758544921875, 0.08017091369628906, 0.07982902526855469, 0.08019999694824219, 0.07990262603759765, 0.07976048278808594, 0.079801025390625, 0.07980063629150391, 0.07969586944580079, 0.08378777313232422, 0.0831827163696289, 0.08123481750488282, 0.08051302337646485, 0.08053350067138672, 0.080648193359375, 0.08057609558105469, 0.07981712341308594, 0.07982041931152344, 0.07990924835205078, 0.0805212173461914, 0.08060723114013672, 0.08210431671142578, 0.08249459075927734, 0.08157679748535156, 0.08113549041748047, 0.08078966522216798, 0.08125234985351562, 0.08121753692626953, 0.08198963165283203, 0.08212480163574219, 0.08148786926269531, 0.08151039886474609, 0.08091958618164062, 0.08192098999023438, 0.08170496368408203, 0.08100249481201172, 0.08116806030273438, 0.08077123260498047, 0.0807507553100586, 0.08076652526855468, 0.0812674560546875, 0.08136057281494141, 0.08089750671386718, 0.0807449951171875, 0.08140595245361328, 0.08097792053222656, 0.08132559967041016, 0.08142076873779297, 0.08104959869384766, 0.08076271820068359, 0.08041693115234375, 0.08193228912353516, 0.08146134185791015, 0.08172124481201172, 0.08086457824707032, 0.08126943969726562, 0.08235622406005859, 0.08257917022705077, 0.0814302749633789, 0.08189801788330078, 0.08091840362548829, 0.09483315277099609, 0.0802607650756836, 0.07969305419921875, 0.08019744110107421, 0.08028050994873047, 0.0796275863647461, 0.08017526245117187, 0.07992988586425781, 0.07973887634277343, 0.07993110656738281, 0.08008022308349609, 0.08390956878662109, 0.08330035400390624, 0.08172886657714844, 0.08063657379150391, 0.08060928344726563, 0.0810265884399414, 0.08053193664550781, 0.0805992660522461, 0.08063362884521484, 0.0797655029296875, 0.08036966705322265, 0.08128431701660156, 0.08217884826660156, 0.08286406707763672, 0.08349616241455078, 0.08154198455810546, 
0.08159369659423828, 0.08076156616210937, 0.08077101135253906, 0.08083865356445312, 0.08099839782714843, 0.08073625946044923, 0.08087347412109375, 0.08111698913574218, 0.08177446746826172, 0.08227059173583984, 0.08204847717285156, 0.08115452575683593, 0.081491455078125, 0.08113613128662109, 0.08050466918945312, 0.08137315368652344, 0.0806316146850586, 0.08080217742919922, 0.08015382385253907, 0.0813985595703125, 0.0813096923828125, 0.08242160034179688, 0.08296259307861328, 0.08308499145507812, 0.08283372497558594, 0.08118886566162109, 0.08058675384521484, 0.0805212173461914, 0.08064985656738281, 0.08073171234130859, 0.0810047378540039, 0.0818799057006836, 0.08119478607177734, 0.08205516815185547, 0.08150527954101562, 0.08242415618896484, 0.09356047821044922, 0.08068348693847656, 0.08021810913085937, 0.08008700561523438, 0.08062905883789062, 0.08074291229248047, 0.08055420684814453, 0.08056422424316406, 0.08060675048828125, 0.0809816665649414, 0.08050112152099609, 0.08344316864013672, 0.08213565063476562, 0.08126627349853516, 0.08087648010253906, 0.08070441436767578, 0.08064409637451173, 0.08031737518310547, 0.08027545928955078, 0.07996956634521485, 0.07961264038085937, 0.08069529724121094, 0.08142198181152344, 0.08220838165283204, 0.08178880310058594, 0.08203510284423828, 0.08119750213623046, 0.08110867309570312, 0.08079347229003907, 0.08096720123291015, 0.0802557144165039, 0.08094534301757812, 0.08078150177001953, 0.08099606323242188, 0.08107427215576171, 0.08175411224365234, 0.0832573471069336, 0.083174560546875, 0.08139657592773437, 0.08059699249267578, 0.08060873413085938, 0.0810624008178711, 0.08054991912841797, 0.08052652740478515, 0.08082514953613282, 0.08093260955810547, 0.08120140838623047, 0.08225587463378906, 0.08193574523925781, 0.08202713775634765, 0.08122163391113281, 0.08108998107910156, 0.08088816070556641, 0.08138313293457031, 0.0811045150756836, 0.08125689697265626, 0.08086367797851562, 0.08067839813232422, 0.08236083221435547, 0.08255487823486328, 0.08380210876464844, 0.08173494720458985, 0.0813372802734375, 0.09488384246826172, 0.080729248046875, 0.08134127807617188, 0.0817086410522461, 0.08167215728759766, 0.08159056091308593, 0.08081005096435546, 0.08169245147705079, 0.08082611083984376, 0.08109257507324219, 0.08091887664794922, 0.08220902252197265, 0.08184355163574218, 0.08063862609863282, 0.08063385772705078, 0.08067696380615234, 0.08091149139404297, 0.08105244445800781, 0.08089395141601563, 0.08088905334472657, 0.0802291488647461, 0.08082431793212891, 0.08167424011230469, 0.08168447875976563, 0.08140921783447265, 0.0817242202758789, 0.08100249481201172, 0.08105359649658203, 0.08128521728515625, 0.08129312133789063, 0.08180339050292969, 0.08089615631103515, 0.08039823913574219, 0.08133554840087891, 0.08141696166992188, 0.0823193588256836, 0.0836456298828125, 0.08129209899902344, 0.08135475158691406, 0.08128038024902344, 0.08078924560546875, 0.0804727020263672, 0.08146524810791016, 0.0810351333618164, 0.08059133148193359, 0.08068505859375, 0.08194048309326171, 0.08227021026611328, 0.08240946960449219, 0.0814202880859375, 0.08122525024414062, 0.08168019104003907, 0.08144963073730468, 0.0816885757446289, 0.0818116455078125, 0.08280044555664062, 0.08144895935058594, 0.08128704071044922, 0.08167436981201172, 0.08166595458984376, 0.08180540466308593, 0.08148512268066406, 0.08108268737792969, 0.09476316833496094, 0.08143218994140625, 0.08160294342041016, 0.08108441925048829, 0.08126866912841797, 0.08076703643798828, 0.08090419006347656, 0.08093695831298828, 
0.08082950592041016, 0.08093177795410156, 0.08079682922363281, 0.08326640319824219, 0.08206130981445313, 0.0817371826171875, 0.08109724426269531, 0.08065023803710937, 0.08085298919677734, 0.08078540802001953, 0.08027519989013672, 0.08078975677490234, 0.08028160095214844, 0.08065744018554688, 0.081544189453125, 0.08228639984130859, 0.08199411010742187, 0.08174784088134765, 0.08135465240478515, 0.08149951934814453, 0.08153766632080078, 0.08162713623046874, 0.08159212493896484, 0.08248038482666016, 0.08195986938476563, 0.08099215698242188, 0.08169071960449219, 0.08118271636962891, 0.08172748565673828, 0.08150851440429688, 0.08103119659423828, 0.08163513946533203, 0.08150198364257813, 0.0809324493408203, 0.08168102264404296, 0.08111433410644531, 0.08079849243164063, 0.08115609741210937, 0.08150857543945313, 0.08147046661376953, 0.08173238372802734, 0.08209212493896484, 0.08126560211181641, 0.0822833251953125, 0.08210006713867188, 0.08304994964599609, 0.08178489685058593, 0.0813177947998047, 0.08090675354003907, 0.08125888061523437, 0.08122496032714843, 0.08129203033447266, 0.08176435089111328, 0.08141619110107422, 0.08160460662841797, 0.09513529968261719, 0.08085327911376954, 0.08183990478515625, 0.08083289337158203, 0.0809574432373047, 0.08075878143310547, 0.08076898956298828, 0.08075267028808594, 0.08087888336181641, 0.08091910552978515, 0.0804947509765625, 0.084170654296875, 0.0823806381225586, 0.08115213012695313, 0.08163750457763672, 0.08101840209960938, 0.080390625, 0.08103472137451172, 0.08077356719970703, 0.08077117156982422, 0.08047539520263672, 0.08079436492919922, 0.08169420623779297, 0.08242963409423829, 0.08302880096435547, 0.0814830093383789, 0.08168319702148437, 0.08316313934326172, 0.08121510314941406, 0.0814513931274414, 0.08138931274414063, 0.08004972839355469, 0.08071206665039063, 0.08049839782714843, 0.08138198089599609, 0.08210368347167969, 0.08237734222412109, 0.08182150268554687, 0.08185699462890625, 0.08162480163574219, 0.0814940185546875, 0.08182307434082031, 0.08066320037841797, 0.08103529357910157, 0.08088162994384765, 0.08131759643554687, 0.08164985656738281, 0.08205052947998047, 0.08426284790039062, 0.08152054595947265, 0.08142108917236328, 0.08176025390625, 0.08141974639892578, 0.08112796783447265, 0.08154447937011719, 0.08094588470458984, 0.08021318054199218, 0.08172736358642578, 0.08171965026855468, 0.08224009704589844, 0.0827883529663086, 0.0815308837890625, 0.08211251068115234, 0.09568163299560548, 0.0809701156616211, 0.08182425689697266, 0.08213916778564453, 0.08151174163818359, 0.08140665435791015, 0.08061666870117187, 0.08102992248535157, 0.08105680084228516, 0.08077206420898438, 0.08072576141357422, 0.084076416015625, 0.08255068969726563, 0.081218017578125, 0.08183602905273438, 0.08143901062011719, 0.08137677001953125, 0.08043746948242188, 0.08093081665039062, 0.08036502075195312, 0.08121548461914062, 0.08071222686767578, 0.08144687652587891, 0.08283926391601562, 0.08157843017578124, 0.0811805419921875, 0.08276604461669922, 0.08362166595458985, 0.08137522888183593, 0.08090160369873046, 0.08142899322509765, 0.08033849334716797, 0.08032713317871094, 0.08071984100341797, 0.0813197784423828, 0.08180307006835938, 0.08183433532714844, 0.08169884490966797, 0.08270880126953126, 0.08196249389648437, 0.08157350158691407, 0.0814044189453125, 0.081295166015625, 0.08123622131347656, 0.08066783905029297, 0.08128377532958984, 0.08312435150146484, 0.08316242980957031, 0.08104188537597656, 0.08162531280517578, 0.08177247619628907, 0.08223446655273438, 0.08147859191894531, 
0.08162102508544922, 0.08112947082519531, 0.08078540802001953, 0.08136294555664063, 0.08099635314941406, 0.08213024139404297, 0.08247907257080078, 0.08202889251708985, 0.08332530975341797, 0.08276172637939454, 0.09585167694091797, 0.08175276947021484, 0.08160806274414062, 0.08131273651123047, 0.08087503814697265, 0.08171363067626954, 0.08068595123291016, 0.08080025482177734, 0.08066297912597656, 0.08019967651367188, 0.08018115234375, 0.08466031646728515, 0.08314262390136719, 0.08172752380371094, 0.0814073257446289, 0.08085772705078124, 0.08089615631103515, 0.08079766082763672, 0.08054080200195313, 0.08081078338623048, 0.08069324493408203, 0.08089600372314452, 0.08136495971679687, 0.0836178207397461, 0.08219647979736328, 0.08444518280029296, 0.08162070465087891, 0.08036978912353515, 0.08168019104003907, 0.08072166442871094, 0.08052387237548828, 0.08052915191650391, 0.08041702270507813, 0.0810096664428711, 0.08175446319580078, 0.0819039077758789, 0.08273872375488281, 0.08255315399169921, 0.08181609344482423, 0.08103936004638672, 0.08179724884033203, 0.08125424194335938, 0.0820343017578125, 0.08160912322998047, 0.08236589050292968, 0.08203218841552734, 0.0819183349609375, 0.081168701171875, 0.0827026596069336, 0.08221305847167969, 0.08170579528808594, 0.08157695770263672, 0.08136080169677734, 0.08173782348632813, 0.08131890869140625, 0.08061849975585937, 0.08141366577148437, 0.08200444793701171, 0.0815302734375, 0.08163001251220703, 0.08381417846679687, 0.08320537567138672, 0.08141606140136719]",tokens/s,12.278444158316134,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 436, in set_module_tensor_to_device new_value = param_cls(new_value, requires_grad=old_value.requires_grad, **kwargs).to(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 332, in to return self._quantize(device) File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/nn/modules.py"", line 297, in _quantize w_4bit, quant_state = bnb.functional.quantize_4bit( File ""/usr/local/lib/python3.10/dist-packages/bitsandbytes/functional.py"", line 1173, in quantize_4bit out = torch.zeros(((n + 1) // mod, 1), dtype=quant_storage, device=A.device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. 
GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,884.334592,657.391616,0.0,262.144,258.163712,s,1,8.331421875,8.331421875,0.0,8.331421875,8.331421875,8.331421875,8.331421875,[8.331421875],,kWh,2.553798450001068e-05,2.8098189440193495e-06,8.393062269990148e-06,3.6740865714020175e-05,,MB,1195.66336,758.054912,0.0,350.224384,317.820928,s,10,0.23482944107055664,0.023482944107055665,0.0001976525830758702,0.023503056526184084,0.02369461154937744,0.023746105670928955,0.023787300968170166,"[0.02336067199707031, 0.02362454414367676, 0.023585119247436524, 0.023522176742553712, 0.023483936309814452, 0.023074464797973634, 0.023319007873535157, 0.02379759979248047, 0.023378751754760743, 0.023683168411254882]",tokens/s,10901.529162311572,kWh,6.968638696897281e-07,7.68513307585397e-08,4.622689988257993e-07,1.2359841992740671e-06,tokens/kWh,207122388.90299484,MB,1229.115392,779.026432,0.0,371.195904,317.823488,s,10,11.461855712890623,1.1461855712890623,0.008816181875136324,1.14757421875,1.1575904418945313,1.1585895935058594,1.159388914794922,"[1.1345552978515625, 1.1515673828125, 1.1480777587890625, 1.157368408203125, 1.1485537109375, 1.1332515869140625, 1.1595887451171876, 1.1347701416015625, 1.1470706787109375, 1.147052001953125]",tokens/s,54.96492154332984,kWh,3.322266337739506e-05,3.6639666195696823e-06,1.38264368765738e-05,5.0713066873538534e-05,tokens/kWh,1242283.3775188746,,s,630,11.456464830398557,0.018184864810156444,0.00044804628817504525,0.0181625919342041,0.018491910934448245,0.018595945358276366,0.019085622653961187,"[0.01769340705871582, 0.018038816452026367, 0.017971168518066405, 0.017991199493408203, 0.017947359085083006, 0.01788697624206543, 0.01800396728515625, 0.018288576126098632, 0.01803225517272949, 0.01802729606628418, 0.01840905570983887, 0.018012224197387697, 0.017982528686523436, 0.017949600219726563, 0.017825056076049804, 0.017947519302368164, 0.017827007293701173, 0.01778521537780762, 0.017997919082641603, 0.017994304656982422, 0.017968128204345703, 0.018207487106323243, 0.017945920944213867, 0.017820255279541015, 0.017889280319213868, 0.017879039764404296, 0.018149375915527344, 0.017958015441894532, 0.01788812828063965, 0.01796847915649414, 0.018225248336791993, 0.018433631896972655, 0.018410303115844726, 0.01839529609680176, 0.018440000534057616, 0.018106399536132814, 0.018067615509033203, 0.01818934440612793, 0.018217983245849608, 0.01787696075439453, 0.018159839630126955, 0.01795465660095215, 0.018069440841674805, 0.017939647674560546, 0.018033567428588866, 0.01786979293823242, 0.017748928070068358, 0.017718719482421874, 0.017820064544677734, 0.017721248626708985, 0.017753536224365235, 0.017774816513061523, 0.017752351760864257, 0.01766383934020996, 0.01775868797302246, 0.017811679840087892, 0.01778659248352051, 0.018026399612426757, 
0.017965024948120117, 0.01808211135864258, 0.018124671936035158, 0.018335071563720703, 0.01846134376525879, 0.017756351470947264, 0.018103103637695312, 0.018481151580810547, 0.017893375396728514, 0.017821695327758787, 0.017915903091430666, 0.01804697608947754, 0.018247840881347656, 0.01840320014953613, 0.018009727478027343, 0.017759904861450196, 0.017966880798339843, 0.017768768310546874, 0.01819926452636719, 0.018120576858520508, 0.018160991668701172, 0.018094751358032228, 0.01834966468811035, 0.01807401657104492, 0.018218399047851563, 0.01812950325012207, 0.018081855773925782, 0.018442432403564454, 0.018593536376953126, 0.018255872726440428, 0.01844633674621582, 0.017954944610595703, 0.017932159423828125, 0.017983104705810545, 0.017885568618774415, 0.01828873634338379, 0.017960832595825194, 0.01847260856628418, 0.01811289596557617, 0.018194463729858397, 0.01818623924255371, 0.018184127807617186, 0.01797228813171387, 0.01801254463195801, 0.018409631729125978, 0.018345855712890626, 0.018516544342041016, 0.018367712020874023, 0.018385887145996094, 0.018453664779663086, 0.018451103210449217, 0.01857161521911621, 0.018523807525634765, 0.01840127944946289, 0.01848121643066406, 0.01849955177307129, 0.018532320022583006, 0.018710527420043945, 0.018792448043823243, 0.01849888038635254, 0.018476959228515624, 0.018921375274658203, 0.01862499237060547, 0.018557600021362305, 0.018480640411376953, 0.018520320892333984, 0.01856060791015625, 0.01842838478088379, 0.018151424407958985, 0.01849113655090332, 0.018448640823364258, 0.018409023284912108, 0.018479551315307617, 0.01830297660827637, 0.018381856918334962, 0.01866032028198242, 0.018595680236816407, 0.018452640533447265, 0.018539648056030273, 0.018402175903320314, 0.01840947151184082, 0.018548959732055663, 0.018510751724243164, 0.018617311477661134, 0.018536352157592775, 0.018366592407226563, 0.01848512077331543, 0.018381919860839844, 0.01818281555175781, 0.017939744949340822, 0.018190399169921875, 0.018172832489013673, 0.01803264045715332, 0.018093631744384765, 0.018022335052490235, 0.01802627182006836, 0.01808697509765625, 0.018110111236572267, 0.017876575469970703, 0.017902048110961914, 0.017858495712280275, 0.017989023208618164, 0.018666048049926758, 0.01835580825805664, 0.018155904769897462, 0.018155584335327147, 0.01822329521179199, 0.018187904357910158, 0.01865292739868164, 0.01842835235595703, 0.018290016174316408, 0.01867024040222168, 0.018869760513305665, 0.01801849555969238, 0.018071008682250977, 0.018216096878051757, 0.017823423385620117, 0.01788217544555664, 0.017777599334716798, 0.017864704132080078, 0.017819648742675782, 0.017958656311035156, 0.017856000900268554, 0.01780950355529785, 0.017775487899780274, 0.017870624542236327, 0.018104320526123048, 0.018179424285888673, 0.018000736236572265, 0.018083776473999023, 0.018145151138305664, 0.018109888076782227, 0.018170080184936523, 0.018233823776245116, 0.018835552215576173, 0.01825564765930176, 0.018464799880981445, 0.01833977508544922, 0.018152576446533203, 0.01800284767150879, 0.017954975128173827, 0.018294271469116212, 0.01780678367614746, 0.017770816802978515, 0.01774444770812988, 0.018429983139038087, 0.017922048568725587, 0.017903039932250977, 0.017905599594116212, 0.01797385597229004, 0.017799200057983397, 0.017733600616455077, 0.018282751083374023, 0.017780511856079102, 0.018292736053466797, 0.018685951232910156, 0.018441503524780273, 0.01809619140625, 0.018069183349609375, 0.018351423263549806, 0.018107679367065428, 0.01826464080810547, 0.018115936279296876, 
0.018080255508422852, 0.018157855987548828, 0.018670944213867186, 0.01872496032714844, 0.01848067283630371, 0.018395967483520508, 0.01812879943847656, 0.01813811111450195, 0.018039392471313476, 0.01806502342224121, 0.01807436752319336, 0.01800998306274414, 0.017960159301757813, 0.017924543380737304, 0.018096607208251955, 0.018126880645751953, 0.01795187187194824, 0.017927040100097658, 0.0179182071685791, 0.018015167236328126, 0.020692800521850584, 0.01842870330810547, 0.018358015060424806, 0.018285343170166016, 0.018300607681274415, 0.018301984786987305, 0.018546911239624025, 0.020617984771728517, 0.02458624076843262, 0.018993152618408202, 0.018486623764038087, 0.017826784133911134, 0.018275520324707032, 0.01834441566467285, 0.01838425636291504, 0.0182893123626709, 0.01842835235595703, 0.018463903427124024, 0.018419519424438476, 0.018506528854370118, 0.01847859191894531, 0.01834566307067871, 0.018264480590820312, 0.018270336151123046, 0.018298879623413086, 0.01827155113220215, 0.018303871154785156, 0.01827235221862793, 0.018230815887451172, 0.018276832580566407, 0.018300224304199218, 0.018291391372680665, 0.018304927825927735, 0.018415712356567384, 0.018372608184814454, 0.01859584045410156, 0.018519872665405272, 0.01828022384643555, 0.018383487701416016, 0.018462303161621094, 0.01836684799194336, 0.018377920150756837, 0.018284223556518556, 0.018432416915893556, 0.018306623458862303, 0.01838960075378418, 0.01828656005859375, 0.01829724884033203, 0.018364416122436524, 0.018328863143920897, 0.018350751876831054, 0.01839849662780762, 0.018403711318969725, 0.018552928924560546, 0.018461055755615234, 0.018219104766845705, 0.018132831573486326, 0.0180849609375, 0.018006431579589845, 0.01835468864440918, 0.018354175567626953, 0.01807356834411621, 0.018062559127807618, 0.017865535736083984, 0.017738752365112305, 0.01780393600463867, 0.017754655838012695, 0.017767679214477538, 0.017657920837402342, 0.017705472946166992, 0.017663999557495116, 0.017770719528198243, 0.017856096267700194, 0.01772163200378418, 0.01734275245666504, 0.018203456878662108, 0.017975296020507812, 0.017812160491943358, 0.017941791534423827, 0.01769375991821289, 0.017700735092163085, 0.017712703704833986, 0.01780726432800293, 0.017840511322021486, 0.017826303482055664, 0.017915552139282226, 0.01802979278564453, 0.018282527923583983, 0.018131711959838866, 0.01870470428466797, 0.01816339111328125, 0.01802444839477539, 0.018506784439086914, 0.017996768951416015, 0.018355871200561525, 0.018198751449584962, 0.017879487991333008, 0.017880256652832032, 0.01791811180114746, 0.01764182472229004, 0.017649824142456055, 0.018350112915039064, 0.01896780776977539, 0.01784480094909668, 0.01785241508483887, 0.01789030456542969, 0.01796112060546875, 0.01816569519042969, 0.018039712905883788, 0.017957984924316408, 0.018069408416748048, 0.018043392181396483, 0.01811097526550293, 0.018145503997802733, 0.018328895568847658, 0.01808252716064453, 0.017961984634399415, 0.01785116767883301, 0.01771321678161621, 0.01761622428894043, 0.017723968505859375, 0.01760665512084961, 0.0176312313079834, 0.01757798385620117, 0.017563648223876953, 0.017737247467041015, 0.01774991989135742, 0.0178156795501709, 0.018004447937011718, 0.019564512252807618, 0.01809619140625, 0.01793222427368164, 0.017922048568725587, 0.017918239593505858, 0.017821407318115233, 0.017894399642944335, 0.018076671600341796, 0.0177096004486084, 0.018045055389404298, 0.01794892883300781, 0.018273344039916994, 0.0182260799407959, 0.018186431884765625, 0.018607168197631835, 0.01838345527648926, 
0.01839689636230469, 0.018444095611572266, 0.018524768829345704, 0.018239391326904296, 0.01838703918457031, 0.018227071762084962, 0.018337919235229493, 0.018307071685791015, 0.018126047134399415, 0.01802329635620117, 0.01862646484375, 0.01812895965576172, 0.018005823135375975, 0.018091295242309572, 0.018121536254882813, 0.018019424438476563, 0.01793734359741211, 0.0178787841796875, 0.01794256019592285, 0.018090208053588866, 0.018085599899291992, 0.018186527252197264, 0.01844540786743164, 0.018545568466186522, 0.01833091163635254, 0.018250463485717773, 0.01828668785095215, 0.018319263458251953, 0.018403135299682617, 0.01912339210510254, 0.01840015983581543, 0.018466911315917968, 0.018394912719726562, 0.018311391830444335, 0.018470943450927733, 0.018417631149291992, 0.018466304779052735, 0.018345567703247072, 0.018347999572753907, 0.018384159088134764, 0.018407072067260742, 0.018435232162475584, 0.018370752334594728, 0.018206943511962892, 0.018423967361450196, 0.018274911880493162, 0.01835100746154785, 0.018342784881591797, 0.018419488906860352, 0.02021107292175293, 0.022358943939208984, 0.018523967742919922, 0.018596031188964843, 0.01852604866027832, 0.018341888427734376, 0.018132543563842772, 0.018364864349365233, 0.01845248031616211, 0.018357887268066406, 0.018315168380737306, 0.018465248107910157, 0.01833123207092285, 0.01829478454589844, 0.018306528091430664, 0.01814860725402832, 0.01808729553222656, 0.018009727478027343, 0.017937088012695314, 0.017721343994140625, 0.017696767807006835, 0.017688575744628905, 0.01769267272949219, 0.017889280319213868, 0.017735103607177734, 0.017760032653808593, 0.017750751495361327, 0.017968223571777343, 0.017849279403686524, 0.017786720275878905, 0.01779840087890625, 0.017646528244018553, 0.017729631423950197, 0.017679391860961916, 0.01776278305053711, 0.017718879699707032, 0.017775136947631835, 0.017823808670043944, 0.017750240325927733, 0.017734912872314452, 0.01780790328979492, 0.018024799346923828, 0.018207679748535155, 0.01813395118713379, 0.018161535263061523, 0.018161792755126954, 0.01802239990234375, 0.017879039764404296, 0.017790016174316407, 0.017719968795776368, 0.017815839767456054, 0.01818828773498535, 0.018121919631958007, 0.018111488342285157, 0.01817350387573242, 0.018432256698608398, 0.017989023208618164, 0.017938880920410155, 0.018484512329101564, 0.017934656143188475, 0.01795907211303711, 0.01817622375488281, 0.018341951370239258, 0.01822265625, 0.018207456588745115, 0.018042560577392577, 0.018071456909179686, 0.018077951431274414, 0.017912832260131836, 0.017520832061767577, 0.017829120635986326, 0.017867103576660156, 0.018158367156982422, 0.018278432846069337, 0.01805308723449707, 0.01814089584350586, 0.018397279739379883, 0.018657184600830077, 0.018361440658569338, 0.018383583068847655, 0.018300960540771485, 0.01830694389343262, 0.01834124755859375, 0.01839606475830078, 0.01824176025390625, 0.018058687210083007, 0.01803094482421875, 0.017928192138671875, 0.01811187171936035, 0.017887872695922853, 0.017954336166381837, 0.017799135208129882, 0.01789148712158203, 0.01803081512451172, 0.018039039611816406, 0.018347391128540037, 0.018573535919189452, 0.017994239807128908, 0.0180467529296875, 0.018268159866333008, 0.01821900749206543, 0.018179264068603516, 0.018279327392578124, 0.018263967514038085, 0.018247903823852538, 0.018475999832153322, 0.018413503646850585, 0.018299776077270506, 0.01829395294189453, 0.01827465629577637, 0.018355743408203125, 0.018383808135986328, 0.018433759689331055, 0.018155807495117186, 0.01822719955444336, 
0.01813929557800293, 0.018208608627319336, 0.01836851119995117, 0.01819171142578125, 0.018233535766601562, 0.018231008529663088, 0.01807436752319336, 0.0182043514251709, 0.018065536499023437, 0.01804243278503418, 0.018252544403076172, 0.018210687637329102, 0.018126848220825196, 0.01824723243713379, 0.018319807052612304, 0.01835139274597168, 0.018503423690795898, 0.01827299118041992, 0.01864521598815918, 0.018702367782592773, 0.018557056427001953, 0.018509695053100586, 0.018479103088378905, 0.01848431968688965, 0.01841878318786621, 0.018443552017211914, 0.018407968521118163, 0.01831020736694336, 0.01837766456604004, 0.018507776260375978, 0.01840127944946289, 0.01838051223754883, 0.018469152450561525, 0.01837980842590332, 0.018364864349365233, 0.018401567459106444, 0.018364479064941406, 0.01824563217163086, 0.01837094306945801, 0.01830019187927246, 0.018290719985961913, 0.01835856056213379, 0.018415872573852538, 0.01844220733642578, 0.018757631301879883, 0.018315263748168945, 0.018427711486816406, 0.018386463165283203, 0.0184550724029541, 0.018435583114624024, 0.01851587104797363, 0.018406112670898436, 0.0183985595703125, 0.01884774398803711, 0.01844291114807129, 0.018275583267211914, 0.018119232177734375, 0.018002111434936522, 0.01790332794189453, 0.017885471343994142, 0.017893375396728514, 0.01783782386779785, 0.017832191467285156, 0.017674240112304687, 0.017749343872070313, 0.017754079818725586, 0.017625791549682617, 0.017657855987548828, 0.017573312759399416, 0.017813568115234376, 0.01793280029296875, 0.017942527770996093, 0.01780531120300293, 0.01769584083557129, 0.018309696197509766, 0.017867103576660156, 0.01776639938354492, 0.017615936279296876, 0.017666080474853515, 0.017697471618652344]",tokens/s,54.9907854933015,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,962.535424,1613.692928,0.0,1218.445312,1206.173696,s,1,9.4604873046875,9.4604873046875,0.0,9.4604873046875,9.4604873046875,9.4604873046875,9.4604873046875,[9.4604873046875],,kWh,6.477771084166382e-05,7.138271905804687e-06,2.3721130088000253e-05,9.563711283546876e-05,,MB,1243.926528,1909.39136,0.0,1501.560832,1463.359488,s,10,1.9908367919921874,0.19908367919921877,0.0004472589274700427,0.1991101303100586,0.19956671447753907,0.19967988739013673,0.19977042572021486,"[0.1997930603027344, 0.19954156494140626, 0.199281982421875, 0.19933203125, 0.19893525695800782, 0.1981090850830078, 0.19911955261230468, 0.19863119506835938, 0.1989923553466797, 0.1991007080078125]",tokens/s,1285.8914453948096,kWh,6.066193267262017e-06,6.68988535059734e-07,4.033217512285902e-06,1.0768399314607655e-05,tokens/kWh,23773264.021954346,MB,1265.979392,1909.39136,0.0,1501.560832,1463.362048,s,10,22.183607177734377,2.2183607177734377,0.018181601864536465,2.2126961669921874,2.240122021484375,2.2506093505859375,2.2589992138671873,"[2.2610966796875, 2.23779150390625, 2.220457275390625, 2.221549072265625, 2.210393310546875, 
2.191359619140625, 2.213748046875, 2.206274658203125, 2.209292724609375, 2.211644287109375]",tokens/s,28.399348895446057,kWh,6.437977178898731e-05,7.100976421997975e-06,3.540911959711486e-05,0.00010688986780810015,tokens/kWh,589391.6915783279,,s,630,22.17988812637329,0.03520617162916396,0.0012707433426642676,0.03513360023498535,0.035653885650634765,0.035893260955810545,0.03744401447296143,"[0.03918384170532226, 0.035512928009033204, 0.03554707336425781, 0.03542380905151367, 0.035546783447265626, 0.03576063919067383, 0.03560681533813476, 0.03563315200805664, 0.035434497833251956, 0.03811075210571289, 0.03747273635864258, 0.04433462524414063, 0.035721534729003905, 0.035815582275390626, 0.03562396621704102, 0.035605312347412106, 0.035238048553466794, 0.035318912506103514, 0.035388416290283206, 0.03526614379882813, 0.036552833557128905, 0.03556703948974609, 0.035668033599853516, 0.03566262435913086, 0.035788673400878906, 0.03542425537109375, 0.035454975128173825, 0.03546112060546875, 0.035520511627197264, 0.035581439971923826, 0.03554489517211914, 0.03629536056518555, 0.035506175994873046, 0.035560638427734374, 0.03570880126953125, 0.03562591934204101, 0.03542323303222656, 0.03598233413696289, 0.03555327987670898, 0.03550576019287109, 0.03549983978271484, 0.03569724655151367, 0.03622732925415039, 0.03588889694213867, 0.035639297485351565, 0.03578278350830078, 0.035491455078125, 0.03553955078125, 0.0365871696472168, 0.03544649505615234, 0.035448223114013674, 0.03547030258178711, 0.03580707168579102, 0.03568787384033203, 0.03570751953125, 0.03555123138427734, 0.0356495361328125, 0.03537919998168945, 0.035373313903808594, 0.03560012817382813, 0.03531980895996094, 0.035319297790527344, 0.03558147048950195, 0.035301601409912106, 0.035175968170166015, 0.03517891311645508, 0.03528851318359375, 0.0355164794921875, 0.03611315155029297, 0.035501953125, 0.03525574493408203, 0.03559084701538086, 0.03536857604980469, 0.03564787292480469, 0.035323902130126955, 0.03538438415527344, 0.03536991882324219, 0.03527065658569336, 0.03626617431640625, 0.03622857666015625, 0.035496288299560544, 0.035411201477050784, 0.03554790496826172, 0.03529846572875977, 0.035168384552001955, 0.03539017486572266, 0.0351987190246582, 0.03539788818359375, 0.03574323272705078, 0.03574256134033203, 0.035468673706054686, 0.035366622924804685, 0.03547737503051758, 0.03538771057128906, 0.03519097518920898, 0.03544902420043945, 0.035391616821289065, 0.03541987228393555, 0.03518000030517578, 0.03551916885375977, 0.035600414276123045, 0.035358688354492185, 0.035272705078125, 0.03559833526611328, 0.03524198532104492, 0.03558195114135742, 0.035274974822998045, 0.03536259078979492, 0.03594035339355469, 0.03704966354370117, 0.03577721786499023, 0.03579513549804687, 0.03561248016357422, 0.03561881637573242, 0.03547955322265625, 0.03545718383789063, 0.03567103958129883, 0.03541411209106445, 0.03523455810546875, 0.03544377517700195, 0.03585299301147461, 0.03546886444091797, 0.03526521682739258, 0.03581257629394531, 0.03571382522583008, 0.035366912841796876, 0.03527008056640625, 0.03499852752685547, 0.03547811126708984, 0.035155071258544925, 0.035154624938964846, 0.03512115097045899, 0.035362014770507814, 0.03507193756103515, 0.03494316864013672, 0.03583657455444336, 0.03536896133422852, 0.03524198532104492, 0.03575603103637695, 0.035485599517822264, 0.035178592681884766, 0.03545436859130859, 0.035250785827636716, 0.03524169540405273, 0.035428638458251956, 0.03549593734741211, 0.03606425476074219, 0.03517459106445313, 0.03517663955688476, 
0.03535116958618164, 0.035356670379638674, 0.035334110260009766, 0.035079647064208984, 0.03507462310791016, 0.035244129180908204, 0.03522719955444336, 0.03542256164550781, 0.03571654510498047, 0.035592769622802736, 0.03524156951904297, 0.03521523284912109, 0.03524867248535156, 0.03494454574584961, 0.03471007919311524, 0.03501456069946289, 0.03520512008666992, 0.035776512145996094, 0.03606070327758789, 0.035051296234130856, 0.03493548965454102, 0.03495907211303711, 0.035096286773681644, 0.03504195022583008, 0.034985759735107425, 0.03498368072509766, 0.03526079940795898, 0.034977790832519534, 0.03506995010375977, 0.0350464973449707, 0.03502915191650391, 0.03516511917114258, 0.035194496154785156, 0.03510470581054687, 0.03506524658203125, 0.03517536163330078, 0.035092159271240236, 0.03509859085083008, 0.034995712280273435, 0.03512521743774414, 0.03565363311767578, 0.035598655700683594, 0.03512271881103515, 0.035134750366210936, 0.03500921630859375, 0.03563740921020508, 0.035108768463134765, 0.03505936050415039, 0.035226303100585936, 0.03502671813964844, 0.03467689514160156, 0.034584415435791015, 0.03463987350463867, 0.034588481903076174, 0.03444863891601563, 0.034586910247802735, 0.034331520080566405, 0.0342628173828125, 0.03426508712768555, 0.03476889419555664, 0.03471273422241211, 0.034663265228271484, 0.03477673721313477, 0.060598400115966795, 0.034808032989501955, 0.036792320251464845, 0.03516009521484375, 0.03510268783569336, 0.0355676155090332, 0.03496521759033203, 0.03463590240478515, 0.03459718322753906, 0.03478716659545898, 0.03467417526245117, 0.034670753479003905, 0.034540992736816406, 0.034929088592529293, 0.034982177734375, 0.034844001770019534, 0.03468313598632813, 0.034592384338378905, 0.03447907257080078, 0.0343644790649414, 0.034327487945556644, 0.03435712051391602, 0.03411491012573242, 0.0345134391784668, 0.03456963348388672, 0.034626335144042966, 0.034553920745849606, 0.03451638412475586, 0.0348037109375, 0.034878177642822264, 0.034942623138427734, 0.03517871856689453, 0.03502284622192383, 0.03499008178710938, 0.03521945571899414, 0.035020030975341794, 0.034796257019042966, 0.03488358306884766, 0.035059745788574216, 0.03509404754638672, 0.03521772766113281, 0.034969600677490234, 0.035071487426757815, 0.03514556884765625, 0.034998943328857425, 0.035059711456298825, 0.03522124862670899, 0.03511561584472656, 0.04593587112426758, 0.03533456039428711, 0.03514134216308594, 0.03528265762329102, 0.035308223724365234, 0.03539494323730469, 0.0354984016418457, 0.035337696075439455, 0.035641983032226564, 0.035743743896484374, 0.03523161697387695, 0.034971614837646485, 0.03485283279418945, 0.034910465240478514, 0.0351374397277832, 0.03497545623779297, 0.03512966537475586, 0.03492166519165039, 0.03466937637329102, 0.03513350296020508, 0.03457632064819336, 0.03456204986572266, 0.034482078552246095, 0.03463587188720703, 0.03417638397216797, 0.034205345153808596, 0.03439545440673828, 0.034655902862548826, 0.034869247436523435, 0.034991233825683594, 0.03511795043945312, 0.03485190582275391, 0.034786048889160155, 0.03512339019775391, 0.03486105728149414, 0.03473020935058594, 0.03457001495361328, 0.03483622360229492, 0.034836734771728516, 0.03455376052856445, 0.0344169921875, 0.03473417663574219, 0.03472348785400391, 0.03486105728149414, 0.03486854553222656, 0.03555327987670898, 0.03502560043334961, 0.0349224967956543, 0.03459670257568359, 0.03471785736083984, 0.0345904655456543, 0.034455806732177734, 0.03458022308349609, 0.034441471099853516, 0.034476032257080076, 0.03487539291381836, 
0.03478838348388672, 0.034398494720458986, 0.034807743072509764, 0.03470003128051758, 0.035093662261962894, 0.035133697509765624, 0.035175006866455076, 0.03511270523071289, 0.03509888076782226, 0.03506585693359375, 0.035127296447753906, 0.035272705078125, 0.035280895233154294, 0.03525820922851562, 0.03516636657714844, 0.03496755218505859, 0.034914398193359376, 0.03510262298583984, 0.035003936767578125, 0.03537148666381836, 0.03506406402587891, 0.0351328010559082, 0.03513177490234375, 0.035286720275878904, 0.036055072784423825, 0.03514748764038086, 0.03526895904541016, 0.035095806121826174, 0.03445859146118164, 0.03410124969482422, 0.03393535995483398, 0.034029121398925784, 0.03415084838867188, 0.03486508941650391, 0.03470476913452149, 0.03446387100219726, 0.034751041412353516, 0.03462758255004883, 0.03477532958984375, 0.03475836944580078, 0.03460435104370117, 0.034766559600830076, 0.034794464111328124, 0.0347770881652832, 0.03440438461303711, 0.03437360000610352, 0.03420751953125, 0.03425302505493164, 0.03429580688476563, 0.03425894546508789, 0.03403366470336914, 0.03408892822265625, 0.03402345657348633, 0.034253822326660154, 0.034966529846191405, 0.03607904052734375, 0.03499679946899414, 0.03463577651977539, 0.034662689208984375, 0.03461705780029297, 0.03430627059936524, 0.03418854522705078, 0.03506572723388672, 0.036149120330810545, 0.036761791229248046, 0.03496633529663086, 0.03476502227783203, 0.03528271865844727, 0.03576764678955078, 0.03531171035766602, 0.03515564727783203, 0.03565283203125, 0.034926239013671874, 0.034955265045166016, 0.03517657470703125, 0.03505855941772461, 0.035326976776123044, 0.03518054580688477, 0.03541756820678711, 0.035451423645019534, 0.035347808837890626, 0.03516227340698242, 0.035278526306152344, 0.03513427352905273, 0.03509619140625, 0.036006271362304686, 0.03529276657104492, 0.03528096008300781, 0.03539542388916016, 0.035377662658691404, 0.03537849426269531, 0.03540176010131836, 0.035485374450683595, 0.03537609481811523, 0.03539494323730469, 0.03532863998413086, 0.03539276885986328, 0.03523660659790039, 0.03527884674072266, 0.03521945571899414, 0.03523279953002929, 0.03507299041748047, 0.034409889221191405, 0.03423088073730469, 0.03509622573852539, 0.03493724822998047, 0.03506988906860352, 0.03589683151245117, 0.03478204727172852, 0.034673694610595704, 0.03468761444091797, 0.034654335021972654, 0.034697086334228515, 0.035065536499023435, 0.03443983840942383, 0.034756256103515626, 0.03490611267089844, 0.03447974395751953, 0.03444083023071289, 0.03477171325683594, 0.03474227142333984, 0.034799713134765625, 0.03493212890625, 0.03486518478393555, 0.03463004684448242, 0.034773567199707034, 0.034524223327636716, 0.034616256713867186, 0.03479347229003906, 0.034758880615234376, 0.034747585296630856, 0.03463433456420899, 0.034269054412841796, 0.03399488067626953, 0.03420774459838867, 0.03405926513671875, 0.03489820861816406, 0.035164894104003905, 0.035095584869384765, 0.035029087066650394, 0.03501055908203125, 0.035340320587158205, 0.03490902328491211, 0.03503104019165039, 0.03527494430541992, 0.035202880859375, 0.03531161499023437, 0.03506108856201172, 0.03508083343505859, 0.035028350830078124, 0.034923168182373045, 0.03485615921020508, 0.035202880859375, 0.03527129745483398, 0.03522563171386719, 0.03528121566772461, 0.03525603103637695, 0.035162399291992184, 0.03537004852294922, 0.0352751693725586, 0.035213504791259766, 0.03737369537353516, 0.035186431884765626, 0.03521760177612305, 0.03550233459472656, 0.03550479888916016, 0.03527791976928711, 
0.035146400451660155, 0.03538655853271484, 0.03516704177856445, 0.03516166305541992, 0.035146175384521486, 0.03539174270629883, 0.03536870574951172, 0.035448833465576174, 0.035299327850341795, 0.03491337585449219, 0.03480400085449219, 0.03452156829833984, 0.034523296356201175, 0.034680831909179685, 0.03452928161621094, 0.03448361587524414, 0.03441020965576172, 0.034753406524658206, 0.03491430282592774, 0.034929729461669924, 0.035017120361328126, 0.035776512145996094, 0.03480985641479492, 0.03462105560302734, 0.03455539321899414, 0.03436019134521484, 0.03441254425048828, 0.03456515121459961, 0.03483907318115234, 0.035146366119384764, 0.035123008728027344, 0.03516944122314453, 0.034976608276367185, 0.03505152130126953, 0.03469635009765625, 0.03453763198852539, 0.03428752136230469, 0.03432912063598633, 0.0343732795715332, 0.034548576354980466, 0.03457408142089844, 0.037708927154541015, 0.03682751846313476, 0.035051742553710935, 0.03571852874755859, 0.03504848098754883, 0.03453939056396484, 0.03479046249389649, 0.034760990142822266, 0.03565615844726563, 0.0349285774230957, 0.03517452621459961, 0.03476259231567383, 0.0348059196472168, 0.03476287841796875, 0.034950271606445316, 0.03492534255981445, 0.03514303970336914, 0.034953025817871096, 0.03516953659057617, 0.03498998260498047, 0.03521712112426758, 0.035057472229003905, 0.03545427322387695, 0.035087265014648435, 0.03520483016967774, 0.03515955352783203, 0.03519158554077149, 0.03520415878295898, 0.035232223510742185, 0.03513167953491211, 0.035421630859375, 0.0351833610534668, 0.03523379135131836, 0.03519513702392578, 0.03524787139892578, 0.035116737365722656, 0.035283424377441405, 0.035469215393066404, 0.035289024353027346, 0.03516416168212891, 0.03518463897705078, 0.03483238220214844, 0.0349698257446289, 0.03507791900634766, 0.03493500900268555, 0.03491993713378906, 0.03471206283569336, 0.034531326293945314, 0.034358558654785154, 0.03451363372802734, 0.03439820861816406, 0.0346577262878418, 0.03477107238769531, 0.03590524673461914, 0.035267295837402346, 0.035835105895996096, 0.03495401763916016, 0.03464992141723633, 0.03465628814697266, 0.03493030548095703, 0.03496768188476562, 0.03466857528686523, 0.03485148620605469, 0.03511167907714844, 0.03524512100219727, 0.03495052719116211, 0.03502364730834961, 0.0353889274597168, 0.03613756942749023, 0.03517433547973633, 0.035022335052490236, 0.03487091064453125, 0.03471830368041992, 0.03496550369262695, 0.03498950576782227, 0.0347606086730957, 0.03466239929199219, 0.03478623962402344, 0.03478396987915039, 0.03657155227661133, 0.035160030364990234, 0.035197566986083985, 0.035160064697265625, 0.03500783920288086, 0.034925086975097656, 0.03505062484741211, 0.03504844665527344, 0.034896926879882814, 0.03511395263671875, 0.03505503845214844, 0.034826465606689457, 0.035127647399902345, 0.035125247955322264, 0.03541196823120117, 0.03553887939453125, 0.035397441864013675, 0.0354447021484375, 0.03546345520019531, 0.03610736083984375, 0.0353084487915039, 0.035248031616210936, 0.03523183822631836, 0.03541775894165039, 0.0354185905456543, 0.03542617416381836, 0.03540582275390625]",tokens/s,28.404110805720883,,,True 
4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1843.068928,2899.247104,0.0,2503.999488,2349.010944,s,1,10.456041015625,10.456041015625,0.0,10.456041015625,10.456041015625,10.456041015625,10.456041015625,[10.456041015625],,kWh,8.935959978747783e-05,9.849528328410333e-06,3.258947051595906e-05,0.00013179859863184722,,MB,1900.003328,3310.288896,0.0,2902.458368,2642.29888,s,10,2.0673072662353515,0.2067307266235352,0.0008982250054187398,0.20664173126220703,0.20775204620361326,0.20799231033325194,0.20818452163696288,"[0.20692445373535157, 0.20612460327148438, 0.2050182342529297, 0.20689356994628907, 0.2063273620605469, 0.206389892578125, 0.20607283020019532, 0.20762509155273437, 0.20769865417480468, 0.20823257446289062]",tokens/s,1238.3258366144385,kWh,6.304979010637554e-06,6.953266835215809e-07,4.206173577701955e-06,1.1206479271861089e-05,tokens/kWh,22843927.498514477,MB,1928.25344,3310.288896,0.0,2902.458368,2642.30144,s,10,27.798505859375002,2.7798505859375,0.005764667646533437,2.7790865478515627,2.78595625,2.7890772460937496,2.7915740429687497,"[2.7921982421875, 2.77327392578125, 2.784954345703125, 2.77960205078125, 2.774796875, 2.77301171875, 2.778571044921875, 2.779670654296875, 2.777164306640625, 2.7852626953125]",tokens/s,22.663088555442396,kWh,8.138066803019641e-05,8.976314009735155e-06,4.321750325009662e-05,0.0001335744852900282,tokens/kWh,471646.9605943761,,s,630,27.79591621780394,0.04412050193302215,0.000516710041676491,0.0439946231842041,0.04450067062377929,0.04506403846740722,0.04631580402374268,"[0.044828033447265624, 0.04420832061767578, 0.0445997428894043, 0.044377952575683596, 0.0446869125366211, 0.044015262603759764, 0.043929695129394535, 0.04409417724609375, 0.04395596694946289, 0.043976959228515626, 0.04378432083129883, 0.04444979095458984, 0.043996768951416014, 0.043932064056396485, 0.04451327896118164, 0.04449484634399414, 0.04423788833618164, 0.044372928619384765, 0.04410761642456055, 0.04410383987426758, 0.044486560821533204, 0.04422364807128906, 0.044208385467529296, 0.04423126220703125, 0.044324222564697265, 0.044283649444580075, 0.0444958381652832, 0.044012542724609374, 0.04399411010742187, 0.044224510192871096, 0.04400921630859375, 0.04558643341064453, 0.04415536117553711, 0.04415875244140625, 0.04443340682983398, 0.04434124755859375, 0.04403200149536133, 0.044146270751953126, 0.044157344818115236, 0.04425932693481445, 0.043919361114501954, 0.04392550277709961, 0.04409366226196289, 0.04408707046508789, 0.043905025482177736, 0.04395827102661133, 0.04412416076660156, 0.0481629753112793, 0.044257118225097654, 0.04415692901611328, 0.044281856536865234, 0.04526899337768555, 0.04436550521850586, 0.04407843017578125, 0.044430305480957034, 0.0442347526550293, 0.044339199066162106, 0.0441366081237793, 0.044029792785644534, 0.04398636627197266, 0.044228225708007815, 0.043971359252929686, 0.04554908752441406, 0.044955646514892575, 0.04426156616210938, 
0.04414236831665039, 0.044270782470703124, 0.043827713012695314, 0.044034400939941404, 0.043954177856445314, 0.04416259384155274, 0.04402214431762695, 0.043888511657714846, 0.04381497573852539, 0.044312736511230466, 0.0439417610168457, 0.04402969741821289, 0.04429647827148438, 0.04401926422119141, 0.043812641143798826, 0.04373977661132813, 0.04399116897583008, 0.043894302368164065, 0.04430281448364258, 0.04376332855224609, 0.04365555191040039, 0.043898017883300784, 0.04376611328125, 0.04399980926513672, 0.04425459289550781, 0.0440038070678711, 0.04427916717529297, 0.04411164855957031, 0.04416198348999024, 0.04397875213623047, 0.043821407318115235, 0.04395894241333008, 0.04409590530395508, 0.044067424774169923, 0.04398662567138672, 0.044017822265625, 0.04401168060302734, 0.043937793731689455, 0.0443675537109375, 0.04392086410522461, 0.043737953186035156, 0.04469952011108398, 0.043913345336914066, 0.04382428741455078, 0.04384444808959961, 0.04402928161621094, 0.04386207962036133, 0.04424499130249023, 0.04384771347045899, 0.04393548965454101, 0.04371696090698242, 0.04393417739868164, 0.04364492797851562, 0.043796607971191406, 0.04376972961425781, 0.04496822357177734, 0.04392726516723633, 0.04398479843139649, 0.04372079849243164, 0.04403519821166992, 0.043852161407470704, 0.045122974395751955, 0.044552799224853515, 0.04426342391967773, 0.04404838562011719, 0.044181503295898435, 0.043640830993652346, 0.04356262588500977, 0.04372876739501953, 0.04357980728149414, 0.043934814453125, 0.04440524673461914, 0.04399267196655274, 0.0444169921875, 0.04378675079345703, 0.043950111389160156, 0.043919231414794924, 0.04418956756591797, 0.044698272705078125, 0.04395622253417969, 0.04404800033569336, 0.04388288116455078, 0.04561103820800781, 0.043870174407958984, 0.043911167144775394, 0.04375305557250977, 0.04382147216796875, 0.043821056365966796, 0.04370841598510742, 0.043753471374511715, 0.04670198440551758, 0.043960929870605465, 0.043886592864990234, 0.0438579216003418, 0.04393164825439453, 0.044308353424072265, 0.044179073333740236, 0.04399564743041992, 0.04398899078369141, 0.04451737594604492, 0.0440684814453125, 0.04390131378173828, 0.044083198547363284, 0.04394803237915039, 0.046118911743164064, 0.047486942291259764, 0.04407455825805664, 0.044110305786132814, 0.04392745590209961, 0.0438172492980957, 0.044353343963623046, 0.04415283203125, 0.04407910537719727, 0.04450060653686523, 0.044026241302490235, 0.04400566482543945, 0.04385516738891602, 0.04410723114013672, 0.044429824829101565, 0.044052928924560544, 0.04429414367675781, 0.04393686294555664, 0.043982975006103514, 0.043938591003417966, 0.045195167541503906, 0.044284000396728515, 0.04431702423095703, 0.044159809112548826, 0.04410796737670898, 0.04448332977294922, 0.04419164657592774, 0.04393155288696289, 0.04369740676879883, 0.043950592041015625, 0.043657569885253905, 0.04382720184326172, 0.04409513473510742, 0.04378249740600586, 0.04434534454345703, 0.04413849639892578, 0.04377804946899414, 0.04417740631103516, 0.04411404800415039, 0.0444087028503418, 0.044418785095214845, 0.044292320251464845, 0.04381907272338867, 0.043974655151367184, 0.04367577743530274, 0.04383932876586914, 0.04365929412841797, 0.04408438491821289, 0.04374819183349609, 0.043859519958496095, 0.043951553344726564, 0.044153057098388675, 0.04420483016967774, 0.04428799819946289, 0.044227584838867184, 0.04481536102294922, 0.044111488342285156, 0.044181537628173825, 0.04387363052368164, 0.04414361572265625, 0.04386611175537109, 0.043993087768554685, 0.0438249282836914, 
0.0439769287109375, 0.04371212768554687, 0.043964126586914065, 0.04396908950805664, 0.044042335510253904, 0.043845630645751955, 0.044069183349609374, 0.04405420684814453, 0.044294113159179686, 0.0438109130859375, 0.0441855354309082, 0.04587724685668945, 0.045590526580810545, 0.044041889190673825, 0.04480422210693359, 0.04386991882324219, 0.04420454406738281, 0.04387014389038086, 0.043775840759277346, 0.04374755096435547, 0.044886688232421874, 0.04413788986206055, 0.044485214233398435, 0.044265472412109375, 0.04807884979248047, 0.04404207992553711, 0.04431683349609375, 0.04420556640625, 0.04394966506958008, 0.044167743682861325, 0.04405692672729492, 0.04401561737060547, 0.044191871643066406, 0.04393535995483398, 0.04385817718505859, 0.043845630645751955, 0.043786239624023435, 0.04391686248779297, 0.04375494384765625, 0.044163265228271485, 0.04398899078369141, 0.043780929565429685, 0.044015201568603515, 0.044043807983398436, 0.04392617416381836, 0.04426953506469727, 0.04380697631835938, 0.043902366638183594, 0.04367136001586914, 0.04392806243896484, 0.04362473678588867, 0.04381292724609375, 0.04376383972167969, 0.04440969467163086, 0.043817951202392576, 0.044490463256835935, 0.04398688125610352, 0.04420016098022461, 0.0440874252319336, 0.04388249588012695, 0.044043807983398436, 0.04392995071411133, 0.043548191070556644, 0.04397116851806641, 0.04378009414672852, 0.04391945648193359, 0.04362128067016602, 0.043784832000732424, 0.043778430938720705, 0.044025856018066405, 0.043870208740234375, 0.043601665496826175, 0.04389503860473633, 0.04379238510131836, 0.043796478271484376, 0.04399718475341797, 0.04372172927856445, 0.04452233505249024, 0.04402191925048828, 0.043783679962158206, 0.04385411071777344, 0.0440219841003418, 0.043802623748779294, 0.04484713745117187, 0.04450649642944336, 0.04398076629638672, 0.04394784164428711, 0.04374348831176758, 0.043911201477050785, 0.04366739273071289, 0.043875038146972654, 0.04457231903076172, 0.044447967529296875, 0.04414214324951172, 0.04390956878662109, 0.04407814407348633, 0.04392832183837891, 0.04367536163330078, 0.043905120849609375, 0.04362688064575195, 0.043541919708251955, 0.04342572784423828, 0.044137119293212894, 0.043375808715820315, 0.04391814422607422, 0.043730945587158204, 0.04399513626098633, 0.04386108779907227, 0.04388751983642578, 0.043753025054931644, 0.045131744384765624, 0.04390959930419922, 0.04385756683349609, 0.043811168670654294, 0.04385507202148437, 0.043780895233154295, 0.04383334350585937, 0.04377731323242187, 0.04401635360717773, 0.044467201232910154, 0.04394313430786133, 0.043797279357910154, 0.04382371139526367, 0.04382352066040039, 0.043659233093261716, 0.04363267135620117, 0.0437657585144043, 0.04366124725341797, 0.04541622543334961, 0.04379644775390625, 0.04390943908691406, 0.043862014770507815, 0.04404633712768555, 0.044023391723632815, 0.04440031814575195, 0.045103839874267575, 0.04544924926757812, 0.043764991760253905, 0.043936481475830076, 0.043757568359375, 0.04393772888183594, 0.04437139129638672, 0.04405632019042969, 0.04390591812133789, 0.04399871826171875, 0.04407551956176758, 0.04632620620727539, 0.04447641754150391, 0.04417536163330078, 0.04393497467041016, 0.04398771286010742, 0.043905025482177736, 0.04381846237182617, 0.0438625602722168, 0.044021343231201174, 0.04410793685913086, 0.04399539184570313, 0.04390102386474609, 0.04388035202026367, 0.04470579147338867, 0.043804672241210936, 0.044010784149169924, 0.043880542755126956, 0.04412416076660156, 0.04378416061401367, 0.04379084777832031, 0.04374748611450195, 
0.04385123062133789, 0.04382742309570312, 0.04382751846313476, 0.043648193359375, 0.043805503845214845, 0.04383129501342774, 0.04380672073364258, 0.04379852676391602, 0.044010688781738284, 0.04390380859375, 0.044611583709716796, 0.04365107345581055, 0.04365311813354492, 0.04365673446655274, 0.04409801483154297, 0.04450124740600586, 0.044077056884765625, 0.04401273727416992, 0.04460355377197266, 0.04715708923339844, 0.04436640167236328, 0.04416307067871094, 0.04399513626098633, 0.04401776123046875, 0.044574623107910154, 0.04399718475341797, 0.04401372909545898, 0.04393967819213867, 0.0440709114074707, 0.04388249588012695, 0.04396182250976562, 0.04382486343383789, 0.043915233612060546, 0.043878528594970705, 0.043909854888916015, 0.043862014770507815, 0.04404339218139648, 0.044311424255371094, 0.044519233703613284, 0.04400505447387695, 0.04421683120727539, 0.04426137542724609, 0.04469887924194336, 0.044218463897705076, 0.043981121063232424, 0.04406070327758789, 0.04400979232788086, 0.04415488052368164, 0.044070049285888674, 0.04378915023803711, 0.04397590255737305, 0.04425315093994141, 0.044663616180419925, 0.04657171249389649, 0.04411782455444336, 0.04419184112548828, 0.04500060653686523, 0.0439496955871582, 0.04398102569580078, 0.043745662689208986, 0.04382287979125977, 0.04386975860595703, 0.04403350448608399, 0.045386112213134766, 0.04393331146240234, 0.04387052917480469, 0.04376630401611328, 0.04386624145507813, 0.04381081771850586, 0.044298110961914064, 0.04571968078613281, 0.04579244613647461, 0.043977344512939456, 0.04386732864379883, 0.044080127716064454, 0.044439552307128906, 0.04396783828735352, 0.043698753356933594, 0.04418569564819336, 0.043931838989257815, 0.04391916656494141, 0.04377395248413086, 0.04368588638305664, 0.043802623748779294, 0.04370943832397461, 0.04363161468505859, 0.04429619216918945, 0.04385516738891602, 0.04411257553100586, 0.04382534408569336, 0.044000129699707034, 0.04402272033691406, 0.044072769165039063, 0.04426716613769531, 0.043772735595703126, 0.04395798492431641, 0.04357734298706055, 0.04368809509277344, 0.043796607971191406, 0.04376287841796875, 0.04390044784545898, 0.043977920532226565, 0.044154464721679686, 0.04414486312866211, 0.04395609664916992, 0.044929088592529295, 0.044113246917724606, 0.044243167877197266, 0.0439156494140625, 0.044133792877197264, 0.04394659042358398, 0.04427571105957031, 0.04396156692504883, 0.04397545623779297, 0.04391424179077148, 0.044055553436279295, 0.043902976989746094, 0.04397369766235352, 0.043928512573242186, 0.04440883255004883, 0.04392784118652344, 0.04410543823242188, 0.04393318557739258, 0.0438823356628418, 0.04384739303588867, 0.04437702560424805, 0.04402294540405274, 0.04371462249755859, 0.04361065673828125, 0.043587841033935544, 0.0437657585144043, 0.043763233184814454, 0.04374166488647461, 0.04381491088867188, 0.043872257232666016, 0.04376166534423828, 0.04384470367431641, 0.04461846542358398, 0.04397439956665039, 0.04394044876098633, 0.04411580657958984, 0.0460167350769043, 0.044684864044189455, 0.04405295944213867, 0.04448748779296875, 0.04389177703857422, 0.04390694427490234, 0.04375660705566406, 0.043920318603515626, 0.043914848327636716, 0.044120128631591794, 0.04518265533447265, 0.045015392303466795, 0.044046657562255856, 0.04400678253173828, 0.04408793640136719, 0.04443312072753906, 0.04408044815063476, 0.04367459106445312, 0.04395161437988281, 0.043926017761230465, 0.04394803237915039, 0.044071937561035154, 0.04363161468505859, 0.04433852767944336, 0.04401795196533203, 0.04397673416137695, 
0.04386003112792969, 0.04535500717163086, 0.04629033660888672, 0.044274272918701174, 0.043963520050048825, 0.0440840950012207, 0.044074878692626954, 0.0440239372253418, 0.04402380752563476, 0.045147327423095705, 0.04591203308105469, 0.043893600463867186, 0.04386611175537109, 0.04390707015991211, 0.04385126495361328, 0.0443922233581543, 0.04425187301635742, 0.0437592658996582, 0.04407126235961914, 0.04397875213623047, 0.04385721588134766, 0.04371510314941406, 0.04394927978515625, 0.04373600006103515, 0.04583769607543945, 0.04435212707519531, 0.0439400634765625, 0.04369935989379883, 0.04390879821777344, 0.043780448913574216, 0.04401776123046875, 0.04398745727539063, 0.04370636749267578, 0.043703392028808595, 0.04411427307128906, 0.043737567901611325, 0.04393360137939453, 0.04391110229492187, 0.04434560012817383, 0.04413433456420898, 0.04441708755493164, 0.04422860717773437, 0.04404537582397461, 0.04385475158691406, 0.04522601699829101, 0.04541644668579101, 0.04417740631103516, 0.04442521667480469, 0.04389823913574219, 0.043976478576660157, 0.04405539321899414, 0.044056129455566403, 0.04437369537353516, 0.044065536499023436, 0.044050430297851564, 0.044047679901123044, 0.043920063018798826, 0.04377743911743164, 0.04408947372436523, 0.044028385162353516, 0.04439769744873047, 0.04409433746337891, 0.044090625762939456, 0.04479244613647461]",tokens/s,22.665199990654376,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,True 4bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,897.257472,577.69984,0.0,182.452224,179.733504,s,1,7.877572265625,7.877572265625,0.0,7.877572265625,7.877572265625,7.877572265625,7.877572265625,[7.877572265625],,kWh,2.558793690833454e-05,2.8154384288212715e-06,9.265562967999963e-06,3.7668938305155774e-05,,MB,1205.055488,674.168832,0.0,266.338304,224.293376,s,10,0.26332313537597657,0.026332313537597657,0.000610555558624612,0.026153087615966797,0.0267448860168457,0.027288587379455564,0.027723548469543457,"[0.026606048583984375, 0.02783228874206543, 0.026089855194091797, 0.02617750358581543, 0.026577024459838866, 0.02662406349182129, 0.025579135894775392, 0.026088127136230467, 0.026128671646118165, 0.025620416641235353]",tokens/s,9721.895481552714,kWh,7.676794833879956e-07,8.462414545547406e-08,5.081871957008012e-07,1.3604908245442707e-06,tokens/kWh,188167384.43330067,MB,1238.601728,686.751744,0.0,278.921216,224.295936,s,10,11.490330200195315,1.1490330200195316,0.01679524657240238,1.1495841064453125,1.16569306640625,1.1701708740234376,1.1737531201171876,"[1.164697998046875, 1.1567578125, 1.174648681640625, 1.14178076171875, 1.16360595703125, 1.13744384765625, 1.12833935546875, 1.142410400390625, 1.119871826171875, 1.1607735595703126]",tokens/s,54.82871153600887,kWh,3.332297936244537e-05,3.6750928415303726e-06,1.3648851907698941e-05,5.064692411167469e-05,tokens/kWh,1243905.747584734,,s,630,11.485126411438001,0.018230359383234904,0.0005112548961969608,0.018171520233154298,0.01880431652069092,0.018962901496887206,0.019943947334289565,"[0.018439231872558595, 0.01866339111328125, 0.018562015533447267, 0.018786624908447267, 0.01859756851196289, 0.018696384429931642, 0.018680704116821288, 0.018768447875976563, 0.01888889694213867, 0.018825408935546874, 0.01873481559753418, 0.018884767532348634, 0.018837568283081054, 0.018847232818603517, 0.01878041648864746, 0.01879449653625488, 0.01886240005493164, 0.01865465545654297, 0.018614784240722656, 0.01889244842529297, 0.018651552200317383, 0.018642080307006835, 0.018993600845336914, 0.019159839630126952, 0.018760608673095702, 0.01876483154296875, 0.018767551422119142, 
0.018751487731933594, 0.01871254348754883, 0.018669120788574217, 0.018997728347778322, 0.01865727996826172, 0.01872307205200195, 0.018747135162353514, 0.018733119964599608, 0.018628864288330077, 0.018670400619506835, 0.018710527420043945, 0.018918272018432616, 0.018571231842041017, 0.018412832260131837, 0.018470848083496094, 0.01838371276855469, 0.018323423385620118, 0.018042560577392577, 0.017882944107055664, 0.01795123291015625, 0.017831808090209962, 0.01785625648498535, 0.017836416244506836, 0.01785206413269043, 0.01781491279602051, 0.01791689682006836, 0.018007167816162108, 0.017929056167602538, 0.018064863204956056, 0.01795129585266113, 0.017974880218505858, 0.017905248641967773, 0.018010751724243164, 0.017913728713989257, 0.017920320510864257, 0.01792854309082031, 0.018087072372436522, 0.018312000274658204, 0.01863680076599121, 0.01884547233581543, 0.01850332832336426, 0.018425567626953125, 0.018295648574829102, 0.0181711368560791, 0.018076416015625, 0.018091552734375, 0.018323392868041993, 0.017973888397216798, 0.019459999084472657, 0.018114303588867186, 0.018006336212158202, 0.018280384063720703, 0.01801366424560547, 0.017959583282470704, 0.0182291202545166, 0.01833087921142578, 0.01811942481994629, 0.01926380729675293, 0.018413248062133788, 0.018160991668701172, 0.01817888069152832, 0.018222944259643555, 0.018433664321899416, 0.018229631423950194, 0.019092832565307617, 0.018766464233398436, 0.018614303588867186, 0.018473024368286132, 0.01861030387878418, 0.01933401679992676, 0.019438623428344726, 0.01968230438232422, 0.018762304306030275, 0.01846681594848633, 0.01818454360961914, 0.018422815322875978, 0.01789411163330078, 0.01792639923095703, 0.018190336227416993, 0.018378751754760742, 0.018219200134277344, 0.01799884796142578, 0.017906496047973633, 0.017922048568725587, 0.017897087097167967, 0.017891359329223634, 0.01800831985473633, 0.018032127380371094, 0.018299615859985352, 0.018300800323486327, 0.018249727249145507, 0.018130048751831055, 0.018365312576293945, 0.018343584060668945, 0.018393407821655272, 0.018468544006347655, 0.018223424911499024, 0.018124832153320312, 0.0180982723236084, 0.018126144409179687, 0.018141824722290038, 0.01901568031311035, 0.018462400436401367, 0.018114879608154298, 0.01816166305541992, 0.01819379234313965, 0.01808563232421875, 0.018104352951049806, 0.018232416152954102, 0.018239231109619142, 0.018585599899291993, 0.018135040283203126, 0.018251840591430663, 0.01834592056274414, 0.018346176147460938, 0.0183621768951416, 0.018786304473876952, 0.019564544677734375, 0.018759679794311524, 0.01868560028076172, 0.018627199172973632, 0.01886012840270996, 0.018640512466430663, 0.01861984062194824, 0.018639423370361327, 0.01872467231750488, 0.018883039474487304, 0.018880224227905272, 0.018796768188476563, 0.018868000030517577, 0.018724863052368163, 0.018819103240966795, 0.019064767837524414, 0.018784223556518556, 0.018669408798217775, 0.018666784286499025, 0.018643903732299804, 0.018905248641967774, 0.019111072540283203, 0.018805440902709962, 0.01882316780090332, 0.018759424209594727, 0.018876384735107422, 0.018817119598388672, 0.018800832748413085, 0.018894847869873048, 0.0187391357421875, 0.01873721694946289, 0.018755584716796874, 0.01872617530822754, 0.01858563232421875, 0.018837535858154297, 0.018987520217895508, 0.018685247421264647, 0.018786815643310546, 0.019511680603027343, 0.01848931121826172, 0.018337791442871093, 0.018595199584960937, 0.018342527389526367, 0.01827052879333496, 0.01834592056274414, 0.017897024154663085, 0.018120319366455077, 
0.018475648880004882, 0.0180118408203125, 0.017993791580200195, 0.017995647430419922, 0.018128896713256838, 0.0182728328704834, 0.01823686408996582, 0.01834454345703125, 0.018230464935302733, 0.0181911678314209, 0.018081375122070312, 0.018038335800170897, 0.018010688781738282, 0.018067968368530272, 0.018048704147338866, 0.017905344009399415, 0.017852832794189453, 0.017952768325805665, 0.01782809638977051, 0.017860416412353516, 0.017888639450073244, 0.017857088088989257, 0.017838336944580077, 0.01788003158569336, 0.01787766456604004, 0.017918399810791016, 0.017970783233642578, 0.017916095733642577, 0.01792195129394531, 0.017905664443969727, 0.01795686340332031, 0.017955007553100585, 0.01814713668823242, 0.0182807674407959, 0.018402687072753908, 0.01845689582824707, 0.018367712020874023, 0.018315935134887697, 0.018139263153076172, 0.01802604866027832, 0.017954336166381837, 0.018262943267822265, 0.01809212875366211, 0.018159008026123045, 0.01830963134765625, 0.01858723258972168, 0.018257759094238282, 0.018180992126464842, 0.0182609920501709, 0.018119552612304687, 0.018253631591796875, 0.018354175567626953, 0.018612192153930663, 0.018640512466430663, 0.018073888778686525, 0.0180184326171875, 0.018083200454711915, 0.018041311264038087, 0.018000032424926756, 0.01803264045715332, 0.01838876724243164, 0.01776470375061035, 0.018001920700073244, 0.0177903995513916, 0.01799635124206543, 0.017934335708618163, 0.018020383834838866, 0.017950687408447265, 0.017817024230957032, 0.017854463577270507, 0.01785094451904297, 0.018098175048828127, 0.017834175109863282, 0.017806943893432618, 0.017901952743530274, 0.01787228775024414, 0.017864831924438475, 0.017879648208618162, 0.017778495788574218, 0.0177806396484375, 0.017905664443969727, 0.017751455307006836, 0.017988351821899413, 0.01821673583984375, 0.018307167053222655, 0.01840025520324707, 0.01846540832519531, 0.018581855773925782, 0.01848320007324219, 0.018308095932006836, 0.018222272872924803, 0.01930441665649414, 0.020285343170166014, 0.018415231704711914, 0.018433664321899416, 0.018299455642700194, 0.01858572769165039, 0.018651264190673828, 0.01947225570678711, 0.018766048431396485, 0.01884752082824707, 0.01882931137084961, 0.0188538875579834, 0.018677759170532226, 0.018625663757324218, 0.018721664428710937, 0.018924768447875977, 0.018773792266845703, 0.018637823104858398, 0.0186943359375, 0.018739007949829103, 0.01884160041809082, 0.01868185615539551, 0.019314176559448244, 0.020423168182373046, 0.018675167083740233, 0.018636863708496095, 0.01875119972229004, 0.018706880569458007, 0.01865532875061035, 0.018790624618530274, 0.01884172821044922, 0.0187674560546875, 0.018928192138671876, 0.018383007049560546, 0.01911180877685547, 0.0189833927154541, 0.018937856674194335, 0.01876291275024414, 0.018572128295898438, 0.01861222457885742, 0.018558143615722656, 0.01867804718017578, 0.018591327667236326, 0.018519296646118164, 0.018783935546875, 0.018552671432495116, 0.018382944107055665, 0.018494592666625977, 0.01835523223876953, 0.018136192321777343, 0.01818448066711426, 0.01776255989074707, 0.017764671325683594, 0.017987039566040038, 0.01781920051574707, 0.017826719284057616, 0.017722496032714842, 0.018014528274536132, 0.018021184921264647, 0.01789107131958008, 0.01817190361022949, 0.017748064041137695, 0.017639328002929687, 0.017708703994750975, 0.017740224838256834, 0.01771244812011719, 0.017648416519165037, 0.01762006378173828, 0.01767193603515625, 0.01765475273132324, 0.017624576568603514, 0.01774473571777344, 0.01795552062988281, 0.01800217628479004, 
0.01906537628173828, 0.01839023971557617, 0.018117599487304688, 0.01790979194641113, 0.01779836845397949, 0.017554176330566405, 0.017498111724853514, 0.017541120529174805, 0.01750204849243164, 0.017576095581054687, 0.017473407745361328, 0.017583360671997072, 0.017730432510375975, 0.017459199905395507, 0.01755673599243164, 0.017492128372192384, 0.017465408325195313, 0.017443679809570314, 0.017476959228515623, 0.01744316864013672, 0.01854182434082031, 0.02026380729675293, 0.01785241508483887, 0.017913856506347657, 0.01781171226501465, 0.017888256072998047, 0.0178920955657959, 0.018091936111450196, 0.018077152252197266, 0.01778879928588867, 0.017787647247314454, 0.017837087631225587, 0.017953760147094728, 0.01787654495239258, 0.017920480728149415, 0.017924064636230468, 0.017985919952392578, 0.018091039657592775, 0.018255552291870116, 0.01819139289855957, 0.018390815734863283, 0.018151679992675782, 0.018032480239868164, 0.017912128448486327, 0.017702592849731445, 0.017741823196411134, 0.017956159591674806, 0.01765635108947754, 0.01760892868041992, 0.01790355110168457, 0.017641471862792968, 0.017598207473754884, 0.017674495697021484, 0.017769887924194337, 0.01776291275024414, 0.01880419158935547, 0.017959455490112304, 0.017896575927734373, 0.01782054328918457, 0.017795072555541993, 0.017899168014526366, 0.01798793601989746, 0.017823808670043944, 0.017712736129760744, 0.017670495986938477, 0.01768684768676758, 0.017811199188232422, 0.017799104690551758, 0.017862655639648437, 0.017958303451538087, 0.017893503189086914, 0.01786419105529785, 0.01778278350830078, 0.017738719940185547, 0.017628576278686522, 0.017582687377929687, 0.017566848754882812, 0.01768886375427246, 0.01773833656311035, 0.017812896728515625, 0.017921760559082033, 0.018150272369384764, 0.018296640396118165, 0.01841119956970215, 0.01855539131164551, 0.01799577522277832, 0.018405376434326173, 0.018394784927368166, 0.019083295822143555, 0.018364736557006836, 0.018322751998901366, 0.018330144882202148, 0.018512224197387694, 0.01842355155944824, 0.018923583984375, 0.01856208038330078, 0.018430944442749023, 0.018400543212890624, 0.01830790328979492, 0.01833683204650879, 0.018536895751953126, 0.01825833511352539, 0.018232608795166017, 0.01853308868408203, 0.018335039138793946, 0.01840358352661133, 0.018372671127319336, 0.01834432029724121, 0.018550048828125, 0.019481311798095702, 0.018677312850952147, 0.01840787124633789, 0.018368703842163086, 0.0183438720703125, 0.018482847213745116, 0.018645248413085937, 0.01839321517944336, 0.01832943916320801, 0.018191423416137695, 0.01813804817199707, 0.01804902458190918, 0.01799100875854492, 0.01777110481262207, 0.017710847854614256, 0.017543807983398437, 0.017491552352905275, 0.017647712707519532, 0.017475584030151366, 0.017506048202514647, 0.017506111145019532, 0.01751215934753418, 0.01749068832397461, 0.01767523193359375, 0.017627967834472656, 0.017670431137084962, 0.018237344741821288, 0.01777471923828125, 0.017827423095703124, 0.017889568328857422, 0.017712480545043947, 0.017781408309936523, 0.017743072509765624, 0.017806112289428713, 0.017590240478515626, 0.017645280838012697, 0.017813823699951173, 0.017699935913085937, 0.017898399353027342, 0.01773036766052246, 0.01773664093017578, 0.017696767807006835, 0.01762713623046875, 0.017508352279663086, 0.017661951065063478, 0.017679647445678712, 0.01758451271057129, 0.017727840423583986, 0.017946624755859376, 0.017541120529174805, 0.017841728210449218, 0.01805766487121582, 0.018256927490234377, 0.018207584381103516, 0.020897823333740233, 
0.018595935821533204, 0.01809119987487793, 0.017761375427246092, 0.01776406478881836, 0.017780895233154296, 0.01760268783569336, 0.017775583267211913, 0.017532831192016603, 0.017599327087402344, 0.017719615936279298, 0.017599903106689452, 0.01758172798156738, 0.01758019256591797, 0.017506784439086914, 0.017579616546630858, 0.01758835220336914, 0.017493919372558595, 0.01833203125, 0.017549312591552735, 0.017485408782958983, 0.017561792373657226, 0.01763555145263672, 0.017530879974365234, 0.017729536056518554, 0.017982784271240233, 0.017863359451293945, 0.017735551834106446, 0.017756288528442382, 0.01785638427734375, 0.01765830421447754, 0.017761951446533204, 0.01769424057006836, 0.01776896095275879, 0.01765376091003418, 0.017602752685546875, 0.01746073532104492, 0.017531423568725585, 0.017544992446899416, 0.017481727600097655, 0.01754681587219238, 0.017637760162353515, 0.01797283172607422, 0.01764352035522461, 0.017650400161743164, 0.01763699150085449, 0.017601760864257812, 0.017681312561035157, 0.01739967918395996, 0.01804966354370117, 0.01767203140258789, 0.01787446403503418, 0.017766271591186523, 0.01776291275024414, 0.017822719573974608, 0.01904332733154297, 0.018176416397094726, 0.018220640182495116, 0.017909759521484374, 0.01788703918457031, 0.017811647415161135, 0.01784012794494629, 0.01788083267211914, 0.017852672576904295, 0.018157215118408204, 0.017708511352539064, 0.017875839233398437, 0.017954879760742188, 0.018229183197021485, 0.018266111373901366, 0.018427072525024416, 0.018518783569335937, 0.018513599395751954, 0.018534175872802733, 0.018430496215820314, 0.01847324752807617, 0.018398271560668946, 0.01836310386657715, 0.018354175567626953, 0.018531391143798828, 0.018539072036743164, 0.020104703903198243, 0.02005081558227539, 0.0187108154296875, 0.018590944290161133, 0.018887168884277345, 0.018315263748168945, 0.01843404769897461, 0.018321279525756837, 0.018527551651000975, 0.01856384086608887, 0.01854080009460449, 0.018419519424438476, 0.01838307189941406, 0.01862224006652832, 0.018694143295288086, 0.01859584045410156, 0.018616031646728516, 0.018655584335327147, 0.018589599609375, 0.01870649528503418, 0.018569536209106445, 0.018463903427124024, 0.018489824295043946, 0.01838697624206543, 0.018337791442871093, 0.018305088043212892, 0.018067136764526367, 0.018082048416137694, 0.021626752853393556, 0.019337343215942382]",tokens/s,54.85355384269742,,,True 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code 
{isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14032.388096,7835.942912,0.0,7440.695296,7427.899392,s,1,31.7943828125,31.7943828125,0.0,31.7943828125,31.7943828125,31.7943828125,31.7943828125,[31.7943828125],,kWh,0.0007155273096708242,7.892078725048271e-05,0.00027156299502797965,0.0010660110919492867,,MB,1232.048128,8416.854016,0.0,8000.63488,7884.32384,s,10,1.18458154296875,0.11845815429687498,0.0005181299128065095,0.11830814361572266,0.11893774261474609,0.11936403961181641,0.11970507720947265,"[0.11794691467285157, 0.11884300994873047, 0.11811257934570313, 0.11855452728271484, 0.1179632339477539, 0.11837686157226562, 0.11820524597167968, 0.11854940795898437, 0.11979033660888672, 0.11823942565917969]",tokens/s,2161.1006985506738,kWh,3.5326098436747686e-06,3.8957991699163003e-07,2.3319262296625744e-06,6.254115990328973e-06,tokens/kWh,40933043.19840959,MB,1254.621184,8458.797056,0.0,8042.57792,7975.158272,s,10,51.00871875,5.100871875,0.011844859828030999,5.098849853515625,5.114230322265625,5.118435229492188,5.121799155273437,"[5.08038671875, 5.0956025390625, 5.11215966796875, 5.12264013671875, 5.0955703125, 5.1132958984375, 5.09888427734375, 5.0988154296875, 5.103091796875, 5.08827197265625]",tokens/s,12.350829729476395,kWh,0.00014881954907215942,1.6415289741388052e-05,8.737122886613683e-05,0.0002526060676796843,tokens/kWh,249400.1849547288,,s,630,51.005125488281166,0.08096051664806547,0.0009011453758256804,0.08084107208251953,0.08152753982543945,0.08219734191894532,0.08396592407226564,"[0.08004889678955078, 0.07973391723632813, 0.07983353424072266, 0.0806155548095703, 0.08050505828857422, 0.08078720092773438, 0.07982838439941406, 0.07984630584716797, 0.07974092864990234, 0.08012009429931641, 0.07998220825195312, 0.08041891479492187, 0.07992320251464843, 0.08207769775390625, 0.08002559661865234, 0.0801334686279297, 0.08059318542480469, 0.08030451202392579, 0.0802117462158203, 0.08243987274169921, 0.08063030242919922, 0.08403353881835937, 0.08063385772705078, 0.08059490966796876, 0.08050211334228516, 0.0803081283569336, 0.08033139038085937, 0.08036777496337891, 0.08077926635742187, 0.08039833831787109, 0.07983103942871093, 0.08004402923583985, 0.08027942657470703, 0.08069132995605469, 0.08065599822998047, 0.08053388977050781, 0.0803082275390625, 0.08032665252685547, 0.07985151672363282, 0.07983881378173828, 0.08025334167480469, 0.08040447998046875, 0.08015257263183594, 0.0828006362915039, 0.08027954864501953, 0.08037785339355469, 0.0799109115600586, 0.0802806396484375, 0.08057337951660157, 0.080442626953125, 0.08004841613769531, 0.07978160095214844, 0.08043292999267578, 0.08143711853027344, 0.08266336059570313, 0.08175062561035157, 0.08216140747070312, 0.08129881286621093, 
0.08122252655029297, 0.0813465576171875, 0.08169881439208984, 0.08120223999023438, 0.08140032196044922, 0.08118975830078125, 0.08097689819335938, 0.08098303985595703, 0.08020582580566406, 0.08011366271972656, 0.07996211242675781, 0.07992249298095704, 0.08051757049560547, 0.08083884429931641, 0.080404541015625, 0.08034413146972656, 0.08112429046630859, 0.0808980484008789, 0.08125440216064453, 0.08090160369873046, 0.08059324645996094, 0.08050911712646484, 0.0808095703125, 0.08083293151855468, 0.08079277038574219, 0.08090707397460938, 0.0809144287109375, 0.08101888275146485, 0.08074649810791015, 0.08096514892578124, 0.08108489227294922, 0.08265510559082032, 0.08139170837402344, 0.08125440216064453, 0.0809552993774414, 0.08106610870361328, 0.08107212829589844, 0.08144281768798828, 0.08083660888671874, 0.08111270141601562, 0.08083084869384766, 0.08103321838378906, 0.08073011016845703, 0.08105165100097657, 0.08126054382324219, 0.08063085174560547, 0.08065449523925781, 0.08044035339355468, 0.08105753326416015, 0.0806903076171875, 0.08100953674316407, 0.08043692779541016, 0.08067833709716797, 0.079967041015625, 0.08032249450683594, 0.08075071716308593, 0.08089177703857423, 0.08078054046630859, 0.0804668197631836, 0.08044915008544921, 0.08434633636474609, 0.08055490875244141, 0.08090764617919922, 0.08079341125488282, 0.08096441650390625, 0.08107612609863281, 0.08100179290771484, 0.08087334442138672, 0.08291065979003906, 0.08087238311767578, 0.08107955169677734, 0.08168319702148437, 0.08113152313232422, 0.0807383041381836, 0.08071766662597656, 0.08222281646728516, 0.08216620635986328, 0.08061542510986328, 0.08112127685546874, 0.08092025756835937, 0.08056588745117188, 0.08085574340820313, 0.08094310760498047, 0.08212480163574219, 0.08206130981445313, 0.08191382598876953, 0.08083245086669921, 0.08097187042236328, 0.0809814682006836, 0.08033062744140625, 0.08018806457519531, 0.08141209411621093, 0.08112332916259765, 0.08071782684326172, 0.08326143646240235, 0.08258560180664062, 0.08129945373535157, 0.08103862762451172, 0.08113839721679687, 0.08151859283447266, 0.08114380645751954, 0.08117453002929688, 0.08117862701416016, 0.08144838714599609, 0.08123859405517578, 0.08148172760009766, 0.0809349136352539, 0.08097567749023438, 0.08053369903564453, 0.0805516128540039, 0.08087551879882812, 0.08033721923828124, 0.08062889862060547, 0.0804497299194336, 0.08049839782714843, 0.08056845092773438, 0.08132691192626954, 0.08110079956054687, 0.08049404907226562, 0.0802043228149414, 0.08057619476318359, 0.08092626953125, 0.08078550720214844, 0.08090636444091796, 0.08090064239501953, 0.08057036590576172, 0.08291123199462891, 0.08111011505126953, 0.08103932952880859, 0.08149292755126954, 0.08139366149902344, 0.08096649932861329, 0.08095958709716797, 0.08088889312744141, 0.08139228820800781, 0.08089826965332031, 0.0810558090209961, 0.08134041595458984, 0.08101478576660157, 0.08101251220703125, 0.08053533172607422, 0.08062137603759766, 0.0915882568359375, 0.08059161376953125, 0.08042015838623047, 0.08066294097900391, 0.08052377319335938, 0.08016281890869141, 0.08044748687744141, 0.08073126220703125, 0.08040035247802735, 0.08440064239501953, 0.09083535766601562, 0.08089766693115234, 0.08106009674072266, 0.0811357421875, 0.08083455657958985, 0.08096934509277344, 0.08074483489990235, 0.08132403564453125, 0.08083622741699219, 0.08070909118652343, 0.08089488220214844, 0.08169801330566406, 0.0811743392944336, 0.08082736206054687, 0.08098925018310547, 0.08123388671875, 0.08099734497070313, 0.0809202880859375, 
0.08095772552490234, 0.08094924926757813, 0.08061542510986328, 0.08086937713623046, 0.08088780975341797, 0.08078540802001953, 0.08082637023925782, 0.08070953369140625, 0.08069308471679687, 0.08096953582763672, 0.08057401275634765, 0.08450956726074219, 0.08097586822509766, 0.08085913848876954, 0.0804290542602539, 0.08049868774414062, 0.08143030548095703, 0.0804085464477539, 0.08128492736816406, 0.08056368255615234, 0.08176710510253907, 0.0807503662109375, 0.08054220581054687, 0.08071340942382813, 0.08091126251220702, 0.08127251434326171, 0.08380038452148438, 0.08160665893554687, 0.08105779266357421, 0.08145101165771484, 0.0815308837890625, 0.0810618896484375, 0.0811473617553711, 0.08192607879638672, 0.08238345336914063, 0.08071903991699218, 0.08081081390380859, 0.08052249908447266, 0.0805445098876953, 0.08058060455322266, 0.08064595031738281, 0.08101074981689453, 0.08066780853271484, 0.08064665222167969, 0.08113404846191406, 0.08073654174804687, 0.08034480285644531, 0.08060022735595704, 0.08074050903320312, 0.08024281311035156, 0.08073990631103516, 0.08038706970214844, 0.08062566375732422, 0.08103116607666015, 0.08034265899658204, 0.08129074859619141, 0.08023948669433593, 0.0802529296875, 0.08023859405517578, 0.08051039886474609, 0.08079212951660156, 0.08089190673828126, 0.08092784118652344, 0.08074742126464844, 0.08110284423828125, 0.08100454711914062, 0.08120320129394532, 0.08103282928466797, 0.08135664367675781, 0.08090678405761718, 0.0802911376953125, 0.08042697906494141, 0.08101961517333985, 0.08063999938964844, 0.08032978820800782, 0.0802948455810547, 0.08022179412841797, 0.0836710433959961, 0.0809005126953125, 0.0814571533203125, 0.08068096160888671, 0.08060313415527344, 0.08034674835205079, 0.07997382354736328, 0.08023545837402343, 0.08017715454101562, 0.08024172973632812, 0.08123939514160156, 0.08133904266357422, 0.08138098907470703, 0.08116591644287109, 0.08124425506591797, 0.08126329803466797, 0.08127078247070313, 0.08150947570800782, 0.08163581085205078, 0.08141458892822266, 0.08143257904052735, 0.0809512939453125, 0.08140799713134765, 0.08148915100097656, 0.08261068725585938, 0.08113123321533203, 0.08202909088134766, 0.08114304351806641, 0.08084146881103516, 0.08121139526367188, 0.08123299407958984, 0.0810912322998047, 0.08125443267822266, 0.08090032196044922, 0.08061542510986328, 0.0805580825805664, 0.08080793762207031, 0.0808427505493164, 0.0807995834350586, 0.08105299377441406, 0.08079216003417969, 0.08053289794921875, 0.08067120361328126, 0.08055846405029297, 0.08075263977050781, 0.08085094451904297, 0.08082841491699219, 0.0811878433227539, 0.08141497802734375, 0.08142457580566406, 0.08128717041015625, 0.08237789154052734, 0.08122454071044923, 0.08117862701416016, 0.08277094268798828, 0.08162723541259766, 0.08117670440673828, 0.08117081451416015, 0.08144528198242187, 0.08143462371826173, 0.08103846740722656, 0.0808600311279297, 0.08057357025146485, 0.08130239868164063, 0.0809144287109375, 0.08043094635009766, 0.08051875305175782, 0.08091907501220703, 0.08052329254150391, 0.08084067535400391, 0.08097180938720704, 0.08166925048828125, 0.08079814147949219, 0.08140204620361328, 0.08101888275146485, 0.08317478179931641, 0.08085363006591798, 0.08183586883544922, 0.08112115478515625, 0.08056861114501954, 0.08135065460205078, 0.08077721405029296, 0.08056832122802735, 0.08079564666748047, 0.08159228515625, 0.0813199691772461, 0.08103321838378906, 0.08123168182373047, 0.08070982360839844, 0.08101683044433594, 0.08100563049316406, 0.08061023712158204, 0.0810322265625, 
0.08115094757080078, 0.08021593475341797, 0.08009740447998047, 0.08063795471191407, 0.08045299530029297, 0.08105391693115234, 0.08084931182861328, 0.08064720153808594, 0.08066950225830079, 0.0803430404663086, 0.08022566223144531, 0.08052531433105468, 0.08069404602050781, 0.08019967651367188, 0.08020301055908204, 0.08060176086425781, 0.08104934692382812, 0.08076528167724609, 0.08087142181396484, 0.08062566375732422, 0.08054579162597657, 0.08055712127685546, 0.08075154876708984, 0.08087075042724609, 0.08075325012207031, 0.08074237060546875, 0.08084489440917969, 0.0826235809326172, 0.08175913238525391, 0.08191795349121093, 0.08123916625976563, 0.08141820526123048, 0.08096246337890625, 0.08109260559082031, 0.08105165100097657, 0.08069734191894531, 0.08076083374023438, 0.08137865447998047, 0.08043170928955078, 0.08011504364013672, 0.08052310180664063, 0.08127334594726562, 0.08130802917480469, 0.08099430084228515, 0.08097996520996094, 0.08072767639160157, 0.08068134307861329, 0.08069660949707032, 0.08046870422363281, 0.08032169342041015, 0.08022911834716796, 0.08085453033447265, 0.08131439971923828, 0.08150761413574219, 0.0812033920288086, 0.08137372589111327, 0.08083455657958985, 0.08109017944335938, 0.08152716827392578, 0.08161228942871093, 0.08114636993408203, 0.08093875122070313, 0.08092697906494141, 0.08156732940673828, 0.08150640106201172, 0.08116665649414062, 0.08132198333740234, 0.08115724945068359, 0.08122000122070312, 0.08005248260498046, 0.0801487045288086, 0.07994080352783203, 0.08049657440185547, 0.08038079833984375, 0.08004605102539063, 0.07992323303222656, 0.08062710571289063, 0.08065673828125, 0.08066483306884766, 0.08076668548583985, 0.08093110656738281, 0.08085081481933594, 0.08140940856933594, 0.08228457641601562, 0.080650146484375, 0.08051181030273437, 0.08005836486816406, 0.07957478332519531, 0.07974118041992187, 0.08030352020263672, 0.08082006072998046, 0.08195353698730469, 0.08097100830078124, 0.08158428955078124, 0.08113008117675781, 0.08093081665039062, 0.080932861328125, 0.0810414047241211, 0.08338822174072266, 0.08137075042724609, 0.08134217834472657, 0.08150828552246094, 0.08105792236328126, 0.08148397064208984, 0.08108707427978516, 0.08048226928710937, 0.08066416168212891, 0.08056310272216796, 0.08062322998046875, 0.08073458862304687, 0.08014643096923828, 0.08061542510986328, 0.08065023803710937, 0.08083232116699218, 0.08081161499023437, 0.08032316589355469, 0.08068035125732421, 0.08053616333007813, 0.0807852783203125, 0.08068313598632812, 0.08058265686035156, 0.0810250244140625, 0.08065638732910156, 0.0803737564086914, 0.08051020812988281, 0.08195954895019532, 0.08096112060546876, 0.08099894714355468, 0.08116143798828125, 0.08078825378417968, 0.08065760040283203, 0.08083683013916015, 0.08108092498779297, 0.08096153259277344, 0.08125186920166015, 0.08105788421630859, 0.08095782470703125, 0.08235826873779296, 0.08088780975341797, 0.08094499206542968, 0.08076509094238281, 0.08044454193115234, 0.08045148468017578, 0.08071676635742188, 0.08059699249267578, 0.08047821044921875, 0.0805946273803711, 0.08053548431396484, 0.08214073944091797, 0.08063632202148438, 0.08095763397216797, 0.08097200012207031, 0.08092467498779297, 0.08097792053222656, 0.08060313415527344, 0.08065229034423828, 0.08093695831298828, 0.08077311706542968, 0.08065424346923829, 0.08088127899169922, 0.08126306915283203, 0.08127078247070313, 0.08161603546142578, 0.08125116729736329, 0.08143993377685547, 0.08660809326171875, 0.08251433563232421, 0.0809959716796875, 0.08138604736328126, 0.08077455902099609, 
0.08055414581298828, 0.08166143798828125, 0.081355712890625, 0.08076070404052735, 0.08063807678222656, 0.08092655944824219, 0.08052751922607422, 0.08066028594970703, 0.0798082275390625, 0.08052374267578125, 0.08027545928955078, 0.08044892883300782, 0.08014851379394532, 0.08025145721435546, 0.08020582580566406, 0.080648193359375, 0.08056832122802735, 0.08072176361083984, 0.08075443267822266, 0.0803117446899414, 0.08000800323486328, 0.07980867004394532, 0.0807442855834961, 0.0808951644897461, 0.08038658905029297, 0.08074285125732422, 0.08074960327148438, 0.08101577758789062, 0.08071887969970704, 0.08105878448486328, 0.08091648101806641, 0.08107606506347656, 0.08022370910644532, 0.08024467468261719, 0.08276863861083984, 0.08038768005371094, 0.07992361450195312, 0.07995954895019532, 0.08014899444580079, 0.08000921630859376, 0.08063180541992188, 0.08131734466552734, 0.08291712188720703, 0.08053225708007812, 0.08039628601074218, 0.0802930908203125, 0.08041142272949219, 0.08047615814208985, 0.0805580825805664, 0.0825384979248047, 0.08075414276123047, 0.08031215667724609, 0.08017375946044922, 0.08142642974853516, 0.08091033935546875, 0.08060022735595704, 0.08081455993652344, 0.08329663848876953, 0.08177436828613281, 0.08138755035400391, 0.0812171859741211, 0.08086102294921875]",tokens/s,12.351699833475491,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,2003.881984,1252.982784,0.0,857.735168,829.14304,s,1,9.8537939453125,9.8537939453125,0.0,9.8537939453125,9.8537939453125,9.8537939453125,9.8537939453125,[9.8537939453125],,kWh,7.172844104999664e-05,7.904875418538698e-06,2.6393354448012163e-05,0.0001060266709165475,,MB,2047.086592,1542.38976,0.0,1126.170624,1096.740864,s,10,0.9329628829956054,0.09329628829956055,0.000590798934897056,0.093272705078125,0.09374768829345703,0.09418995666503907,0.09454377136230468,"[0.09364940643310547, 0.09361481475830079, 0.0946322250366211, 0.09248512268066406, 0.09264518737792969, 0.09275679779052734, 0.093359619140625, 0.09352851104736327, 0.093185791015625, 0.09310540771484375]",tokens/s,2743.946245514312,kWh,2.7397998220013332e-06,3.0215033128273586e-07,1.2486588390092126e-06,4.290608992293282e-06,tokens/kWh,59665189.827323526,MB,2051.227648,1565.458432,0.0,1149.239296,1096.743424,s,10,56.882767089843746,5.688276708984374,0.012514615985806062,5.689633056640625,5.6981533203125005,5.704061767578125,5.708788525390625,"[5.69684033203125, 5.65869384765625, 5.6828154296875, 5.6897890625, 5.68760546875, 5.692412109375, 5.6944736328125, 5.70997021484375, 5.68068994140625, 5.68947705078125]",tokens/s,11.075410572149973,kWh,0.00016401599858299512,1.809152795798569e-05,6.381000431898992e-05,0.00024591753085997074,tokens/kWh,256183.44401755228,,s,630,56.88017211914056,0.09028598749069941,0.0009492433131047906,0.09004508590698243,0.09131274108886718,0.09205830993652343,0.09396634460449219,"[0.0898764190673828, 0.09033638763427734, 0.08989510345458984, 
0.09205836486816406, 0.09064653015136719, 0.08993692779541015, 0.08951007843017578, 0.08959423828125, 0.09084470367431641, 0.09250291442871093, 0.0904044189453125, 0.08929529571533203, 0.08987001800537109, 0.0895880355834961, 0.08929293060302734, 0.08963468933105469, 0.09154354858398438, 0.09045606231689453, 0.08943202972412109, 0.09005686187744141, 0.09050460815429688, 0.09324153900146484, 0.08959820556640626, 0.09007516479492188, 0.08967577362060547, 0.08933990478515624, 0.09658086395263672, 0.089697021484375, 0.09261670684814453, 0.0900669403076172, 0.08992153930664062, 0.09029222106933593, 0.09006658935546875, 0.08983792114257813, 0.09046630096435547, 0.09008332824707031, 0.08990509033203126, 0.08978816223144531, 0.08988009643554687, 0.08987932586669922, 0.08989241790771485, 0.09026195526123047, 0.09126707458496094, 0.08991664123535156, 0.09031526184082031, 0.089923583984375, 0.09085740661621093, 0.09397283172607422, 0.0902981414794922, 0.08992610931396484, 0.08970569610595704, 0.08931382751464843, 0.09002153778076172, 0.09094560241699219, 0.0902347869873047, 0.09036796569824218, 0.09355510711669922, 0.08978636932373046, 0.0896424331665039, 0.08932157135009766, 0.08969264221191406, 0.08993084716796874, 0.0911379852294922, 0.09002751922607422, 0.08973363494873046, 0.09233408355712891, 0.09000057220458985, 0.09087484741210937, 0.08972611236572266, 0.08972563171386719, 0.08959999847412109, 0.089378173828125, 0.08978495788574219, 0.09022463989257813, 0.09036323547363281, 0.08968873596191407, 0.09052496337890625, 0.08964713287353515, 0.0895986557006836, 0.08978793334960937, 0.09003990173339843, 0.0898466567993164, 0.09025126647949219, 0.09017747497558594, 0.08949721527099609, 0.0896455078125, 0.0895052490234375, 0.08979084777832032, 0.08987868499755859, 0.09053612518310547, 0.08937398529052734, 0.08944416046142578, 0.08942179107666015, 0.0903298568725586, 0.08972083282470703, 0.08908550262451172, 0.08994473266601563, 0.08997660827636719, 0.08924905395507812, 0.08960688018798828, 0.08948086547851562, 0.08973689270019532, 0.08916969299316406, 0.08958041381835938, 0.0892286376953125, 0.08989148712158203, 0.09061357116699219, 0.08981100463867188, 0.08936255645751953, 0.08912630462646484, 0.08907123565673829, 0.08984799957275391, 0.08934255981445313, 0.08994016265869141, 0.0898713607788086, 0.08930406188964844, 0.09056249237060547, 0.0900455322265625, 0.08998397064208985, 0.09031199645996094, 0.08964959716796875, 0.08989663696289063, 0.09016102600097656, 0.08951622772216797, 0.089080322265625, 0.08951602935791016, 0.090133056640625, 0.08997885131835938, 0.08954255676269532, 0.08997869110107422, 0.09272048187255859, 0.08950876617431641, 0.09040080261230468, 0.0897927017211914, 0.09127823638916016, 0.09000847625732422, 0.08958566284179688, 0.09554313659667969, 0.09145536041259765, 0.09044611358642578, 0.08999644470214843, 0.08914620971679688, 0.08915904235839844, 0.09012697601318359, 0.08984575653076173, 0.08987648010253907, 0.09007718658447265, 0.09375949096679688, 0.09035980987548828, 0.08986156463623046, 0.0895555191040039, 0.09022259521484376, 0.08939929962158204, 0.08940748596191406, 0.09012553405761718, 0.08960079956054688, 0.08978377532958984, 0.08940720367431641, 0.09037830352783204, 0.08917596435546875, 0.08949219512939453, 0.08950489807128906, 0.08969302368164063, 0.09016950225830078, 0.08935833740234375, 0.09258393859863281, 0.09028217315673828, 0.09008927917480469, 0.08918630218505859, 0.08967782592773438, 0.08946688079833984, 0.09200230407714843, 0.09016716766357422, 
0.08977545928955079, 0.08919276428222657, 0.09243484497070313, 0.0898082275390625, 0.09055715179443359, 0.08976179504394531, 0.09093881225585937, 0.08988515472412109, 0.08989295959472657, 0.0898682861328125, 0.0900341796875, 0.08952413177490234, 0.08940342712402344, 0.09079199981689454, 0.0895215072631836, 0.08985625457763671, 0.090063232421875, 0.09324403381347657, 0.0899636459350586, 0.09017024230957031, 0.08963481903076172, 0.08975888061523438, 0.09047052764892578, 0.08912969970703125, 0.08984371185302735, 0.0903024673461914, 0.08962064361572265, 0.08969932556152344, 0.0903031005859375, 0.08996681976318359, 0.08969420623779296, 0.09131212615966797, 0.09131827545166016, 0.08946073913574219, 0.08950921630859375, 0.08936720275878907, 0.09004463958740234, 0.09023792266845704, 0.08969913482666016, 0.09025116729736328, 0.0907305908203125, 0.08978358459472656, 0.0898403549194336, 0.08992768096923828, 0.08972451019287109, 0.08926659393310547, 0.08960610961914063, 0.09000348663330078, 0.08972284698486328, 0.08954268646240235, 0.090219970703125, 0.09027846527099609, 0.09060870361328124, 0.08956976318359375, 0.09004080200195312, 0.0896737289428711, 0.08970409393310547, 0.0898133773803711, 0.09012790679931641, 0.09068294525146485, 0.08978726196289062, 0.08974076843261719, 0.09161167907714844, 0.0899110107421875, 0.09308393859863281, 0.09069878387451172, 0.09389791870117188, 0.09144822692871094, 0.08991014099121093, 0.09143692779541016, 0.09217219543457031, 0.0901624984741211, 0.08999539184570313, 0.0918597412109375, 0.09082649230957031, 0.09001395416259765, 0.09048268890380859, 0.09051545715332031, 0.09003008270263672, 0.09090793609619141, 0.09075180816650391, 0.0896899185180664, 0.09012451171875, 0.08921907043457031, 0.08953855895996093, 0.08960982513427734, 0.08979293060302734, 0.09026764678955078, 0.09038953399658203, 0.09039356994628907, 0.09004259490966797, 0.0906053466796875, 0.09002361297607422, 0.0896987533569336, 0.08934182739257812, 0.0895467529296875, 0.0895831069946289, 0.089797119140625, 0.09043148803710938, 0.08957952117919922, 0.08955903625488282, 0.09452063751220703, 0.09052435302734375, 0.08985395050048828, 0.08945017242431641, 0.08917664337158203, 0.08924134063720703, 0.08982733154296875, 0.09067427062988281, 0.09205238342285156, 0.09111551666259765, 0.09404825592041016, 0.09064857482910156, 0.09162239837646484, 0.09202982330322265, 0.09147596740722656, 0.09033033752441406, 0.09022918701171875, 0.08947711944580078, 0.08986457824707031, 0.09089004516601562, 0.08971810913085937, 0.08999008178710938, 0.09071625518798829, 0.08997468566894531, 0.09159232330322266, 0.09049945831298828, 0.09089024353027343, 0.0906792984008789, 0.09010575866699219, 0.08936048126220703, 0.08925091552734375, 0.08903545379638672, 0.08962889862060547, 0.09032704162597656, 0.09057689666748046, 0.09009686279296875, 0.08917072296142578, 0.0895283203125, 0.0900505599975586, 0.08947698974609375, 0.09072447967529297, 0.09021849822998047, 0.09008454132080078, 0.08987942504882812, 0.09043913269042969, 0.09060717010498047, 0.08997727966308594, 0.09019235229492187, 0.09062339019775391, 0.08999382019042969, 0.09008946990966797, 0.09184457397460938, 0.09015708923339844, 0.08964915466308594, 0.09019197082519531, 0.09004022216796875, 0.0904949722290039, 0.09014681243896484, 0.09010176086425781, 0.09057855987548828, 0.08985011291503907, 0.09042649841308593, 0.08985906982421875, 0.09016729736328125, 0.08960614776611328, 0.09006678771972657, 0.09040406036376954, 0.09048159790039062, 0.08983757019042969, 0.08959318542480468, 
0.09104239654541016, 0.0902042236328125, 0.09010131072998047, 0.09019026947021484, 0.09271826934814453, 0.09004086303710937, 0.0898782730102539, 0.09036016082763672, 0.09081196594238282, 0.09186742401123046, 0.09370454406738281, 0.09172112274169922, 0.0902081298828125, 0.089676513671875, 0.08969840240478516, 0.09027779388427734, 0.08985600280761719, 0.09035151672363281, 0.09003852844238282, 0.08980239868164062, 0.09076918029785157, 0.09013699340820312, 0.09022643280029297, 0.08975794982910157, 0.09015251159667968, 0.08978876495361328, 0.0901776351928711, 0.08966553497314453, 0.09159446716308593, 0.09019420623779296, 0.08985603332519532, 0.09103266906738282, 0.090251708984375, 0.09039231872558594, 0.09047042846679687, 0.09102745819091797, 0.09006607818603515, 0.08943907165527344, 0.09037763214111329, 0.09512201690673829, 0.08981913757324218, 0.09097830200195313, 0.09040211486816406, 0.09002374267578125, 0.08967667388916016, 0.09015090942382813, 0.08995769500732421, 0.08976659393310547, 0.09087324523925781, 0.09309040069580078, 0.09069529724121093, 0.0906731185913086, 0.09124086761474609, 0.09056460571289063, 0.08975564575195312, 0.08961135864257813, 0.09012723541259765, 0.09017142486572266, 0.0898744354248047, 0.0897551040649414, 0.08961859130859375, 0.08986662292480468, 0.09077536010742188, 0.09043721771240235, 0.09183200073242187, 0.0903927001953125, 0.09001017761230469, 0.0905401611328125, 0.09178121948242188, 0.09020537567138671, 0.08982342529296874, 0.08966390228271484, 0.09046243286132813, 0.08977817535400391, 0.0899051513671875, 0.09043526458740235, 0.08996227264404297, 0.08978476715087891, 0.09021858978271484, 0.09055340576171875, 0.09013116455078125, 0.08944457244873047, 0.08978022766113282, 0.09041423797607422, 0.09025007629394531, 0.09064035034179688, 0.09064246368408203, 0.09046221160888672, 0.09005827331542969, 0.09002015686035156, 0.08992726135253906, 0.09026345825195313, 0.09000962829589844, 0.09077347564697266, 0.09107881927490234, 0.09036598205566407, 0.0901903076171875, 0.0896426239013672, 0.08960857391357421, 0.09131008148193359, 0.0909222412109375, 0.08941379547119141, 0.08991190338134766, 0.09008537292480469, 0.09004415893554688, 0.08948172760009766, 0.08945638275146485, 0.09035775756835937, 0.09233817291259766, 0.09019497680664063, 0.08953282928466796, 0.090325439453125, 0.09077977752685547, 0.0929280014038086, 0.09008537292480469, 0.09204326629638672, 0.09092915344238281, 0.09089411163330079, 0.09079625701904297, 0.09144287872314454, 0.09248735809326172, 0.09024575805664062, 0.09078076934814452, 0.09104067230224609, 0.090363037109375, 0.08969712066650391, 0.09040422058105468, 0.09232653045654297, 0.09015296173095703, 0.09093325042724609, 0.09101312255859376, 0.09036370849609375, 0.08984390258789063, 0.09139571380615234, 0.09020403289794922, 0.09084467315673828, 0.0907315216064453, 0.09115411376953125, 0.08991705322265625, 0.08980345916748046, 0.08991334533691406, 0.09068953704833985, 0.08969625854492187, 0.08993376159667969, 0.08964019012451171, 0.08944518280029297, 0.09033318328857422, 0.09081839752197265, 0.09016284942626954, 0.09395046234130859, 0.090359619140625, 0.08968390655517579, 0.09032662200927734, 0.08984028625488281, 0.09178726196289062, 0.09092915344238281, 0.09072434997558594, 0.09052569580078125, 0.09335977935791015, 0.09136335754394531, 0.09035279846191406, 0.09016099548339844, 0.09099673461914062, 0.08969830322265625, 0.0897474594116211, 0.0895134048461914, 0.09052323150634765, 0.08982598114013672, 0.08934633636474609, 0.08980070495605469, 
0.0902116470336914, 0.09026451110839843, 0.09004729461669922, 0.09303247833251953, 0.08939971160888673, 0.0893834228515625, 0.0894741439819336, 0.08978934478759766, 0.08966349029541015, 0.09041458892822266, 0.09042991638183594, 0.08964918518066406, 0.08929821014404297, 0.08975138854980469, 0.08986099243164063, 0.09048992156982422, 0.09039689636230469, 0.08993251037597656, 0.09089965057373046, 0.09018450927734376, 0.09017504119873047, 0.0906830062866211, 0.09151302337646484, 0.09012697601318359, 0.09014640045166016, 0.08977654266357422, 0.0901500473022461, 0.08945750427246094, 0.08941497802734374, 0.09056121826171876, 0.09076454162597657, 0.08965196990966796, 0.09040486145019531, 0.0900157470703125, 0.08918630218505859, 0.0907610855102539, 0.08994217681884766, 0.09079555511474609, 0.08937721252441407, 0.08926544189453126, 0.08970518493652344, 0.08991126251220703, 0.0907919692993164, 0.0895667495727539, 0.08992108917236329, 0.08959187316894532, 0.08932982635498046, 0.08973932647705078, 0.09205824279785156, 0.08962230682373047, 0.09012655639648437, 0.0903741455078125, 0.09497996520996094, 0.09177792358398437, 0.08991539001464843, 0.08998818969726563, 0.08975071716308594, 0.08999702453613281, 0.0918097915649414, 0.09102336120605468, 0.09078326416015625, 0.09097837066650391, 0.09079644775390625, 0.08959385681152343, 0.09043558502197266, 0.09034880065917969, 0.08994278717041015, 0.09041715240478515, 0.09082662200927734, 0.08996672058105469, 0.08956838226318359, 0.08937773132324218, 0.08936646270751954, 0.09068544006347656, 0.09000345611572266, 0.08993753814697265, 0.08985574340820313, 0.08979315185546875, 0.09002543640136719, 0.09040541076660157, 0.08992294311523437, 0.09066569519042969, 0.08981228637695313, 0.0906690902709961, 0.09050086212158204, 0.09079788970947265, 0.09157939147949219, 0.09171260833740234, 0.09160591888427734, 0.08969356536865235, 0.0897091827392578, 0.08940953826904297, 0.09013862609863281, 0.09101289367675781, 0.09090480041503907, 0.09043500518798828, 0.09053241729736328, 0.09335161590576171, 0.0904357452392578, 0.09004252624511719, 0.09021849822998047, 0.08968396759033204, 0.089525634765625, 0.09001538848876953, 0.08943011474609375, 0.08989170837402344, 0.09031884765625, 0.09107059478759766, 0.0900598373413086, 0.08976057434082031, 0.08980684661865235, 0.08945458984375, 0.09014201354980468, 0.08953311920166016, 0.08987673950195313, 0.09013632202148437]",tokens/s,11.075915851316491,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,12395.528192,6990.790656,0.0,6595.54304,6586.72896,s,1,28.8501015625,28.8501015625,0.0,28.8501015625,28.8501015625,28.8501015625,28.8501015625,[28.8501015625],,kWh,0.0006341500994791687,6.99444564694861e-05,0.00023871046874599997,0.0009428050246946548,,MB,1252.286464,7332.626432,0.0,6916.407296,6839.599104,s,10,1.1810994644165038,0.11810994644165038,0.000396576550289102,0.1181243667602539,0.11869516143798828,0.11872881393432616,0.11875573593139647,"[0.11758112335205079, 0.11809552001953125, 0.11868768310546875, 0.11876246643066406, 0.11821939086914063, 0.11815321350097656, 0.11805347442626953, 0.11794924926757812, 0.11741353607177735, 0.11818380737304687]",tokens/s,2167.471984473985,kWh,3.5150750766566676e-06,3.876503588161218e-07,2.334165187277139e-06,6.236890622749929e-06,tokens/kWh,41046094.197356015,MB,1274.49088,7374.569472,0.0,6958.350336,6910.272512,s,10,62.07667578125,6.207667578125,0.159087421132171,6.15906494140625,6.231176708984375,6.457334448242187,6.638260639648437,"[6.14124267578125, 6.14519091796875, 6.18091943359375, 6.1425810546875, 6.13955224609375, 6.1597587890625, 6.15880078125, 6.16580859375, 6.1593291015625, 6.6834921875]",tokens/s,10.148739314264132,kWh,0.0001798534508991761,1.9838553442701504e-05,9.415996756352254e-05,0.0002938519719054002,tokens/kWh,214393.66083369893,,s,630,62.07341981506347,0.09852923780168805,0.003793224256528723,0.09755729675292968,0.0990539306640625,0.10562251091003416,0.116593251953125,"[0.09696540832519532, 0.09753353881835937, 0.09761580657958985, 0.09757484436035156, 0.09748332977294921, 0.09760905456542969, 0.09744038391113281, 0.09725856018066406, 0.097170654296875, 0.09735456085205078, 0.09710304260253906, 0.09688652801513672, 0.0971335678100586, 0.09762723541259766, 0.09740480041503906, 0.09724518585205078, 0.09757657623291016, 0.09806499481201172, 0.09752127838134765, 0.09764249420166016, 0.09720438385009765, 0.09697257232666015, 0.09728841400146485, 0.09741276550292968, 0.09663056182861328, 0.09733177947998047, 0.09661440277099609, 0.09646396636962891, 0.09682425689697266, 0.09659200286865234, 0.09654051208496094, 0.09650176239013672, 0.09670009613037109, 0.09744620513916015, 0.09676799774169922, 0.09645875549316406, 0.09689529418945313, 0.09690694427490235, 0.09789158630371093, 0.09698783874511718, 0.0973210220336914, 0.09927062225341797, 0.09761312103271484, 0.09732755279541015, 0.09772061157226562, 0.097451904296875, 0.09886732482910156, 0.09790585327148438, 0.10541343688964844, 0.09823846435546875, 0.09815039825439453, 0.09759539031982421, 0.09692160034179688, 0.09750323486328125, 0.09784925079345703, 0.09721785736083985, 0.09688758087158203, 0.0976527328491211, 0.09726383972167969, 0.09765865325927735, 0.09721855926513671, 0.09758924865722657, 0.09764864349365235, 0.09775222778320312, 0.09713782501220704, 0.09729853057861328, 0.09719583892822266, 0.09734236907958985, 0.0974919662475586, 0.09747561645507813, 0.09722956848144532, 0.09821324920654297, 0.09762246704101563, 0.09892291259765625, 0.09761177825927735, 0.09733257293701172, 0.09767993927001953, 0.09804338836669922, 0.09764717102050781, 0.10099686431884766, 0.0972314224243164, 0.09719779205322265, 0.10010214233398437, 0.09786777496337891, 0.09772646331787109, 0.09727180480957032, 0.097185791015625, 0.09700527954101562, 0.09739933013916016, 0.09713638305664063, 0.09708863830566407, 0.09743577575683594, 0.09724620819091796, 0.09714002990722656, 0.09787232208251953, 0.09708338928222657, 0.09740902709960937, 
0.09733020782470703, 0.09760355377197266, 0.09761894226074219, 0.0975370864868164, 0.09771295928955079, 0.09750556945800781, 0.09761366271972656, 0.09775513458251953, 0.09787548828125, 0.09803414154052735, 0.09738854217529297, 0.09712870025634765, 0.09830368041992188, 0.09701923370361328, 0.09688764953613281, 0.09711949157714844, 0.09703865814208984, 0.09763641357421875, 0.09771164703369141, 0.0969039077758789, 0.09733529663085938, 0.09695795440673828, 0.09746073913574219, 0.09702188873291015, 0.09694384002685547, 0.09717330932617188, 0.0969191665649414, 0.09732189178466796, 0.09760704040527343, 0.09765779113769531, 0.09844048309326171, 0.0981943359375, 0.09768531036376953, 0.09751769256591797, 0.09817862701416015, 0.0979967041015625, 0.09850511932373048, 0.09821593475341797, 0.09710329437255859, 0.09674195098876953, 0.09764383697509765, 0.09743756866455078, 0.09730134582519531, 0.09687350463867188, 0.09899922943115234, 0.09752371215820313, 0.09747865295410156, 0.09722438049316406, 0.09833299255371093, 0.097112060546875, 0.09964134216308594, 0.1003724822998047, 0.09729853057861328, 0.09683328247070312, 0.09722077178955078, 0.09880166625976562, 0.09725782775878906, 0.098868896484375, 0.09785753631591797, 0.0980316162109375, 0.09792511749267578, 0.09793276977539063, 0.09842742156982422, 0.09770598602294922, 0.09837363433837891, 0.09827532958984375, 0.0976135711669922, 0.09820595550537109, 0.09800498962402343, 0.09787702178955078, 0.09729337310791016, 0.09738182067871094, 0.09770646667480469, 0.09713868713378906, 0.09694617462158203, 0.09710694122314453, 0.09760387420654297, 0.09892556762695312, 0.09805385589599609, 0.09802690887451172, 0.09739119720458984, 0.0972484130859375, 0.09805852508544922, 0.10770079803466796, 0.09811558532714844, 0.09762108612060547, 0.10020956420898437, 0.0994978256225586, 0.09857405090332032, 0.09973600006103515, 0.09774479675292969, 0.09780233764648437, 0.09769004821777344, 0.097438720703125, 0.0969208984375, 0.0973267822265625, 0.0979330596923828, 0.09740489959716797, 0.09716121673583984, 0.09950246429443359, 0.09692176055908203, 0.09721011352539062, 0.09713890838623047, 0.09691484832763672, 0.09690313720703125, 0.09720054626464844, 0.09715020751953125, 0.0972869415283203, 0.0971816635131836, 0.09680076599121094, 0.0975257568359375, 0.09709795379638672, 0.09729334259033204, 0.09740742492675782, 0.09744802856445313, 0.09746454620361328, 0.09754214477539062, 0.09806438446044922, 0.09785958099365234, 0.09730662536621094, 0.09780633544921875, 0.09724301147460937, 0.09713062286376953, 0.09703740692138672, 0.09664604949951172, 0.09708338928222657, 0.0976579818725586, 0.09756556701660156, 0.09717884826660156, 0.09952873229980469, 0.09715583801269531, 0.0975189437866211, 0.0973395233154297, 0.09704633331298829, 0.09690799713134765, 0.09740902709960937, 0.09670861053466796, 0.09743666839599609, 0.09726387023925781, 0.09681990051269532, 0.0971014404296875, 0.0970101089477539, 0.09750313568115235, 0.09762006378173828, 0.09734070587158203, 0.09948614501953125, 0.09815289306640625, 0.09773554992675781, 0.09768141174316407, 0.09778240203857422, 0.09719229125976563, 0.09804185485839843, 0.1019775390625, 0.09741779327392579, 0.09667378997802735, 0.0969466552734375, 0.09692364501953125, 0.09724313354492188, 0.09719113922119141, 0.09638582611083985, 0.09623481750488282, 0.09680464172363282, 0.0969896011352539, 0.09684019470214844, 0.09712406158447266, 0.0971103057861328, 0.09711001586914063, 0.09803775787353515, 0.09795513916015625, 0.0974691162109375, 0.09751353454589844, 
0.09759532928466796, 0.09802457427978516, 0.09786009979248046, 0.09791356658935547, 0.09768524932861328, 0.09764774322509766, 0.09797238159179687, 0.09713318634033204, 0.09861865234375, 0.09699827575683594, 0.09690438079833984, 0.09711382293701172, 0.09693484497070312, 0.1039974365234375, 0.09687446594238282, 0.09873923492431641, 0.09806758117675782, 0.09722662353515625, 0.09725660705566407, 0.09707810974121094, 0.09686573028564453, 0.09697494506835938, 0.09711443328857422, 0.09710610961914062, 0.09700361633300782, 0.0981226577758789, 0.09779225921630859, 0.09829759979248047, 0.09787696075439453, 0.09751328277587891, 0.09760377502441406, 0.09738854217529297, 0.09802540588378907, 0.09724630737304688, 0.09685091400146484, 0.09697676849365235, 0.09692111968994141, 0.09751721954345703, 0.09731372833251953, 0.09736761474609375, 0.0973193588256836, 0.09678265380859374, 0.09777097320556641, 0.09704255676269531, 0.09688492584228516, 0.0970462417602539, 0.09698934173583984, 0.09723577880859376, 0.09714278411865235, 0.09729804992675781, 0.0973700180053711, 0.09746070098876954, 0.09781145477294922, 0.09768991851806641, 0.09772723388671875, 0.09787795257568359, 0.09865408325195313, 0.09784333038330079, 0.09878323364257813, 0.0981909408569336, 0.09758761596679688, 0.09720182037353516, 0.09721510314941406, 0.09709500885009766, 0.09707151794433594, 0.0971263656616211, 0.09769107055664063, 0.09775312042236328, 0.09732150268554687, 0.09725746917724609, 0.09755580902099609, 0.09720694732666016, 0.09728975677490234, 0.09787836456298828, 0.10003657531738282, 0.09798796844482421, 0.09771190643310547, 0.0989665298461914, 0.09749497222900391, 0.09758313751220703, 0.10172937774658203, 0.10127388763427735, 0.09799648284912109, 0.09752470397949219, 0.0977427215576172, 0.09757497406005859, 0.09768931579589844, 0.09880409240722657, 0.09783599853515625, 0.09711833953857422, 0.09710454559326172, 0.09730687713623047, 0.09798652648925782, 0.09780873870849609, 0.09764173126220703, 0.09797878265380859, 0.09701187133789063, 0.09833360290527343, 0.09803469085693359, 0.09723670196533203, 0.09739469146728516, 0.09696192169189453, 0.09821222686767578, 0.09682377624511719, 0.09702604675292968, 0.09672704315185547, 0.09736985778808593, 0.09754425811767578, 0.0981542739868164, 0.09732937622070313, 0.09745391845703125, 0.09815830230712891, 0.09772486114501953, 0.10134687805175781, 0.09757663726806641, 0.10163890838623046, 0.09768048095703125, 0.0981083221435547, 0.09684278106689453, 0.09684207916259766, 0.096482177734375, 0.09705171203613282, 0.09717420959472656, 0.0966328353881836, 0.09660620880126954, 0.0963864974975586, 0.09668665313720703, 0.09787564849853515, 0.0972886734008789, 0.0972470703125, 0.0973469467163086, 0.09739110565185546, 0.09739791870117187, 0.09690415954589844, 0.09707315063476563, 0.09727798461914063, 0.09725510406494141, 0.09806025695800781, 0.09778336334228516, 0.09781916809082031, 0.09760307312011719, 0.09780713653564453, 0.09807046508789062, 0.09691779327392579, 0.09668704223632812, 0.09682358551025391, 0.09685453033447265, 0.09755238342285157, 0.09807001495361328, 0.09723481750488282, 0.09682393646240234, 0.09752722930908203, 0.09700409698486329, 0.09722374725341797, 0.09861209869384766, 0.09739065551757813, 0.09658367919921874, 0.09743974304199218, 0.09788825225830078, 0.10003865814208984, 0.10473881530761718, 0.09739376068115234, 0.09801821136474609, 0.09805209350585938, 0.09806556701660156, 0.09792969512939453, 0.09768131256103516, 0.09796246337890625, 0.09859212493896484, 0.09865484619140626, 
0.0982999038696289, 0.09842073822021484, 0.09739878082275391, 0.0968852767944336, 0.09936201477050781, 0.09741734313964844, 0.09740486145019531, 0.09800291442871094, 0.097870849609375, 0.09736934661865235, 0.09847977447509766, 0.09902985382080078, 0.09806134033203125, 0.09730147552490234, 0.09785472106933593, 0.09725414276123047, 0.097091552734375, 0.09753600311279297, 0.09814617919921875, 0.09776860809326172, 0.10032434844970703, 0.09769481658935547, 0.09880636596679687, 0.09799817657470702, 0.09776947021484375, 0.09758342742919922, 0.09723356628417969, 0.09784268951416016, 0.09766553497314454, 0.09726771545410157, 0.09755878448486328, 0.09683299255371093, 0.09747833251953125, 0.09764243316650391, 0.09706358337402343, 0.09719602966308594, 0.0972220458984375, 0.09772198486328125, 0.09770902252197265, 0.09726528167724609, 0.09727616119384766, 0.09692787170410157, 0.09791849517822265, 0.09764201354980469, 0.10104307556152343, 0.09757907104492188, 0.0989675521850586, 0.09807574462890625, 0.09794652557373047, 0.09851273345947266, 0.09767542266845704, 0.09798006439208984, 0.09772271728515625, 0.09822777557373047, 0.09808531188964843, 0.09702642822265625, 0.09720178985595704, 0.0970478744506836, 0.09742540740966797, 0.09791522979736328, 0.0975722885131836, 0.09781954956054688, 0.09729363250732422, 0.09813676452636719, 0.10174262237548828, 0.09803363037109375, 0.09718479919433594, 0.09807341003417969, 0.09729561614990234, 0.09723177337646484, 0.0971277084350586, 0.09723363494873047, 0.09743875122070313, 0.09733382415771484, 0.097766845703125, 0.09807357025146485, 0.09788416290283203, 0.09859609222412109, 0.09808358764648438, 0.09822550201416015, 0.09805891418457031, 0.09818045043945313, 0.09758310699462891, 0.09741145324707032, 0.09704627227783204, 0.09711260986328125, 0.09714911651611328, 0.09721222686767578, 0.09799449920654296, 0.0993828125, 0.09784390258789062, 0.09732713317871093, 0.09782179260253906, 0.097272705078125, 0.09686656188964844, 0.09696803283691406, 0.09692316436767578, 0.09766806030273438, 0.09799852752685546, 0.09726496124267578, 0.09716806030273438, 0.09677619171142578, 0.09778201293945313, 0.09802137756347656, 0.09787171173095703, 0.10172022247314454, 0.09834223937988282, 0.09880671691894531, 0.09861090850830079, 0.09843917083740235, 0.09785529327392578, 0.0986272964477539, 0.09977903747558593, 0.0978634262084961, 0.09729856109619141, 0.09728383636474609, 0.09699686431884766, 0.09702285003662109, 0.09791808319091796, 0.0982847671508789, 0.0976767349243164, 0.09757308959960938, 0.09723075103759765, 0.09739273834228515, 0.09748220825195313, 0.09742115020751953, 0.09739539337158203, 0.09715312194824219, 0.09855084991455078, 0.09786924743652343, 0.09778387451171874, 0.09765715026855469, 0.097974365234375, 0.09777974700927734, 0.0975255355834961, 0.09798246765136719, 0.09776438140869141, 0.09765174102783203, 0.09752336120605469, 0.09723632049560547, 0.09812700653076172, 0.0973629150390625, 0.09778275299072266, 0.09796387481689453, 0.10113433837890624, 0.09850675201416016, 0.0997266845703125, 0.09761039733886719, 0.09785753631591797, 0.0974457244873047, 0.0977143325805664, 0.09708544158935548, 0.09742054748535156, 0.097970947265625, 0.09726726531982421, 0.09879801940917969, 0.11686080169677734, 0.11873017883300781, 0.1197339859008789, 0.11571094512939453, 0.11624038696289063, 0.11676831817626954, 0.1166094741821289, 0.11470642852783203, 0.1151338882446289, 0.11478438568115235, 0.11532742309570312, 0.11550694274902344, 0.11477340698242187, 0.11440624237060547, 0.11785215759277344, 
0.10521702575683593, 0.10968109130859376, 0.10991468811035156, 0.1159331817626953, 0.1149908447265625, 0.11611714935302735, 0.11606486511230468, 0.11169586944580077, 0.11484979248046875, 0.1167831039428711, 0.10999603271484375, 0.09804595184326172, 0.10643222045898437, 0.11655353546142579, 0.10579357147216797, 0.10686508941650391, 0.09892390441894532, 0.09874905395507813, 0.09810329437255859, 0.1093570556640625, 0.11542521667480468]",tokens/s,10.149271650844613,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3798, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1709, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,6504.046592,3721.330688,0.0,3326.083072,3249.416192,s,1,17.414017578125,17.414017578125,0.0,17.414017578125,17.414017578125,17.414017578125,17.414017578125,[17.414017578125],,kWh,0.0002995280871333383,3.30327763859229e-05,0.00011265064567600458,0.00044521150919526575,,MB,1889.583104,4042.194944,0.0,3625.975808,3532.033024,s,10,0.867203971862793,0.0867203971862793,0.0005968515790660602,0.08662177658081055,0.08762337341308593,0.08768650360107422,0.08773700775146484,"[0.08588054656982422, 0.08587254333496094, 0.08660931396484375, 0.08633586883544922, 0.08662614440917969, 0.08698297882080078, 0.0877496337890625, 0.08760934448242187, 0.08692018890380859, 0.08661740875244141]",tokens/s,2952.0159997664737,kWh,2.579792823355307e-06,2.844798573809867e-07,1.7135613123684615e-06,4.5778339931047555e-06,tokens/kWh,55921643.37666971,MB,1902.194688,4105.109504,0.0,3688.890368,3607.643648,s,10,50.7168623046875,5.0716862304687504,0.03591276366724254,5.0900815429687505,5.106282861328125,5.106867993164062,5.107336098632812,"[5.0016416015625, 5.012900390625, 5.0552880859375, 5.063921875, 5.090361328125, 5.10615283203125, 5.107453125, 5.0963046875, 5.09303662109375, 5.0898017578125]",tokens/s,12.421904103909288,kWh,0.00014732052235080748,1.624966519550249e-05,6.893524520663266e-05,0.00023250543275294257,tokens/kWh,270961.4104670966,,s,630,50.71322232055666,0.08049717828659785,0.0009795821879450537,0.08053220748901369,0.08134216079711913,0.0819085422515869,0.08407345359802247,"[0.07928537750244141, 0.07928717041015625, 0.07934070587158203, 0.07931795501708984, 0.07935785675048829, 0.07912857818603515, 0.07912006378173828, 0.079107421875, 0.0795322265625, 0.07950825500488282, 0.07913881683349609, 0.07914208221435547, 0.0790536346435547, 0.07914905548095703, 0.07874559783935547, 0.07897071838378907, 0.07879081726074219, 0.07854694366455078, 
0.07908509063720703, 0.07946217346191406, 0.08268870544433594, 0.07963190460205079, 0.07993312072753907, 0.07856617736816406, 0.07898931121826172, 0.07890329742431641, 0.08491165161132813, 0.07867235565185547, 0.0790282211303711, 0.07913632202148438, 0.07879724884033203, 0.0789599380493164, 0.07950816345214844, 0.08011280059814453, 0.07996482849121093, 0.07969331359863281, 0.07948719787597656, 0.07973353576660157, 0.07992288208007813, 0.07955859375, 0.0793515853881836, 0.07952617645263672, 0.080031005859375, 0.07949193572998046, 0.07977561950683594, 0.07977276611328125, 0.07890211486816406, 0.07849581146240234, 0.07886643218994141, 0.07878028869628906, 0.07904876708984375, 0.07922080230712891, 0.07914208221435547, 0.07864803314208985, 0.078991455078125, 0.07916726684570312, 0.07994582366943359, 0.07913484954833984, 0.07920358276367187, 0.0790676498413086, 0.07955891418457031, 0.07928012847900391, 0.07864729309082032, 0.07894393920898438, 0.07901564788818359, 0.0790063018798828, 0.07875379180908203, 0.07932109069824218, 0.0794972152709961, 0.07955235290527343, 0.07943183898925782, 0.07967510223388671, 0.07953164672851562, 0.07971087646484375, 0.07950534057617188, 0.0800416030883789, 0.07953453063964844, 0.07923097229003906, 0.07919974517822266, 0.07959193420410156, 0.07922191619873047, 0.08002582550048828, 0.07936812591552735, 0.07917577362060547, 0.08002754974365234, 0.07906988525390625, 0.07927193450927734, 0.07952588653564453, 0.07938028717041015, 0.07920006561279297, 0.07921475219726562, 0.07889676666259765, 0.07914701080322266, 0.08052591705322265, 0.07971635437011719, 0.07905023956298828, 0.07917823791503906, 0.07873744201660156, 0.07914460754394531, 0.07932550048828126, 0.08224153900146484, 0.08424425506591797, 0.08102729797363281, 0.07972978973388672, 0.07937836456298829, 0.07954879760742188, 0.079731201171875, 0.07948499298095703, 0.07930879974365235, 0.07984537506103516, 0.08050994873046875, 0.079395263671875, 0.07920697784423827, 0.07935164642333985, 0.07908163452148438, 0.07884595489501953, 0.07923641967773437, 0.07964128112792969, 0.07904243469238281, 0.07912188720703126, 0.07923779296875, 0.079388671875, 0.07892787170410157, 0.08155136108398438, 0.07939481353759766, 0.07929446411132812, 0.0792765121459961, 0.0792548828125, 0.07960169219970703, 0.07935145568847657, 0.07996444702148438, 0.0798966064453125, 0.07987062072753906, 0.07999811553955079, 0.0805383071899414, 0.08046403503417969, 0.08124598693847657, 0.08017100524902344, 0.08020400238037109, 0.08060265350341797, 0.081681884765625, 0.0803927001953125, 0.08056057739257813, 0.08104147338867188, 0.08264498901367187, 0.0811434555053711, 0.0802081298828125, 0.07989871978759766, 0.08018943786621094, 0.0814039077758789, 0.08365647888183594, 0.08006678771972656, 0.08008633422851562, 0.08045638275146484, 0.07982809448242187, 0.07979894256591796, 0.08017638397216797, 0.07980745697021484, 0.08083455657958985, 0.07980441284179687, 0.07999056243896484, 0.08030150604248047, 0.08034178924560546, 0.07985151672363282, 0.07966639709472656, 0.07957353973388671, 0.07949542236328125, 0.08009097290039062, 0.08028790283203124, 0.08037171173095703, 0.08089600372314452, 0.08034918212890625, 0.0804513931274414, 0.08038419342041016, 0.08036147308349609, 0.08001945495605468, 0.08036351776123046, 0.08008860778808594, 0.0797984619140625, 0.07931728363037109, 0.08054927825927734, 0.0798922576904297, 0.07983596801757813, 0.08002150726318359, 0.08010246276855469, 0.07895321655273438, 0.07962028503417969, 0.0802529296875, 0.07960118103027344, 
0.0798023681640625, 0.0794642562866211, 0.07977289581298828, 0.07983529663085938, 0.0799181137084961, 0.0799125747680664, 0.07901609802246094, 0.07884934234619141, 0.07935785675048829, 0.0810146255493164, 0.08003475189208985, 0.0806317138671875, 0.07984339141845703, 0.08015875244140624, 0.08019910430908203, 0.08021049499511719, 0.08026345825195312, 0.08080150604248047, 0.08207974243164062, 0.08087757110595703, 0.08089395141601563, 0.08051097869873047, 0.08095123291015625, 0.0797225570678711, 0.07954134368896484, 0.07949199676513671, 0.07947779083251953, 0.08234595489501953, 0.07989740753173828, 0.07977193450927735, 0.08019753265380859, 0.08035123443603516, 0.07980850982666016, 0.07984742736816407, 0.08110079956054687, 0.08247705841064454, 0.08085708618164063, 0.08064614105224609, 0.07965286254882813, 0.0797286376953125, 0.0804290542602539, 0.08042240142822266, 0.08057875061035157, 0.08041458892822266, 0.07969776153564453, 0.07986646270751953, 0.0824486083984375, 0.08069657897949219, 0.08032105255126953, 0.08349696350097656, 0.08037171173095703, 0.08081817626953125, 0.0806789093017578, 0.08060646057128906, 0.0804769287109375, 0.08170496368408203, 0.08034300994873046, 0.08041852569580078, 0.08009145355224609, 0.08030003356933593, 0.07996819305419922, 0.08028697967529297, 0.07983369445800781, 0.07990057373046874, 0.07987948608398437, 0.08020252990722657, 0.08054541015625, 0.08024102020263672, 0.07984233856201171, 0.08009827423095703, 0.08042700958251953, 0.08012499237060547, 0.08013491058349609, 0.07957933044433593, 0.07957465362548828, 0.07957315063476562, 0.0807405776977539, 0.08373004913330079, 0.08193456268310546, 0.08078864288330079, 0.08061763000488281, 0.08081494140625, 0.08086707305908203, 0.08333132934570313, 0.08178688049316406, 0.08096940612792969, 0.08162131500244141, 0.0820101089477539, 0.08126815795898437, 0.08080441284179687, 0.08044876861572266, 0.08078412628173828, 0.08017011260986329, 0.08082112121582032, 0.08404582214355469, 0.08078521728515625, 0.08044534301757812, 0.08012009429931641, 0.08050688171386719, 0.08074649810791015, 0.08074444580078124, 0.08058396911621094, 0.08029872131347657, 0.07989568328857422, 0.08036441802978515, 0.0804290542602539, 0.08025907135009766, 0.08048230743408204, 0.08112537384033203, 0.08050409698486329, 0.08064892578125, 0.08058172607421875, 0.08129747009277344, 0.0811385269165039, 0.08089993286132813, 0.08197135925292968, 0.08109779357910156, 0.0810341796875, 0.08078336334228516, 0.08108441925048829, 0.0808652801513672, 0.08077439880371094, 0.08068787384033203, 0.08058841705322266, 0.08033932495117188, 0.0812290267944336, 0.08026112365722657, 0.08049459075927734, 0.08035737609863282, 0.08055366516113281, 0.0818271713256836, 0.08074291229248047, 0.0811564178466797, 0.08085657501220703, 0.0815970230102539, 0.08063740539550782, 0.08116422271728516, 0.0807266845703125, 0.08061542510986328, 0.08197238159179687, 0.08075350189208984, 0.08065200042724609, 0.08077954864501953, 0.0809752960205078, 0.0806016616821289, 0.0808729248046875, 0.08099251556396485, 0.08126287841796875, 0.083884033203125, 0.08204637145996094, 0.08100105285644531, 0.08154521942138672, 0.08090569305419922, 0.0811402587890625, 0.08123113250732422, 0.08135958099365234, 0.08372000122070312, 0.08185628509521484, 0.08071616363525391, 0.08041449737548828, 0.08050918579101562, 0.08133602905273438, 0.08070787048339843, 0.08066995239257813, 0.08122035217285156, 0.08053298950195313, 0.08028211212158202, 0.08107008361816406, 0.08058265686035156, 0.08071167755126953, 0.08080079650878906, 
0.0807740478515625, 0.08082998657226563, 0.08096412658691406, 0.08114380645751954, 0.08094924926757813, 0.08068096160888671, 0.08073795318603516, 0.08080790710449219, 0.0809324493408203, 0.0812552032470703, 0.0811171875, 0.08087286376953125, 0.0809703369140625, 0.08081798553466797, 0.0809125747680664, 0.0815841293334961, 0.08112441253662109, 0.08119596862792969, 0.08062886047363281, 0.08061837005615234, 0.08076493072509766, 0.0811151351928711, 0.08068447875976563, 0.08097644805908204, 0.08067791748046875, 0.08088470458984375, 0.0809512939453125, 0.08472774505615234, 0.08102304077148438, 0.08097337341308594, 0.08045613098144531, 0.08085298919677734, 0.08078313446044921, 0.0803985595703125, 0.08412979125976562, 0.08123596954345703, 0.08078646087646485, 0.08106082916259766, 0.08093695831298828, 0.08150748443603516, 0.08142451477050781, 0.08115030670166015, 0.08138790130615234, 0.08106752014160157, 0.08083235168457031, 0.0810564193725586, 0.08097792053222656, 0.08099558258056641, 0.08082713317871094, 0.08071488189697265, 0.08074658966064453, 0.08141222381591796, 0.0810318374633789, 0.08233567810058594, 0.0813091812133789, 0.08104812622070312, 0.08088575744628906, 0.08063999938964844, 0.08094866943359375, 0.08103600311279296, 0.08061116790771485, 0.08044953918457032, 0.08029705810546875, 0.08050371551513671, 0.08088678741455078, 0.08076134490966796, 0.0811484146118164, 0.08082431793212891, 0.08057344055175782, 0.08037888336181641, 0.08091852569580078, 0.08060313415527344, 0.08092169952392578, 0.08175094604492188, 0.08093436431884765, 0.08087763214111328, 0.08179145812988281, 0.08120127868652344, 0.08142221069335938, 0.08094924926757813, 0.08124604797363282, 0.08092057800292969, 0.08082137298583984, 0.08080384063720703, 0.08049549102783203, 0.08026521301269532, 0.0804659194946289, 0.08054169464111328, 0.081157470703125, 0.08098175811767579, 0.08180201721191406, 0.08038633728027343, 0.08026914978027344, 0.08052909088134766, 0.08065248107910156, 0.08090217590332031, 0.08045577239990234, 0.08060425567626953, 0.0804320297241211, 0.08253440093994141, 0.0814911651611328, 0.08122652435302734, 0.08081613159179687, 0.08046134185791015, 0.08093743896484375, 0.0840847396850586, 0.08147138977050782, 0.08639836883544921, 0.0810370864868164, 0.08067984008789063, 0.08070963287353515, 0.08078125, 0.08108348846435547, 0.08155043029785156, 0.08109862518310547, 0.0806329574584961, 0.08111929321289063, 0.08078828430175782, 0.08035327911376954, 0.08027750396728515, 0.08003990173339844, 0.08006454467773437, 0.08088553619384765, 0.08043746948242188, 0.08033017730712891, 0.08109318542480469, 0.08154691314697265, 0.08022022247314453, 0.08035356903076171, 0.08062156677246093, 0.0808980484008789, 0.08050240325927735, 0.08048223876953126, 0.08090054321289063, 0.08031027221679687, 0.080216064453125, 0.08038604736328125, 0.08053964996337891, 0.08109465789794922, 0.08082550048828124, 0.08053142547607423, 0.08059503936767579, 0.08033564758300782, 0.08068013000488282, 0.08104499053955078, 0.08056473541259766, 0.0807383041381836, 0.08082790374755859, 0.08099440002441406, 0.08380377960205078, 0.08108521270751953, 0.08089395141601563, 0.08054783630371094, 0.08071340942382813, 0.0807573471069336, 0.08132374572753906, 0.08558080291748046, 0.08030303955078125, 0.08010348510742188, 0.08026557159423828, 0.08029286193847657, 0.08075740814208984, 0.08122140502929688, 0.08055830383300781, 0.08039186859130859, 0.08015833282470704, 0.08061612701416015, 0.08043843078613282, 0.08054192352294921, 0.0798032989501953, 0.07994338989257813, 
0.08010883331298828, 0.08052194976806641, 0.08078950500488281, 0.08155923461914062, 0.08111135864257812, 0.08091238403320312, 0.08122492980957031, 0.08094364929199219, 0.08097551727294922, 0.08092527770996094, 0.08117033386230468, 0.08107222747802735, 0.08088495635986329, 0.08060189056396484, 0.08115718078613281, 0.08097212982177734, 0.08174566650390624, 0.0802619857788086, 0.08047561645507813, 0.08035587310791016, 0.08058879852294921, 0.0807874526977539, 0.08073136138916015, 0.08013699340820313, 0.08032160186767578, 0.08026771545410157, 0.08044185638427734, 0.08051302337646485, 0.08153702545166015, 0.08068688201904296, 0.0805513916015625, 0.08081689453125, 0.08063734436035157, 0.08054230499267578, 0.08141648101806641, 0.08064176177978516, 0.07997404479980469, 0.0802943344116211, 0.08029798126220702, 0.08126258850097656, 0.08240332794189453, 0.08063795471191407, 0.08086892700195313, 0.08066687774658203, 0.08123564910888671, 0.08106854248046876, 0.08127487945556641, 0.08132300567626953, 0.08067378997802735, 0.08340275573730468, 0.08059478759765625, 0.08028380584716797, 0.08162899017333984, 0.08032889556884766, 0.08063385772705078, 0.08074179077148437, 0.08047062683105469, 0.08284159851074219, 0.08057558441162109, 0.08017298889160156, 0.08058159637451172, 0.08022188568115235, 0.0801488037109375, 0.08041580963134766, 0.08093382263183593, 0.08074240112304687, 0.08035734558105469, 0.0799334716796875, 0.08019305419921875, 0.0803326416015625, 0.08052540588378906, 0.08038249969482422, 0.0804302749633789, 0.08069612884521485, 0.080801025390625, 0.08128361511230468, 0.0814097900390625, 0.08126716613769532, 0.08069894409179687, 0.08068550109863282, 0.08098201751708985, 0.08093081665039062, 0.08187673950195312, 0.08059446716308594, 0.07996691131591797, 0.0797511978149414, 0.08012185668945312, 0.08059494018554687, 0.08079708862304688, 0.08082649230957031, 0.08076335906982422, 0.08091798400878907, 0.08102143859863281, 0.08042201232910157, 0.08080892944335938, 0.08134022521972656, 0.08125247955322265, 0.08058396911621094, 0.08020764923095704]",tokens/s,12.422795696510672,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,26452.058112,13903.003648,0.0,13507.756032,13505.835008,s,1,53.90316796875,53.90316796875,0.0,53.90316796875,53.90316796875,53.90316796875,53.90316796875,[53.90316796875],,kWh,0.0013569878731208365,0.0001496739902800994,0.0005118981872960088,0.002018560050696945,,MB,1244.868608,14796.3904,0.0,14380.171264,14187.445248,s,10,1.9127048645019529,0.19127048645019534,0.0022393600134042175,0.19167635345458983,0.19329945373535157,0.19392224121093748,0.19442047119140624,"[0.18762191772460937, 0.18912060546875, 0.1879237823486328, 0.19106585693359374, 0.19111424255371093, 0.1931610565185547, 0.19454502868652343, 0.19306732177734376, 0.19223846435546876, 0.19284658813476563]",tokens/s,1338.4187218379845,kWh,5.607734266430845e-06,6.184143504098953e-07,3.719279704981173e-06,9.945428321821913e-06,tokens/kWh,25740470.064853184,MB,1267.437568,14859.30496,0.0,14443.085824,14355.886592,s,10,64.43047265625,6.443047265625,0.032960088473115615,6.45414990234375,6.4775452636718756,6.478823413085937,6.479845932617187,"[6.39431640625, 6.40673486328125, 6.417478515625, 6.3996123046875, 6.4531806640625, 6.47464306640625, 6.47202490234375, 6.47726123046875, 6.4801015625, 6.455119140625]",tokens/s,9.777981970754448,kWh,0.00018670186194481874,2.059298404982509e-05,0.0001227757937132182,0.000330070639707862,tokens/kWh,190868.23370827487,,s,630,64.42633708190921,0.10226402711414156,0.0012898268375584128,0.10208265686035156,0.1036439224243164,0.10448080749511718,0.10602578125,"[0.10036243438720703, 0.10116118621826171, 0.10032310485839843, 0.10073308563232422, 0.10051315307617187, 0.10073664093017579, 0.10029334259033203, 0.10072908782958985, 0.10038684844970704, 0.10088169860839843, 0.10077008056640625, 0.10091155242919922, 0.1010708465576172, 0.11337318420410156, 0.10201036834716797, 0.10176358032226562, 0.10084352111816407, 0.10132249450683593, 0.10085810852050782, 0.10034998321533203, 0.10034992218017579, 0.10017091369628907, 0.10019692993164063, 0.1025231704711914, 0.10110269165039062, 0.10062735748291016, 0.1002795181274414, 0.10045836639404297, 0.10055363464355468, 0.10041548919677734, 0.09961676788330077, 0.09992716979980469, 0.10060070037841796, 0.10089462280273437, 0.10093782043457031, 0.10034761810302735, 0.10047004699707031, 0.1007269744873047, 0.10151939392089844, 0.1015693130493164, 0.10300006103515626, 0.10227097320556641, 0.10274800109863282, 0.10267049407958985, 0.10381517028808594, 0.10236070251464843, 0.10202556610107422, 0.10197023773193359, 0.10315542602539063, 0.10119366455078126, 0.10151328277587891, 0.101644287109375, 0.10241024017333984, 0.10305741119384766, 0.10152518463134766, 0.1032391357421875, 0.10216534423828125, 0.10253311920166015, 0.10168057250976563, 0.10115129852294921, 0.10144502258300782, 0.10209750366210937, 0.10154505920410156, 0.10373939514160156, 0.10356326293945313, 0.10150707244873047, 
0.10268262481689452, 0.10245529937744141, 0.10182208251953125, 0.1012391357421875, 0.10196380615234375, 0.10117324829101562, 0.10051321411132813, 0.10216710662841796, 0.10089891052246094, 0.1009249267578125, 0.10104457855224609, 0.10154195404052735, 0.10142710113525391, 0.10302470397949219, 0.10197609710693359, 0.10093363189697266, 0.10045362854003906, 0.10206665802001953, 0.10473910522460937, 0.10562969970703125, 0.10100444793701172, 0.10224259185791015, 0.10178822326660156, 0.10115277099609375, 0.10176633453369141, 0.10317497253417969, 0.10111759948730469, 0.10132924652099609, 0.10182041931152344, 0.10151033782958985, 0.10185196685791016, 0.10187155151367187, 0.101338623046875, 0.10053485107421875, 0.10220953369140626, 0.10108525085449219, 0.1008946533203125, 0.10245465850830078, 0.10111449432373047, 0.10150457763671875, 0.10135801696777344, 0.10119168090820313, 0.10073808288574218, 0.10113279724121094, 0.10365100860595704, 0.10152550506591797, 0.10118169403076172, 0.1014662094116211, 0.10059820556640625, 0.10144358062744141, 0.10078412628173829, 0.10058294677734375, 0.10081715393066407, 0.10548793792724609, 0.101370849609375, 0.10060511779785156, 0.10100991821289063, 0.1010444793701172, 0.10027731323242188, 0.10081948852539062, 0.10025478363037109, 0.10009900665283203, 0.10085715484619141, 0.10062451171875, 0.10020317077636719, 0.10122434997558594, 0.10147590637207031, 0.10184339141845702, 0.10235517120361329, 0.10118940734863281, 0.10153778839111328, 0.10124396514892578, 0.10258322906494141, 0.10143743896484375, 0.10191168212890625, 0.10259852600097656, 0.10182553863525391, 0.10339532470703125, 0.101718017578125, 0.10163398742675782, 0.10208262634277344, 0.10181964874267578, 0.10376473236083984, 0.10134697723388672, 0.10138188934326171, 0.1018024673461914, 0.10167922973632812, 0.10235289764404297, 0.10155817413330079, 0.1032968978881836, 0.10142092895507812, 0.10262358093261718, 0.10320230102539063, 0.10153421020507812, 0.10148659515380859, 0.10238108825683594, 0.1030656967163086, 0.10292876434326172, 0.10121113586425781, 0.10259056091308594, 0.10120489501953125, 0.10171392059326172, 0.10194303894042969, 0.102914306640625, 0.10191667175292969, 0.10158723449707031, 0.10309529876708984, 0.10177849578857422, 0.1016193618774414, 0.10308354949951172, 0.1021956787109375, 0.10147225952148438, 0.10088652801513671, 0.10145378875732422, 0.10198963165283204, 0.10328345489501953, 0.10243280029296875, 0.10183817291259765, 0.10120464324951171, 0.10121414184570313, 0.10114182281494141, 0.102580322265625, 0.10099699401855469, 0.10244313812255859, 0.10162620544433594, 0.10062774658203125, 0.10309056091308594, 0.1017257308959961, 0.10115084838867187, 0.1013064956665039, 0.10229167938232422, 0.10128179168701172, 0.10163996887207032, 0.10105808258056641, 0.10236393737792969, 0.10083318328857421, 0.10045970916748047, 0.10133920288085937, 0.10063241577148438, 0.1040777587890625, 0.10109983825683594, 0.10187113952636719, 0.10124556732177735, 0.10109951782226563, 0.10196125030517578, 0.10138841247558594, 0.10070633697509766, 0.10089868927001953, 0.10243116760253906, 0.10068991851806641, 0.10030073547363282, 0.09989756774902343, 0.10114403533935547, 0.10202057647705078, 0.10198518371582031, 0.10482015991210937, 0.10167894744873048, 0.10177200317382812, 0.10171392059326172, 0.10203075408935547, 0.10180818939208984, 0.10077145385742188, 0.10470492553710938, 0.1021435546875, 0.10176351928710937, 0.10125862121582031, 0.10084400177001954, 0.10150415802001952, 0.10220003509521484, 0.10124317169189453, 
0.10113843536376953, 0.10048089599609375, 0.10187379455566406, 0.10125721740722657, 0.10095410919189453, 0.10115657806396484, 0.10068348693847656, 0.1016336669921875, 0.10167801666259765, 0.10140467071533203, 0.10152041625976563, 0.10140547180175781, 0.10156649780273437, 0.10213168334960937, 0.1015882568359375, 0.10177753448486328, 0.1016341781616211, 0.10081075286865235, 0.10081795501708984, 0.10056166076660156, 0.10112432098388671, 0.1019566421508789, 0.10209401702880859, 0.10171571350097657, 0.10250220489501953, 0.10266851043701172, 0.10273990631103516, 0.10145900726318359, 0.10204259490966797, 0.10268396759033203, 0.10271612548828125, 0.10252217864990235, 0.10450809478759765, 0.10297698974609375, 0.1062733154296875, 0.10311885070800782, 0.10305055999755859, 0.10228921508789063, 0.10263581085205079, 0.10330281829833984, 0.10465122985839843, 0.10291165161132812, 0.10267648315429688, 0.10201785278320312, 0.10172825622558594, 0.1036410903930664, 0.10275836944580079, 0.10153372955322265, 0.10272972869873047, 0.10145692443847656, 0.10133808135986327, 0.10327654266357422, 0.10221363067626953, 0.10117324829101562, 0.10238953399658203, 0.10244322967529297, 0.10134114837646484, 0.10322128295898438, 0.10178752136230469, 0.10142642974853516, 0.1034044189453125, 0.1022525405883789, 0.10173593902587891, 0.10151936340332031, 0.10357196807861328, 0.10545970916748047, 0.10246896362304687, 0.1042597427368164, 0.10185913848876953, 0.10162361907958985, 0.10393689727783204, 0.10114457702636719, 0.10106470489501954, 0.10284012603759765, 0.10444745635986329, 0.10164911651611327, 0.10243276977539062, 0.1015040283203125, 0.10071920013427735, 0.10152012634277344, 0.10233356475830079, 0.10175753784179688, 0.1015156478881836, 0.10244198608398437, 0.10200899505615234, 0.10189266967773437, 0.10399078369140625, 0.10306822204589844, 0.10202067565917969, 0.10245779418945312, 0.10231820678710937, 0.10139593505859375, 0.10258076477050782, 0.10382131195068359, 0.10134937286376954, 0.10128998565673829, 0.10261273956298828, 0.10213999938964843, 0.10184925079345702, 0.10207379150390625, 0.1019459228515625, 0.10142649841308594, 0.1022757797241211, 0.10197811126708985, 0.10199040222167968, 0.10210508728027344, 0.10693427276611328, 0.1017200927734375, 0.10173824310302734, 0.10210940551757812, 0.10190348815917968, 0.10133798217773438, 0.10308729553222656, 0.10161030578613281, 0.10194534301757813, 0.10181552124023438, 0.1022116470336914, 0.10233110046386719, 0.10347315216064454, 0.10262857818603516, 0.10310530853271484, 0.10319593811035156, 0.10360086059570313, 0.1032204818725586, 0.10220416259765625, 0.10290073394775391, 0.1026344985961914, 0.10375548553466797, 0.10194972991943359, 0.10535929870605469, 0.10303215789794921, 0.10319465637207031, 0.1027910385131836, 0.1036583023071289, 0.11184333038330078, 0.10397901153564452, 0.10555126190185547, 0.10255779266357422, 0.10329929351806641, 0.10338893127441406, 0.10256793975830078, 0.10338972473144531, 0.10318895721435548, 0.10333529663085937, 0.10367263793945312, 0.10289497375488281, 0.10263638305664062, 0.10266400146484375, 0.10198630523681641, 0.10245702362060546, 0.10275462341308594, 0.10531782531738282, 0.10222361755371094, 0.10242454528808594, 0.10282854461669921, 0.10223446655273437, 0.10190672302246094, 0.10218876647949218, 0.10263346862792969, 0.10668441772460938, 0.10250444793701172, 0.10354483032226562, 0.10216822052001953, 0.1024781723022461, 0.10489145660400391, 0.10200774383544922, 0.10163168334960937, 0.10589241790771485, 0.10376771545410156, 0.10183074951171875, 
0.1028136978149414, 0.10213529968261718, 0.1016591339111328, 0.10216758728027343, 0.10259555053710938, 0.10191462707519532, 0.1018918685913086, 0.10296578979492188, 0.10215392303466797, 0.10120191955566406, 0.10336847686767578, 0.10237920379638672, 0.101432861328125, 0.1026693115234375, 0.1024135971069336, 0.10123654174804687, 0.10170665740966797, 0.10359603118896485, 0.10299161529541015, 0.10222822570800781, 0.1026355209350586, 0.10362841796875, 0.10217817687988281, 0.10236006164550782, 0.10385724639892578, 0.10238153839111327, 0.1027491226196289, 0.10323558044433594, 0.10312032318115234, 0.10265382385253906, 0.10238582611083985, 0.1031236801147461, 0.10246089935302734, 0.10247196960449219, 0.10211084747314453, 0.10158694458007812, 0.1016380157470703, 0.10183897399902343, 0.10212761688232422, 0.10316185760498046, 0.10330252838134765, 0.10396070098876953, 0.10338502502441406, 0.1034000015258789, 0.10401996612548828, 0.1031492462158203, 0.1026337890625, 0.10459750366210938, 0.10364313507080078, 0.10323919677734375, 0.10303740692138671, 0.10400768280029297, 0.10228326416015625, 0.10362265777587891, 0.10324153900146485, 0.10384611511230468, 0.10195555114746094, 0.10161740875244141, 0.10353279876708985, 0.10453584289550781, 0.10293782043457031, 0.10331033325195313, 0.10237747192382812, 0.10194866943359375, 0.10263401794433594, 0.10265007781982421, 0.1017567367553711, 0.1032806396484375, 0.105455810546875, 0.10206412506103515, 0.10266969299316406, 0.1040943374633789, 0.10320076751708984, 0.10240153503417969, 0.1051632308959961, 0.10312911987304688, 0.10295417785644531, 0.10329942321777344, 0.10180592346191407, 0.10196176147460938, 0.10162236785888672, 0.10176921844482421, 0.10142105865478515, 0.10133673858642578, 0.10211158752441406, 0.10535321807861328, 0.10179766082763672, 0.10225718688964844, 0.10141664123535156, 0.10165631866455078, 0.10186367797851563, 0.1021333770751953, 0.10139830780029296, 0.10500975799560547, 0.10263865661621094, 0.1015603485107422, 0.10199308776855469, 0.1030450210571289, 0.10221766662597656, 0.10192607879638672, 0.10219200134277344, 0.10197196960449219, 0.10176694488525391, 0.10133526611328125, 0.10186579132080079, 0.10542025756835938, 0.10138758087158203, 0.10188687896728515, 0.10147382354736328, 0.10199292755126953, 0.10280960083007812, 0.1036390380859375, 0.1026131820678711, 0.10345452880859375, 0.10253919982910156, 0.1025549087524414, 0.10460358428955079, 0.10241677093505859, 0.10505059051513672, 0.10232832336425782, 0.10311824035644532, 0.10327715301513672, 0.10319462585449218, 0.10341149139404297, 0.10375177764892578, 0.10324390411376953, 0.10418540954589844, 0.10356956481933594, 0.10323139190673829, 0.10308236694335937, 0.10286899566650391, 0.10220252990722656, 0.10257494354248047, 0.10275667572021484, 0.10330044555664063, 0.10297814178466796, 0.10342784118652344, 0.10172621154785157, 0.10361427307128906, 0.10210646057128907, 0.10182844543457031, 0.10198528289794923, 0.10301222229003906, 0.10208268737792969, 0.10169058990478516, 0.10153590393066406, 0.10346969604492187, 0.10289356994628907, 0.10500505828857422, 0.10205593872070312, 0.10501465606689453, 0.1023752670288086, 0.10240080261230469, 0.10249830627441406, 0.10608025360107422, 0.10393791961669922, 0.10244313812255859, 0.10295500946044922, 0.10190633392333984, 0.10177072143554687, 0.10363967895507813, 0.10168355560302735, 0.10250605010986329, 0.10230841827392578, 0.1020948486328125, 0.10212092590332031, 0.10318281555175782, 0.10225055694580078, 0.10214195251464844, 0.10323887634277344, 
0.10245507049560547, 0.1024561309814453, 0.10370451354980469, 0.1036864013671875, 0.102508544921875, 0.10205181121826172, 0.1047429428100586, 0.10298070526123047, 0.1025479965209961, 0.10793408203125, 0.10382131195068359, 0.10249830627441406, 0.1018408660888672, 0.10372509002685547, 0.10211532592773437, 0.10231734466552735, 0.1041824951171875, 0.10357081604003907, 0.10217855834960937, 0.10175987243652344, 0.10232832336425782, 0.10153369903564453, 0.10149667358398437, 0.10166492462158203, 0.10131865692138672, 0.10103590393066406, 0.10162598419189453, 0.10211081695556641, 0.10158086395263671, 0.10152937316894531, 0.10195001220703125, 0.10125721740722657, 0.10112521362304687, 0.10193807983398437, 0.10131644439697265, 0.10176118469238281, 0.10145177459716796, 0.10282803344726563, 0.10183869171142577, 0.10147593688964844, 0.10112265777587891, 0.10120150756835937, 0.10393369293212891, 0.1031072006225586, 0.10255359649658204, 0.10316799926757812, 0.10262528228759765, 0.10239385223388672, 0.10250342559814453, 0.10254386901855468, 0.10382380676269531, 0.10220550537109375, 0.1032806396484375, 0.10244915008544922]",tokens/s,9.778609626666219,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4000, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 420, in dispatch_model attach_align_device_hook_on_blocks( File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 615, in attach_align_device_hook_on_blocks add_hook_to_module(module, hook) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 160, in add_hook_to_module module = hook.init_hook(module) File ""/usr/local/lib/python3.10/dist-packages/accelerate/hooks.py"", line 282, in init_hook set_module_tensor_to_device(module, name, self.execution_device, tied_params_map=self.tied_params_map) File ""/usr/local/lib/python3.10/dist-packages/accelerate/utils/modeling.py"", line 408, in set_module_tensor_to_device new_value = old_value.to(device) torch.cuda.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,847.179776,565.116928,0.0,169.869312,150.669312,s,1,8.0183232421875,8.0183232421875,0.0,8.0183232421875,8.0183232421875,8.0183232421875,8.0183232421875,[8.0183232421875],,kWh,2.2487715045819335e-05,2.4729185654361377e-06,7.296950282009718e-06,3.225758389326519e-05,,MB,1175.339008,625.934336,0.0,209.7152,193.680384,s,10,0.17156870079040526,0.017156870079040525,7.150824827523619e-05,0.01715390396118164,0.017239720726013184,0.017247108173370364,0.017253018131256105,"[0.01707529640197754, 0.01713747215270996, 0.01722015953063965, 0.01717033576965332, 0.01709449577331543, 0.017076351165771483, 0.017069311141967775, 0.017232704162597656, 0.01725449562072754, 0.017238079071044923]",tokens/s,14921.136478893035,kWh,5.192801562426613e-07,5.726572567478513e-08,2.222930801672911e-07,7.988389620847376e-07,tokens/kWh,320465090.150228,MB,1209.30304,628.031488,0.0,211.812352,193.682944,s,10,10.300430114746094,1.0300430114746093,0.006095907972148767,1.0296256713867187,1.0404574340820314,1.0405339538574219,1.0405951696777345,"[1.0406104736328126, 1.0305194091796874, 1.031248779296875, 1.0312947998046875, 1.02873193359375, 1.02442626953125, 1.0207991333007813, 1.026128173828125, 1.026230712890625, 1.0404404296875]",tokens/s,61.16249447662308,kWh,3.014242029250486e-05,3.324213239412338e-06,1.139491065803112e-05,4.486154418994832e-05,tokens/kWh,1404320.8083353443,,s,630,10.294758337020868,0.016340886249239482,0.00038202871475142127,0.0162696475982666,0.016512060356140137,0.016652121543884277,0.01768288057327271,"[0.01618534469604492, 0.016166080474853517, 0.016149503707885742, 0.016114719390869142, 0.01619228744506836, 0.016207872390747072, 0.01624892807006836, 0.018083103179931642, 
0.016312128067016603, 0.01613702392578125, 0.017709087371826172, 0.017514463424682616, 0.016442720413208007, 0.016272031784057617, 0.016226207733154297, 0.0162325439453125, 0.016644096374511717, 0.016863231658935548, 0.01980143928527832, 0.016806528091430663, 0.01628489685058594, 0.016421695709228516, 0.016300031661987305, 0.017071968078613282, 0.02018320083618164, 0.01657276725769043, 0.016338207244873046, 0.01640291213989258, 0.016250240325927735, 0.01624038314819336, 0.01614723205566406, 0.016293600082397462, 0.016119199752807616, 0.016180095672607423, 0.016130367279052735, 0.016138111114501953, 0.016115520477294924, 0.016146720886230467, 0.016512319564819335, 0.016150943756103514, 0.01622198486328125, 0.016152063369750978, 0.016155359268188476, 0.01617305564880371, 0.01645155143737793, 0.016265247344970705, 0.016174591064453125, 0.01626576042175293, 0.016316383361816407, 0.01628335952758789, 0.016334880828857423, 0.01644553565979004, 0.016427167892456053, 0.016374975204467773, 0.016865983963012695, 0.01635865592956543, 0.016429216384887695, 0.016366111755371095, 0.016320192337036132, 0.016338911056518555, 0.016310943603515624, 0.016329824447631838, 0.0163786563873291, 0.016369855880737305, 0.016556896209716798, 0.016360319137573243, 0.016352415084838867, 0.016384063720703126, 0.01628982353210449, 0.01639459228515625, 0.01628438377380371, 0.01651283264160156, 0.01642451286315918, 0.01626358413696289, 0.016348384857177736, 0.016224960327148437, 0.016277151107788088, 0.016281600952148437, 0.016294559478759765, 0.01630745506286621, 0.016321056365966796, 0.016300031661987305, 0.016297983169555663, 0.016281600952148437, 0.016360639572143554, 0.016257856369018556, 0.016383264541625978, 0.01644822311401367, 0.01618534469604492, 0.01628553581237793, 0.0161824951171875, 0.016234527587890624, 0.016337472915649413, 0.016326368331909178, 0.01670003128051758, 0.016451583862304688, 0.016297983169555663, 0.01624025535583496, 0.016447231292724608, 0.016296831130981446, 0.01626041603088379, 0.016308671951293947, 0.016345151901245115, 0.01626054382324219, 0.01649260711669922, 0.016347583770751954, 0.01622220802307129, 0.016229759216308592, 0.016321151733398438, 0.01640457534790039, 0.01632451248168945, 0.016283775329589845, 0.016342432022094726, 0.016592639923095703, 0.01630246353149414, 0.01678985595703125, 0.016316415786743164, 0.016611328125, 0.016271263122558593, 0.016186880111694335, 0.016463743209838868, 0.016263904571533202, 0.01625497627258301, 0.01649692726135254, 0.016396223068237306, 0.016293664932250977, 0.01622185516357422, 0.016336767196655274, 0.016427167892456053, 0.016573087692260742, 0.016338848114013673, 0.016439136505126954, 0.016547712326049804, 0.016216352462768556, 0.016318464279174806, 0.016238431930541992, 0.016175264358520507, 0.01622742462158203, 0.016391071319580078, 0.016252927780151367, 0.016465152740478516, 0.016281375885009764, 0.016202720642089843, 0.016199743270874024, 0.016248767852783202, 0.016275775909423827, 0.01639743995666504, 0.0163023681640625, 0.016212255477905273, 0.016302080154418946, 0.016334272384643553, 0.016247167587280273, 0.016364831924438477, 0.016400991439819337, 0.016254720687866212, 0.016195871353149413, 0.016240928649902345, 0.016285152435302735, 0.016331296920776367, 0.016349407196044923, 0.016222240447998047, 0.01631820869445801, 0.016408000946044922, 0.016411552429199217, 0.016581663131713865, 0.016503648757934572, 0.01638185691833496, 0.016334720611572266, 0.016254047393798828, 0.016377920150756835, 0.01625587272644043, 0.016383968353271484, 
0.016351232528686522, 0.01652943992614746, 0.016960832595825197, 0.01637887954711914, 0.016471168518066407, 0.016738847732543947, 0.016405824661254884, 0.016618175506591795, 0.01637705612182617, 0.016276256561279297, 0.016441343307495117, 0.016547775268554686, 0.0163035831451416, 0.016311904907226563, 0.01628848075866699, 0.016369728088378905, 0.016294143676757813, 0.01622889518737793, 0.01638444709777832, 0.01615443229675293, 0.01644918441772461, 0.016158304214477538, 0.016326911926269533, 0.01628553581237793, 0.0162576961517334, 0.01618230438232422, 0.01628668785095215, 0.016408575057983397, 0.01651203155517578, 0.016327648162841796, 0.01641267204284668, 0.016319488525390623, 0.0162478084564209, 0.016284767150878905, 0.01656921577453613, 0.016326976776123048, 0.01628495979309082, 0.01626976013183594, 0.01643071937561035, 0.01627107238769531, 0.016261119842529297, 0.016369951248168944, 0.01626540756225586, 0.016197824478149415, 0.016213375091552733, 0.016300895690917968, 0.016260896682739258, 0.016415807723999025, 0.016270271301269533, 0.01637817573547363, 0.01616044807434082, 0.016236543655395508, 0.016257280349731444, 0.01657360076904297, 0.01695804786682129, 0.017618719100952147, 0.016459808349609376, 0.016363136291503905, 0.016319232940673827, 0.016245920181274413, 0.01641049575805664, 0.016759008407592774, 0.016460351943969727, 0.016451839447021485, 0.01661516761779785, 0.01637171173095703, 0.01626316833496094, 0.01627676773071289, 0.01622268867492676, 0.01619327926635742, 0.016243551254272463, 0.016339839935302733, 0.01631452751159668, 0.01623632049560547, 0.016347423553466797, 0.01624041557312012, 0.016257024765014647, 0.01633910369873047, 0.016319007873535157, 0.016533376693725586, 0.01663983917236328, 0.016226303100585936, 0.016250080108642578, 0.016251680374145507, 0.016216287612915038, 0.016285472869873047, 0.016359424591064452, 0.01637347221374512, 0.016285247802734375, 0.016345535278320313, 0.01629648017883301, 0.016327648162841796, 0.01624163246154785, 0.016225696563720703, 0.016302495956420898, 0.01626316833496094, 0.01621401596069336, 0.016658687591552736, 0.016342880249023438, 0.016347040176391603, 0.01639423942565918, 0.016302303314208986, 0.016248607635498048, 0.016268287658691406, 0.016326847076416014, 0.016255807876586915, 0.016236223220825196, 0.016261472702026367, 0.016246751785278322, 0.016236543655395508, 0.01627136039733887, 0.016257024765014647, 0.016190591812133788, 0.016354400634765624, 0.016437023162841798, 0.01659903907775879, 0.0164003849029541, 0.01637727928161621, 0.016308576583862304, 0.016316032409667967, 0.016280160903930665, 0.016293888092041017, 0.016680959701538087, 0.016367807388305664, 0.01631820869445801, 0.016369407653808593, 0.01630847930908203, 0.01624684715270996, 0.01617100715637207, 0.01627097511291504, 0.016310527801513673, 0.01631161689758301, 0.016333984375, 0.016303775787353515, 0.016200927734375, 0.01624880027770996, 0.016229183197021484, 0.016313472747802735, 0.01624073600769043, 0.01662553596496582, 0.01632758331298828, 0.01639833641052246, 0.016281024932861328, 0.016156320571899415, 0.016189504623413085, 0.016170719146728515, 0.016247360229492188, 0.016241952896118163, 0.016188127517700195, 0.0161976318359375, 0.016174463272094725, 0.01619568061828613, 0.016279104232788087, 0.01620867156982422, 0.016187423706054686, 0.01617475128173828, 0.016152959823608398, 0.01621824073791504, 0.01617100715637207, 0.01614556884765625, 0.016225120544433595, 0.016190719604492188, 0.01609721565246582, 0.016198463439941406, 0.01621785545349121, 
0.01624684715270996, 0.01619513511657715, 0.016321184158325196, 0.01707721519470215, 0.016247135162353515, 0.01620035171508789, 0.01617417526245117, 0.01625107192993164, 0.016243104934692384, 0.0162491512298584, 0.01619705581665039, 0.01620012855529785, 0.01624281692504883, 0.016232255935668946, 0.0162674560546875, 0.016287967681884764, 0.016301311492919923, 0.016270143508911133, 0.016251840591430665, 0.016470975875854492, 0.016346719741821288, 0.01635968017578125, 0.01616806411743164, 0.016214176177978514, 0.01644822311401367, 0.01630953598022461, 0.016226720809936524, 0.016206144332885742, 0.016356767654418944, 0.016287391662597656, 0.016232927322387694, 0.016054752349853516, 0.016068607330322265, 0.016269535064697267, 0.016299808502197265, 0.016529184341430664, 0.016265439987182616, 0.01657241630554199, 0.016227743148803712, 0.016142719268798827, 0.01614473533630371, 0.016260927200317382, 0.01618934440612793, 0.016200576782226563, 0.016191488265991212, 0.01614028739929199, 0.01616486358642578, 0.016158720016479493, 0.016174463272094725, 0.016179840087890626, 0.01620582389831543, 0.016084224700927734, 0.016105247497558595, 0.01614886474609375, 0.016088863372802735, 0.0161146240234375, 0.016092063903808594, 0.01608803176879883, 0.01627136039733887, 0.016082944869995116, 0.016107616424560548, 0.016078752517700197, 0.01618124771118164, 0.016084991455078124, 0.01609219169616699, 0.016087263107299805, 0.016111711502075195, 0.016060319900512696, 0.01614300727844238, 0.01621206474304199, 0.016220224380493163, 0.016291296005249024, 0.016241119384765624, 0.016189599990844728, 0.01606844711303711, 0.01607587242126465, 0.016102432250976562, 0.016131168365478517, 0.016290687561035157, 0.01624025535583496, 0.01618060874938965, 0.016243616104125978, 0.016089088439941408, 0.016281471252441407, 0.016117984771728516, 0.016254880905151366, 0.01622547149658203, 0.016239423751831055, 0.016184736251831054, 0.01637424087524414, 0.0161342716217041, 0.016227392196655272, 0.016202943801879883, 0.016323328018188477, 0.01640755271911621, 0.016685056686401366, 0.01622425651550293, 0.01620297622680664, 0.01652751922607422, 0.016227167129516603, 0.016176607131958006, 0.01622047996520996, 0.016213375091552733, 0.01630681610107422, 0.016234464645385742, 0.016236352920532226, 0.016271039962768553, 0.016149023056030273, 0.016213695526123048, 0.016197376251220703, 0.016185279846191405, 0.01617519950866699, 0.016189184188842774, 0.016177663803100584, 0.01618092727661133, 0.016149152755737306, 0.01615056037902832, 0.016262208938598633, 0.01662031936645508, 0.01751862335205078, 0.016235679626464845, 0.01617318344116211, 0.016212736129760742, 0.016240543365478515, 0.01620800018310547, 0.016256864547729493, 0.01618751907348633, 0.016209535598754883, 0.01617958450317383, 0.016121856689453123, 0.016230016708374023, 0.01618556785583496, 0.016197792053222658, 0.016544895172119142, 0.016241695404052733, 0.01619651222229004, 0.016208192825317384, 0.016212480545043945, 0.01652284812927246, 0.016308095932006834, 0.016402624130249024, 0.016492639541625977, 0.01631702423095703, 0.01627836799621582, 0.01612019157409668, 0.01611030387878418, 0.016080223083496093, 0.01612953567504883, 0.016309087753295898, 0.01618937683105469, 0.016158336639404296, 0.016204383850097655, 0.01619273567199707, 0.016274175643920898, 0.016408607482910155, 0.01649260711669922, 0.01627849578857422, 0.016237823486328126, 0.016412256240844726, 0.016193824768066405, 0.016272415161132814, 0.01652332878112793, 0.016394527435302734, 0.016310047149658204, 
0.01629484748840332, 0.016317472457885743, 0.016384672164916993, 0.016266656875610352, 0.01635193634033203, 0.016207775115966796, 0.016224384307861328, 0.01623632049560547, 0.016312416076660157, 0.016416767120361327, 0.01638297653198242, 0.016446720123291014, 0.01637715148925781, 0.01624947166442871, 0.016199487686157227, 0.016142335891723633, 0.01615667152404785, 0.016207775115966796, 0.01624073600769043, 0.016214176177978514, 0.016155935287475585, 0.016268863677978515, 0.016169376373291015, 0.0161779842376709, 0.016158239364624023, 0.016234752655029296, 0.016214303970336914, 0.016172767639160156, 0.016172767639160156, 0.016196128845214843, 0.01624393653869629, 0.016200096130371093, 0.01616499137878418, 0.016134143829345703, 0.016231456756591798, 0.016488607406616212, 0.01621036720275879, 0.016297567367553712, 0.016229408264160156, 0.016197376251220703, 0.016141792297363282, 0.016138784408569334, 0.016136192321777345, 0.016191488265991212, 0.016404096603393554, 0.016169343948364258, 0.01628758430480957, 0.01632476806640625, 0.016211231231689452, 0.016173791885375977, 0.016760128021240234, 0.01633350372314453, 0.01641046333312988, 0.016212255477905273, 0.016289663314819337, 0.01669728088378906, 0.01695737648010254, 0.0167589111328125, 0.01629203224182129, 0.016258880615234374, 0.016392223358154295, 0.016162784576416015, 0.01616646385192871, 0.016199840545654296, 0.01620368003845215, 0.016402816772460936, 0.01631395149230957, 0.01631920051574707, 0.01624678421020508, 0.016248832702636717, 0.016178943634033202, 0.016225536346435546, 0.016251039505004884, 0.01621283149719238, 0.016257087707519532, 0.016305280685424806, 0.016239423751831055, 0.016349184036254884, 0.016510463714599608, 0.016834720611572266, 0.017181184768676756, 0.01657859230041504, 0.01641379165649414, 0.016361440658569336, 0.01625164794921875, 0.01623859214782715, 0.016281600952148437, 0.016289024353027343, 0.016282367706298827, 0.016230655670166017, 0.016187135696411132, 0.01619254493713379, 0.01618409538269043, 0.016227712631225587, 0.016533855438232423, 0.016312128067016603, 0.016255647659301757, 0.016242528915405275, 0.016251039505004884, 0.016219135284423827, 0.01634627151489258, 0.016239744186401367, 0.016214847564697266, 0.016195327758789062, 0.016296096801757812, 0.01627891159057617, 0.01627814483642578, 0.016146432876586913, 0.01617296028137207, 0.016127967834472658, 0.0161661434173584, 0.01616896057128906, 0.01642585563659668, 0.016256671905517578, 0.016371328353881835, 0.016536319732666015, 0.016377824783325196, 0.016369760513305662, 0.01719081687927246, 0.020703424453735353, 0.020321727752685547, 0.01634284782409668, 0.01621664047241211, 0.01658857536315918, 0.016308095932006834, 0.016333152770996093, 0.016193599700927735, 0.01853228759765625, 0.016973695755004882]",tokens/s,61.19619124369957,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,7681.9456,3463.380992,0.0,3068.133376,2990.958592,s,1,17.269208984375,17.269208984375,0.0,17.269208984375,17.269208984375,17.269208984375,17.269208984375,[17.269208984375],,kWh,0.00030027408976249605,3.3115269661735504e-05,0.00011231675652000062,0.00044570611594423217,,MB,4325.49888,3576.6272,0.0,3160.408064,3145.649152,s,10,1.0323965148925782,0.10323965148925782,0.0007294582217660479,0.10305582427978516,0.10365469589233399,0.10448214836120605,0.1051441103363037,"[0.1028340835571289, 0.10299177551269531, 0.10347081756591797, 0.10287270355224609, 0.103119873046875, 0.10284572601318359, 0.102572509765625, 0.10322710418701173, 0.10530960083007812, 0.1031523208618164]",tokens/s,2479.6674175778,kWh,3.033725846305835e-06,3.345620975392618e-07,1.9910726123298936e-06,5.359360556174991e-06,tokens/kWh,47766892.582929485,MB,4329.69728,3723.42784,0.0,3307.208704,3216.658432,s,10,62.0150166015625,6.20150166015625,0.01409957620037942,6.2049936523437506,6.2156473144531255,6.219474291992188,6.222535874023437,"[6.1972587890625, 6.18768359375, 6.17258935546875, 6.20286279296875, 6.2073837890625, 6.19045849609375, 6.21155712890625, 6.214796875, 6.20712451171875, 6.22330126953125]",tokens/s,10.158829820971569,kWh,0.0001817351260232784,2.0045434461496105e-05,8.103329936267051e-05,0.000282813859847445,tokens/kWh,222761.35983570025,,s,630,62.01150816345216,0.09843096533881293,0.0009471494447929424,0.0982900161743164,0.09923420028686523,0.09994131202697754,0.10191001358032227,"[0.09723782348632813, 0.0983116455078125, 0.09762255859375, 0.10137519836425782, 0.09755088043212891, 0.09744976043701171, 0.09728169250488282, 0.09756111907958985, 0.09703590393066407, 0.09780684661865234, 0.10037862396240234, 0.0973148193359375, 0.09777766418457032, 0.0974725112915039, 0.09707705688476563, 0.09772828674316406, 0.09792979431152343, 0.09844697570800781, 0.09772054290771484, 0.09857843017578125, 0.09793561553955078, 0.09770771026611329, 0.09800505828857421, 0.09812582397460938, 0.09778995513916015, 0.09822035217285156, 0.09842156982421875, 0.09932275390625, 0.09871084594726562, 0.0985910415649414, 0.09845369720458984, 0.09805023956298828, 0.09848012542724609, 0.09819481658935547, 0.0979807357788086, 0.09826345825195312, 0.09810320281982422, 0.09899027252197265, 0.09824441528320313, 0.09827532958984375, 0.09826850891113281, 0.09778479766845703, 0.09802310180664063, 0.09794329833984375, 0.09807843017578124, 0.1005362548828125, 0.09913788604736327, 0.09873622131347656, 0.09837532806396485, 0.09951692962646484, 0.09856409454345703, 0.09830518341064454, 0.09845209503173828, 0.09969686126708985, 0.09905561828613281, 0.09892969512939453, 0.09897657775878907, 0.09975635528564453, 0.0982936019897461, 0.0993259506225586, 0.09870566558837891, 0.09832217407226562, 0.09861135864257813, 0.09809510040283204, 0.09777561950683594, 0.0982462387084961, 0.09824278259277344, 0.09761004638671875, 0.0987605743408203, 0.09808003234863281, 0.09770877075195313, 0.0976527328491211, 0.09701699066162109, 0.09793405151367188, 0.09780429077148438, 0.10124205017089843, 0.09834815979003907, 0.09773372650146485, 0.09782550048828124, 0.09813113403320313, 0.09808774566650391, 0.09866035461425782, 0.09811293029785156, 0.09853580474853516, 0.09817059326171874, 0.09913954925537109, 0.09872819519042969, 0.09809311676025391, 0.09784742736816407, 0.09989539337158203, 0.09864777374267578, 0.09777999877929687, 0.09727999877929687, 0.09773836517333985, 0.09852761840820312, 0.09782819366455078, 
0.09735234832763671, 0.09760562896728515, 0.09762815856933593, 0.09764832305908203, 0.0973171844482422, 0.09777721405029297, 0.09730912017822266, 0.09786179351806641, 0.10003440093994141, 0.09753363037109375, 0.09754041290283202, 0.09801318359375, 0.09854914855957031, 0.1016611557006836, 0.10004902648925781, 0.0999788818359375, 0.0978803482055664, 0.09867887878417969, 0.09843097686767578, 0.09821501159667968, 0.09782796478271484, 0.0989978256225586, 0.09766934204101563, 0.09828105926513672, 0.09748105621337891, 0.09713494110107422, 0.09699298858642579, 0.0980758056640625, 0.09815740966796875, 0.09839337921142578, 0.09763702392578125, 0.09789011383056641, 0.0977103042602539, 0.09774073791503907, 0.09773881530761719, 0.09795993804931641, 0.09767526245117188, 0.09785139465332031, 0.09827327728271484, 0.0983552017211914, 0.09845145416259765, 0.09827942657470704, 0.09828966522216796, 0.09861254119873047, 0.09810195159912109, 0.09817906951904297, 0.09813318634033204, 0.09808159637451172, 0.09777558135986328, 0.10108112335205079, 0.09795993804931641, 0.09749708557128907, 0.09758719635009766, 0.09900624084472656, 0.09749497222900391, 0.09725981140136719, 0.09754547119140625, 0.09744255828857422, 0.09776947021484375, 0.09739673614501954, 0.09831037139892577, 0.09793513488769531, 0.09755238342285157, 0.09802342224121094, 0.09773056030273437, 0.0977940444946289, 0.09826509094238281, 0.0982459487915039, 0.09823712158203125, 0.09810329437255859, 0.09841868591308593, 0.09808281707763672, 0.09851507568359374, 0.09801920318603516, 0.09764995574951171, 0.0975302734375, 0.09727008056640625, 0.0970857925415039, 0.09728540802001953, 0.0976546859741211, 0.1008185272216797, 0.09777359771728515, 0.09788706970214844, 0.09733734130859376, 0.09765830230712891, 0.09829228973388672, 0.09728582763671875, 0.09726326751708984, 0.09803632354736327, 0.09757052612304687, 0.09784051513671875, 0.09817826843261719, 0.09782374572753906, 0.1005355224609375, 0.09880633544921875, 0.09857794952392578, 0.09845938873291016, 0.09871046447753906, 0.098914306640625, 0.09890406036376953, 0.0984268798828125, 0.09831423950195313, 0.09799680328369141, 0.09783465576171875, 0.09841865539550781, 0.09714236450195313, 0.0973094711303711, 0.09781043243408204, 0.10109091186523438, 0.09763062286376953, 0.09882947540283203, 0.09711856079101562, 0.09856665802001953, 0.09800681304931641, 0.10008393859863281, 0.098334716796875, 0.09760940551757813, 0.09752198028564453, 0.09824018859863282, 0.09782918548583984, 0.09851615905761718, 0.09844204711914062, 0.09834806060791015, 0.09869821166992188, 0.09855506896972656, 0.09838902282714844, 0.09859388732910156, 0.09886585235595703, 0.09843231964111328, 0.09891705322265625, 0.09827737426757813, 0.09747856140136718, 0.09772589111328125, 0.09830825805664062, 0.09813359832763671, 0.09807513427734375, 0.09807027435302734, 0.09761449432373047, 0.09760332489013672, 0.09842098999023438, 0.0990597152709961, 0.09862313842773437, 0.09802582550048829, 0.09827152252197266, 0.09862668609619141, 0.0979767074584961, 0.0977696990966797, 0.0984432601928711, 0.09843917083740235, 0.09896086120605468, 0.10192291259765625, 0.09876934051513672, 0.09808265686035156, 0.09876290893554687, 0.09939923095703125, 0.0988815689086914, 0.09734550476074219, 0.09762076568603516, 0.09781350708007812, 0.09798342132568359, 0.09769519805908203, 0.09737276458740235, 0.09735763549804688, 0.10311698913574219, 0.09764787292480469, 0.0976965103149414, 0.09736160278320312, 0.09749049377441406, 0.09875263977050781, 0.09880774688720703, 
0.09867743682861328, 0.09776274871826172, 0.09809772491455078, 0.09875862121582031, 0.09925398254394531, 0.0988563232421875, 0.09835820770263672, 0.09850470733642579, 0.09856400299072265, 0.09865023803710937, 0.10345001220703125, 0.0988834228515625, 0.09871337890625, 0.0988845443725586, 0.09897551727294922, 0.09818748474121093, 0.0981310043334961, 0.09763116455078125, 0.09737010955810547, 0.09853715515136718, 0.0983043212890625, 0.09787363433837891, 0.09780048370361329, 0.09841171264648438, 0.09985107421875, 0.09841868591308593, 0.09756441497802734, 0.09792070770263672, 0.09761235046386718, 0.09822822570800781, 0.09900016021728515, 0.09871932983398438, 0.0984865264892578, 0.09876009368896485, 0.09899100494384766, 0.09896991729736328, 0.09848595428466797, 0.09875411224365234, 0.10047862243652343, 0.09967696380615235, 0.09947891235351562, 0.0992037124633789, 0.0981849594116211, 0.09810969543457031, 0.09807904052734374, 0.10052166748046874, 0.09792511749267578, 0.09777276611328126, 0.09717021179199219, 0.09814768218994141, 0.09825580596923827, 0.09750908660888671, 0.09732307434082031, 0.09761186981201173, 0.09820336151123046, 0.09804022216796875, 0.0978043212890625, 0.0975052490234375, 0.09770188903808594, 0.09836466979980468, 0.09869798278808593, 0.09816883087158203, 0.09831833648681641, 0.09854911804199219, 0.0986118392944336, 0.09829376220703125, 0.09831014251708985, 0.09771212768554688, 0.09797209930419921, 0.09819967651367187, 0.09902694702148437, 0.09742950439453125, 0.09774819183349609, 0.09801119995117187, 0.09800761413574219, 0.09749520111083984, 0.09719987487792969, 0.0970909423828125, 0.09802127838134765, 0.09926509094238281, 0.09819551849365235, 0.09779151916503906, 0.09751958465576172, 0.097968994140625, 0.09816464233398438, 0.09780585479736328, 0.09862726593017578, 0.09869606781005859, 0.09859891510009766, 0.09899008178710937, 0.09869312286376954, 0.09886105346679687, 0.0987484130859375, 0.09870870208740234, 0.09865090942382812, 0.09845916748046875, 0.09785391998291015, 0.09769983673095703, 0.09797427368164062, 0.09799680328369141, 0.09934848022460938, 0.10130585479736329, 0.09814704132080078, 0.09845680236816406, 0.09804841613769531, 0.09833897399902344, 0.09837567901611328, 0.09800624084472656, 0.09798531341552734, 0.09794764709472656, 0.09841458892822266, 0.10112204742431641, 0.0983531494140625, 0.09916588592529296, 0.098570556640625, 0.09947955322265625, 0.09857433319091796, 0.09861734771728516, 0.09861666870117188, 0.09859343719482422, 0.09939523315429688, 0.09853577423095704, 0.09787391662597657, 0.09903923034667969, 0.09834300994873046, 0.09803695678710937, 0.09759404754638672, 0.0977589111328125, 0.09808448028564454, 0.09829036712646484, 0.09874432373046875, 0.09811558532714844, 0.09819136047363282, 0.0979415054321289, 0.09819324493408203, 0.09859292602539063, 0.09813196563720702, 0.0979415054321289, 0.10219545745849609, 0.0984901123046875, 0.09882950592041016, 0.09843558502197265, 0.09887503814697265, 0.10187843322753906, 0.10143743896484375, 0.0990904312133789, 0.09850816345214844, 0.09857087707519531, 0.09838966369628906, 0.09871385955810547, 0.0987853775024414, 0.09917030334472657, 0.09829376220703125, 0.09894092559814453, 0.09800886535644532, 0.09834518432617187, 0.09775923156738281, 0.09819750213623046, 0.09703040313720702, 0.09789360046386719, 0.09802188873291015, 0.09769705963134766, 0.09812044525146485, 0.09834700775146485, 0.09961174774169922, 0.09849689483642578, 0.09826972961425781, 0.09787187194824219, 0.09714892578125, 0.09836748504638672, 
0.09928607940673828, 0.09912944030761718, 0.09867938995361328, 0.09873423767089844, 0.09877718353271485, 0.0985687026977539, 0.09843065643310547, 0.09807917022705079, 0.09949388885498046, 0.10041734313964844, 0.098836669921875, 0.09818851470947265, 0.09787577819824218, 0.09864393615722657, 0.0988436508178711, 0.09821743774414063, 0.0978969955444336, 0.09814812469482422, 0.09859667205810548, 0.0985030746459961, 0.09920909118652343, 0.0983921890258789, 0.09829942321777344, 0.09852976226806641, 0.0985676498413086, 0.09809564971923829, 0.09807023620605469, 0.09877267456054688, 0.09856265258789063, 0.09879347229003907, 0.09886105346679687, 0.09870236968994141, 0.09866748809814453, 0.09843014526367187, 0.09868576049804688, 0.09841458892822266, 0.09810256195068359, 0.09768780517578125, 0.09814822387695313, 0.09763286590576171, 0.09878880310058594, 0.10842784118652343, 0.09778790283203125, 0.09778585815429687, 0.09768450927734375, 0.09782985687255859, 0.09772748565673828, 0.09948028564453125, 0.09832681274414062, 0.0993623046875, 0.09899673461914063, 0.09872112274169922, 0.098259521484375, 0.09817046356201171, 0.09980159759521484, 0.09881394958496094, 0.09873149108886718, 0.09830249786376953, 0.09870950317382812, 0.0984019546508789, 0.09923200225830078, 0.09841439819335937, 0.09834729766845703, 0.09786777496337891, 0.09915392303466797, 0.09800048065185547, 0.09823609924316407, 0.09773744201660156, 0.0982261734008789, 0.09935004425048828, 0.09791126251220703, 0.09786685180664062, 0.0976553955078125, 0.09799088287353516, 0.09914297485351563, 0.09809590148925781, 0.09798863983154296, 0.09837359619140625, 0.09810739135742187, 0.09913549041748047, 0.0985224609375, 0.09819404602050781, 0.09873411560058594, 0.101572509765625, 0.09852047729492187, 0.09840614318847657, 0.09890614318847656, 0.098947998046875, 0.09857843017578125, 0.09878883361816407, 0.09827977752685547, 0.09770822143554687, 0.09751513671875, 0.09811392211914062, 0.09811920166015625, 0.09821027374267578, 0.09812368011474609, 0.09780758666992187, 0.09706790161132813, 0.0983531494140625, 0.0986270751953125, 0.09848242950439454, 0.09872930908203124, 0.0981390380859375, 0.09919590759277344, 0.09891311645507812, 0.09811507415771484, 0.09813426971435547, 0.09795417785644531, 0.1010893096923828, 0.09917030334472657, 0.09870130920410156, 0.0994119644165039, 0.09875865936279297, 0.09861507415771484, 0.09883875274658203, 0.09894092559814453, 0.09849836730957032, 0.09796422576904297, 0.09813359832763671, 0.0989659194946289, 0.09770384216308593, 0.09863177490234375, 0.10135142517089844, 0.09857023620605469, 0.09838121795654296, 0.09820816040039063, 0.09786592102050781, 0.09771212768554688, 0.0980802230834961, 0.09855257415771485, 0.09847398376464844, 0.09844940948486328, 0.09907814025878907, 0.09884633636474609, 0.09837401580810547, 0.09837129974365234, 0.09841487884521484, 0.09934454345703125, 0.09922745513916016, 0.10249014282226562, 0.09919181060791016, 0.09877401733398437, 0.09970687866210938, 0.09903513336181641, 0.09886224365234375, 0.09828848266601563, 0.09843427276611329, 0.09834780883789063, 0.09866649627685548, 0.09879859161376953, 0.0982845458984375, 0.09865583801269531, 0.09988508605957032, 0.09835968017578126, 0.09839520263671875, 0.0981984634399414, 0.09803366088867188, 0.09814342498779297, 0.09887123107910156, 0.09982041931152344, 0.09819862365722656, 0.09787075042724609, 0.09864806365966797, 0.09846988677978516, 0.09888883209228516, 0.09873833465576172, 0.1013603515625, 0.09945734405517578, 0.09887097930908204, 0.09985020446777344, 
0.0993845443725586, 0.10000466918945312, 0.09932185363769531, 0.1023815689086914, 0.0984320297241211, 0.09822492980957032, 0.09774851226806641, 0.09762857818603515, 0.09818342590332031, 0.09874022674560547, 0.09806758117675782, 0.09761811065673828, 0.09796784210205078, 0.09859375762939453, 0.0980090560913086, 0.0978578872680664, 0.09864979553222657, 0.09795152282714843, 0.09810326385498047, 0.09909037017822266, 0.09802783966064453, 0.0982630386352539, 0.09849037170410156]",tokens/s,10.15940457921816,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14672.134144,7846.428672,0.0,7451.181056,7445.507072,s,1,32.599328125,32.599328125,0.0,32.599328125,32.599328125,32.599328125,32.599328125,[32.599328125],,kWh,0.0007451603443958343,8.218951153430724e-05,0.00028046911326399995,0.0011078189691941414,,MB,1326.424064,7997.423616,0.0,7581.20448,7570.843648,s,10,1.2462345962524415,0.12462345962524415,0.0007616768434001599,0.12444008255004882,0.12562794189453125,0.12605982818603514,0.12640533721923827,"[0.12415731048583985, 0.12649171447753907, 0.12409785461425782, 0.12390544128417968, 0.12442038726806641, 0.12445977783203124, 0.12467171478271484, 0.123998046875, 0.12450038146972656, 0.12553196716308593]",tokens/s,2054.187877385357,kWh,3.679309774947909e-06,4.0550177105870324e-07,2.442557509600002e-06,6.527369055606614e-06,tokens/kWh,39219476.91621811,MB,1346.707456,8010.006528,0.0,7593.787392,7514.46784,s,10,73.59379541015625,7.359379541015626,0.0465667174478991,7.388698486328125,7.402563427734375,7.404894506835937,7.406759370117188,"[7.2944599609375, 7.31129345703125, 7.30241943359375, 7.30375048828125, 7.3937861328125, 7.3947548828125, 7.4072255859375, 7.40204541015625, 7.40044921875, 7.38361083984375]",tokens/s,8.560504271981838,kWh,0.0002156417623467185,2.3786567240443014e-05,0.00011003842136400003,0.0003494667509511616,tokens/kWh,180274.66083262474,,s,630,73.59000663757323,0.11680953434535435,0.0012684493239230965,0.11676417541503906,0.1180474578857422,0.11875861663818359,0.12120064575195315,"[0.1160970230102539, 0.11539778900146484, 0.11523977661132813, 0.11485711669921875, 0.11529408264160156, 0.1157266845703125, 0.11562582397460938, 0.11551209259033203, 0.11531801605224609, 0.11615103912353515, 0.1164362564086914, 0.11628546905517578, 0.11953740692138672, 0.11644000244140625, 0.11668479919433594, 0.11681110382080079, 0.11589862060546875, 0.11515872192382813, 0.11577827453613282, 0.11583798217773437, 0.11587273406982422, 0.11573862457275391, 0.11602070617675782, 0.11547907257080078, 0.11569273376464843, 0.1153257598876953, 0.11519363403320312, 0.11521027374267578, 0.11570604705810547, 0.11493981170654297, 0.1151468505859375, 0.11531263732910156, 0.11581439971923828, 0.11627315521240235, 0.11587789154052734, 0.11596800231933593, 0.11618099212646485, 0.11576319885253906, 0.11576064300537109, 0.11617740631103515, 0.11597824096679688, 0.11645868682861328, 0.11512300872802735, 
0.11490211486816407, 0.1153647689819336, 0.11534035491943359, 0.11553478240966797, 0.11531673431396484, 0.1161872329711914, 0.11560540771484375, 0.11546009826660156, 0.11487436676025391, 0.1147883529663086, 0.11538636779785157, 0.11564236450195313, 0.115810302734375, 0.11586492919921874, 0.11569423675537109, 0.11648143768310547, 0.11596367645263672, 0.11632109069824219, 0.11588188934326171, 0.11652243041992187, 0.11559958648681641, 0.11577126312255859, 0.11575103759765625, 0.12446310424804688, 0.11985100555419922, 0.11544985961914063, 0.11503164672851562, 0.11566941070556641, 0.11535715484619141, 0.11593782043457031, 0.11550243377685547, 0.11531740570068359, 0.11495833587646484, 0.11456511688232422, 0.11553997039794922, 0.11571405029296875, 0.11636121368408203, 0.11611545562744141, 0.11615792083740234, 0.11890128326416016, 0.11635667419433594, 0.115914306640625, 0.11584559631347656, 0.11539222717285157, 0.1154136962890625, 0.11470642852783203, 0.11508326721191406, 0.11558707427978515, 0.11582463836669922, 0.11537203216552734, 0.11501158142089844, 0.114957763671875, 0.11516326141357422, 0.11580381011962891, 0.11872745513916015, 0.1157734375, 0.11580210876464844, 0.11587789154052734, 0.11562159729003907, 0.1157573471069336, 0.11652294158935547, 0.11644486236572266, 0.11630630493164062, 0.11598841857910157, 0.11673197174072265, 0.11689564514160156, 0.11694646453857421, 0.11592787170410156, 0.11525615692138672, 0.1158153305053711, 0.11586764526367188, 0.11571360015869141, 0.11586399841308594, 0.11580210876464844, 0.11603148651123046, 0.11544287872314453, 0.11631903839111328, 0.11544976043701172, 0.11595785522460937, 0.11578163146972656, 0.11560047912597657, 0.11586809539794922, 0.11636579132080078, 0.12063958740234375, 0.11580620574951171, 0.11587993621826172, 0.11568675231933594, 0.11545043182373046, 0.1161503677368164, 0.11670527648925781, 0.11588813018798828, 0.1155047378540039, 0.11522908782958985, 0.11525059509277344, 0.11543126678466797, 0.11564723205566406, 0.11516722869873047, 0.11557859039306641, 0.11542556762695312, 0.11607596588134765, 0.11633312225341796, 0.11522870635986328, 0.1154017562866211, 0.11590316772460937, 0.11826006317138672, 0.11534429168701171, 0.11591500854492187, 0.11670809936523438, 0.11613763427734375, 0.12308223724365235, 0.11586771392822266, 0.1161674575805664, 0.11587753295898437, 0.11541129302978516, 0.11523619079589843, 0.11494185638427734, 0.11592985534667968, 0.11571609497070312, 0.11527718353271485, 0.11510848236083984, 0.11574009704589844, 0.115050048828125, 0.11455795288085938, 0.11506893157958985, 0.11540659332275391, 0.11612095642089844, 0.11551833343505859, 0.1152194595336914, 0.11522354888916016, 0.11610438537597656, 0.11565139007568359, 0.11557039642333984, 0.11688748931884765, 0.1160683822631836, 0.1158670425415039, 0.115687744140625, 0.11541951751708984, 0.11537961578369141, 0.11546502685546875, 0.11632434844970703, 0.11558697509765625, 0.11517961883544922, 0.11581132507324218, 0.11644620513916015, 0.11604358673095704, 0.11530825805664062, 0.115884033203125, 0.1155150375366211, 0.11602365112304687, 0.11723366546630859, 0.118742431640625, 0.11600518035888673, 0.11558505249023437, 0.11578755187988281, 0.11614051055908203, 0.11612979125976562, 0.1157201919555664, 0.11601510620117188, 0.11636326599121094, 0.11549696350097656, 0.11541407775878906, 0.11514329528808594, 0.11525971221923828, 0.11578777313232422, 0.11533513641357422, 0.11524918365478516, 0.11522563171386718, 0.11552867126464844, 0.11529183959960937, 0.11505286407470704, 0.1149296646118164, 
0.11498700714111328, 0.11531407928466797, 0.11517193603515626, 0.11653289794921876, 0.1161397476196289, 0.11620825958251953, 0.11622185516357422, 0.11541228485107421, 0.11592985534667968, 0.11600665283203125, 0.115593505859375, 0.11508850860595703, 0.11580262756347656, 0.11598588562011719, 0.1156263656616211, 0.115259521484375, 0.11465155029296875, 0.1150750732421875, 0.11833920288085938, 0.11533916473388672, 0.11480111694335937, 0.11530035400390624, 0.11538569641113282, 0.11636803436279297, 0.12136243438720704, 0.11543122863769531, 0.11616275024414062, 0.11640627288818359, 0.11667206573486329, 0.11683017730712891, 0.11662319946289063, 0.11666262054443359, 0.11630825805664062, 0.11626496124267578, 0.11599222564697266, 0.11607689666748047, 0.11547551727294922, 0.11573548889160157, 0.11709235382080078, 0.11671756744384766, 0.1162232666015625, 0.11608751678466797, 0.11692173004150391, 0.11735718536376953, 0.11658649444580078, 0.11714559936523437, 0.11715583801269532, 0.11665203094482422, 0.11717427062988281, 0.11775791931152343, 0.11707599639892578, 0.11885763549804687, 0.1222820816040039, 0.11848438262939454, 0.11823503875732422, 0.11810681915283203, 0.116959228515625, 0.1168015365600586, 0.11633663940429688, 0.11666429138183594, 0.11682614135742188, 0.11663900756835938, 0.11679337310791016, 0.11700838470458984, 0.11733414459228515, 0.11708377838134766, 0.11718873596191406, 0.1166890869140625, 0.11685855865478516, 0.11693148803710937, 0.1179054412841797, 0.11715167999267578, 0.11757353973388672, 0.11725977325439453, 0.11800556945800782, 0.1176514892578125, 0.11745696258544921, 0.11780982208251953, 0.11753852844238281, 0.11695346832275391, 0.11707494354248046, 0.11705862426757813, 0.11680329895019531, 0.11663916778564454, 0.11668316650390625, 0.11693408203125, 0.11776627349853516, 0.11778540802001954, 0.11876544189453125, 0.11841865539550782, 0.1168387222290039, 0.11676118469238281, 0.11687324523925781, 0.11694691467285157, 0.11694489288330077, 0.11751990509033203, 0.1189111328125, 0.11769586944580078, 0.1181910400390625, 0.11804672241210938, 0.11741923522949219, 0.12030483245849609, 0.1166917724609375, 0.1174398422241211, 0.11622467041015624, 0.11866512298583984, 0.1194415054321289, 0.12297529602050782, 0.11594802856445313, 0.11621830749511719, 0.11696947479248047, 0.11690188598632813, 0.11665135955810547, 0.11668956756591797, 0.11745689392089843, 0.11676191711425782, 0.1164909439086914, 0.11732768249511719, 0.11804179382324219, 0.11842662048339844, 0.11953094482421875, 0.11765408325195313, 0.11787264251708984, 0.11691622161865234, 0.117106689453125, 0.11670848083496094, 0.11662834930419921, 0.11696947479248047, 0.11744051361083985, 0.11699814605712891, 0.11681587219238282, 0.11670323181152344, 0.11663565063476562, 0.1160110092163086, 0.11685887908935547, 0.11691958618164063, 0.12249775695800781, 0.11722672271728515, 0.11702531433105469, 0.11724416351318359, 0.11734015655517578, 0.11726831817626954, 0.1171844482421875, 0.1175429458618164, 0.1173873291015625, 0.11728880310058594, 0.1178565444946289, 0.11738658905029296, 0.11741865539550782, 0.11638748931884765, 0.11703126525878907, 0.11667660522460938, 0.11683625793457031, 0.1166562271118164, 0.11718217468261719, 0.11685683441162109, 0.11740393829345704, 0.11711673736572266, 0.1167259521484375, 0.11688150024414062, 0.11669052886962891, 0.11746918487548828, 0.11708175659179687, 0.11732649230957032, 0.11793145751953125, 0.11750662231445312, 0.11720909118652344, 0.11738697814941407, 0.11788111877441407, 0.11718601226806641, 0.12376115417480468, 
0.11826179504394531, 0.11685683441162109, 0.11694719696044922, 0.11667021179199219, 0.11736064147949218, 0.11687117004394532, 0.11719270324707032, 0.1178603515625, 0.11743231964111328, 0.11674419403076172, 0.11679129791259765, 0.11702272033691406, 0.11610470581054687, 0.11585692596435547, 0.12057494354248047, 0.11717779541015624, 0.11758649444580078, 0.11725823974609376, 0.1174835205078125, 0.1171981430053711, 0.11946166229248047, 0.11959593963623047, 0.11737088012695313, 0.116662109375, 0.11677507019042968, 0.11657545471191406, 0.11682233428955079, 0.12017711639404297, 0.11693670654296875, 0.11661443328857422, 0.11666505432128907, 0.1173381118774414, 0.1170022430419922, 0.11962319946289063, 0.11792227172851563, 0.11783155059814453, 0.11711676788330078, 0.11718479919433594, 0.11718653106689453, 0.11725360107421876, 0.117279296875, 0.11751628875732421, 0.11868569946289062, 0.11850713348388672, 0.11791715240478516, 0.11707689666748047, 0.11686502075195312, 0.1164510726928711, 0.11749606323242187, 0.11872255706787109, 0.11727257537841797, 0.11672134399414062, 0.11801615905761718, 0.11646979522705078, 0.11642642974853516, 0.11918994903564453, 0.11587519836425782, 0.11606893157958985, 0.11765711975097656, 0.11686281585693359, 0.1177688980102539, 0.11875027465820312, 0.11722809600830078, 0.11769891357421874, 0.11735187530517578, 0.11708067321777343, 0.11793567657470704, 0.11757612609863281, 0.11745216369628907, 0.11718924713134765, 0.11638784027099609, 0.11650457763671875, 0.11673395538330078, 0.11695225524902343, 0.11682012939453125, 0.11732390594482422, 0.11703145599365235, 0.11689539337158203, 0.11748153686523438, 0.11619120025634766, 0.11635731506347656, 0.11675251007080079, 0.11703660583496094, 0.11725007629394531, 0.11968144226074219, 0.1180540771484375, 0.11764371490478516, 0.1177947540283203, 0.11672006225585937, 0.11757107543945312, 0.1176493148803711, 0.1170315170288086, 0.11792179107666016, 0.1172889633178711, 0.11765964508056641, 0.11678240203857422, 0.11712358093261718, 0.11653548431396485, 0.11950617980957032, 0.11724441528320312, 0.11785855865478516, 0.11759001922607422, 0.1176184310913086, 0.11697792053222657, 0.1198202896118164, 0.12078185272216797, 0.11705443572998046, 0.11721839904785156, 0.11765760040283203, 0.11830364990234375, 0.11834982299804687, 0.11831212615966796, 0.11783046722412109, 0.11733350372314454, 0.11734886169433593, 0.11864268493652344, 0.11715753936767578, 0.11757807922363281, 0.11783164978027344, 0.11652550506591797, 0.11598611450195312, 0.11654316711425781, 0.11622259521484375, 0.11635078430175781, 0.11686128234863281, 0.11669618988037109, 0.11802067565917969, 0.11777244567871094, 0.11728281402587891, 0.11799737548828125, 0.11738950347900391, 0.11746304321289062, 0.11924886322021484, 0.11832860565185546, 0.1176764144897461, 0.11802419281005859, 0.11768608093261719, 0.11771865844726563, 0.11719705963134766, 0.1166028823852539, 0.11690838623046874, 0.11774726104736329, 0.11714844512939453, 0.11816345977783203, 0.11805900573730468, 0.11757500457763671, 0.1172220458984375, 0.11679261016845703, 0.11747708892822266, 0.1173043212890625, 0.11807334136962891, 0.11791126251220703, 0.11809616088867188, 0.1183166732788086, 0.11856524658203126, 0.11776204681396485, 0.11726220703125, 0.11810623931884766, 0.11793612670898437, 0.1179625244140625, 0.11761824035644532, 0.11718927764892578, 0.1170749740600586, 0.116685791015625, 0.11656806182861328, 0.11691203308105469, 0.11700012969970704, 0.11725532531738281, 0.11878707122802734, 0.11782553863525391, 0.11744255828857422, 
0.11743231964111328, 0.11689574432373047, 0.11685244750976563, 0.11709468841552734, 0.11779824066162109, 0.11746985626220703, 0.11810816192626954, 0.11797833251953126, 0.11742902374267578, 0.11738521575927735, 0.11725619506835938, 0.1168936996459961, 0.11785340881347656, 0.1165811538696289, 0.11707596588134765, 0.12080454254150391, 0.11922032165527344, 0.12016304016113281, 0.11584419250488281, 0.11620854187011719, 0.11675852966308593, 0.11642211151123047, 0.11696380615234375, 0.11718252563476562, 0.11659839630126953, 0.11660313415527344, 0.11710671997070313, 0.11742813110351563, 0.11702828979492187, 0.1177910385131836, 0.11766419219970703, 0.11786201477050781, 0.11735798645019531, 0.11648480224609375, 0.11639427185058594, 0.11695906829833984, 0.11676643371582031, 0.11689183807373046, 0.1168939208984375, 0.11706781005859375, 0.11796275329589843, 0.11734754943847656, 0.11653814697265626, 0.11593523406982421, 0.11634194946289063, 0.11845305633544922, 0.11798732757568359, 0.11673395538330078, 0.11744051361083985, 0.1180014419555664, 0.1168016357421875, 0.11706380462646485, 0.11689075469970703, 0.11706813049316406, 0.11738365173339843, 0.11778054046630859, 0.11686707305908203, 0.11774732971191407, 0.116830078125, 0.11597465515136719, 0.11622783660888672, 0.11639389038085937, 0.11652928161621094, 0.11632867431640626, 0.11680915069580078, 0.11649827575683594, 0.11798191833496094, 0.11723571014404296, 0.11740723419189453, 0.11702092742919921, 0.11734239959716797, 0.11795257568359375, 0.11786649322509765, 0.11758592224121094]",tokens/s,8.560945008507955,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,4147.871744,1936.654336,0.0,1541.40672,1525.63712,s,1,12.555927734375,12.555927734375,0.0,12.555927734375,12.555927734375,12.555927734375,12.555927734375,[12.555927734375],,kWh,0.0001595009761916723,1.7586997426018007e-05,5.85617135159984e-05,0.0002356496871336887,,MB,2569.121792,2083.454976,0.0,1667.23584,1626.061824,s,10,0.7675096588134767,0.07675096588134765,0.0005286314990181236,0.07675532531738281,0.0775672966003418,0.07761641578674316,0.07765571113586425,"[0.07755638122558593, 0.07619564819335937, 0.07629379272460937, 0.07766553497314453, 0.07667078399658203, 0.07685852813720703, 0.07681414031982423, 0.07687443542480468, 0.07588390350341796, 0.0766965103149414]",tokens/s,3335.462909949049,kWh,2.2884811028970994e-06,2.523801323756678e-07,1.3445127943593495e-06,3.885374029632117e-06,tokens/kWh,65888122.494152546,MB,2573.447168,2167.341056,0.0,1751.12192,1664.864256,s,10,46.35454833984375,4.635454833984375,0.021237572042927894,4.638671142578126,4.65755693359375,4.6598412109375,4.6616686328125,"[4.66212548828125, 4.57885400390625, 4.64107568359375, 4.6329560546875, 4.63386181640625, 4.63990234375, 4.6299462890625, 4.63743994140625, 4.64133740234375, 
4.65704931640625]",tokens/s,13.590899330551508,kWh,0.00013567638563377008,1.4965091889495944e-05,5.7426534222441e-05,0.00020806801174570706,tokens/kWh,302785.6106828965,,s,630,46.3505684890747,0.07357233093503922,0.0009531439625196623,0.07348726272583009,0.07419739151000977,0.0747845573425293,0.07685058700561524,"[0.07363286590576172, 0.07409756469726562, 0.0744362564086914, 0.07433209228515625, 0.07538934326171876, 0.07499919891357422, 0.0745289306640625, 0.07482441711425782, 0.07418032073974609, 0.07418870544433594, 0.07420249938964844, 0.0743571548461914, 0.07416783905029296, 0.07396227264404297, 0.07418470764160157, 0.07486463928222656, 0.07404326629638672, 0.07400905609130859, 0.07421078491210938, 0.0736135025024414, 0.07364335632324219, 0.0731798095703125, 0.07325251007080077, 0.07327574157714843, 0.07417974090576172, 0.07601004791259766, 0.07395152282714844, 0.07336495971679688, 0.07331046295166016, 0.07320604705810548, 0.07272022247314452, 0.07378345489501953, 0.07450624084472657, 0.07554975891113282, 0.07499257659912109, 0.07408579254150391, 0.07351561737060547, 0.07359270477294921, 0.07390425872802735, 0.0740126724243164, 0.07403724670410156, 0.0737647705078125, 0.0741968994140625, 0.0739186553955078, 0.07388719940185547, 0.07434041595458984, 0.07392675018310547, 0.07404544067382812, 0.07374227142333985, 0.07426227569580078, 0.07396380615234376, 0.07452304077148438, 0.07418418884277343, 0.07326966094970704, 0.07416841888427735, 0.07426048278808593, 0.0731852798461914, 0.07272857666015625, 0.07323820495605468, 0.07423622131347657, 0.07368294525146485, 0.0729393310546875, 0.07306655883789062, 0.07271746826171875, 0.07335830688476562, 0.07344537353515625, 0.07321804809570312, 0.0731852798461914, 0.0731852798461914, 0.07241506958007812, 0.07221673583984375, 0.07236608123779296, 0.07281394958496094, 0.07236006164550782, 0.07230419158935547, 0.07224211120605468, 0.07166761779785157, 0.0713927001953125, 0.07264473724365235, 0.07255072021484375, 0.07473583984375, 0.07268160247802734, 0.07275651550292969, 0.07278435516357422, 0.07289868927001954, 0.07305625915527343, 0.07281868743896484, 0.07282073974609375, 0.07301529693603516, 0.0726566390991211, 0.0725506591796875, 0.0727040023803711, 0.07286579132080079, 0.07556050872802735, 0.07286211395263673, 0.07214316558837891, 0.07196601867675781, 0.07180271911621093, 0.07176371002197265, 0.07261676788330078, 0.07259053039550781, 0.07229727935791015, 0.07170867156982422, 0.07189654541015625, 0.07184028625488281, 0.07237222290039062, 0.07224114990234375, 0.072443359375, 0.07210787200927735, 0.07278224182128906, 0.07250972747802735, 0.07258927917480469, 0.07238220977783204, 0.07290230560302734, 0.07249369812011719, 0.07252374267578125, 0.07265647888183593, 0.07304409790039063, 0.07272684478759765, 0.0730091552734375, 0.07304310607910157, 0.07286415863037109, 0.07288467407226562, 0.07320371246337891, 0.07302758026123046, 0.07314742279052734, 0.07272857666015625, 0.07278931427001953, 0.07241506958007812, 0.07219081878662109, 0.07196466827392578, 0.07222866821289063, 0.07342851257324219, 0.07308354949951172, 0.07262617492675781, 0.07269376373291016, 0.07255449676513671, 0.07333814239501953, 0.07296688079833985, 0.07559101104736328, 0.07369999694824218, 0.07328873443603516, 0.07339107513427734, 0.07309053039550781, 0.07300559997558594, 0.07305625915527343, 0.07314809417724609, 0.07325878143310546, 0.07324931335449218, 0.07400393676757812, 0.07327388763427735, 0.07337983703613281, 0.0735763168334961, 0.07405494689941407, 0.07403401947021485, 
0.07396870422363282, 0.07357103729248046, 0.07399183654785156, 0.07347666931152344, 0.07604576110839843, 0.07377958679199219, 0.07350905609130859, 0.07421542358398438, 0.07330201721191407, 0.07312329864501953, 0.07303017425537109, 0.07366655731201172, 0.07864115142822266, 0.08415436553955079, 0.07364742279052734, 0.07329020690917969, 0.07335948944091797, 0.07350486755371094, 0.07385292816162109, 0.07397711944580078, 0.0733716812133789, 0.07359286499023437, 0.07325762939453125, 0.07280748748779296, 0.07296617889404297, 0.07351798248291015, 0.07389759826660157, 0.07337750244140626, 0.07322895812988281, 0.07452009582519531, 0.07337955474853515, 0.07356902313232422, 0.07339008331298828, 0.07353343963623046, 0.07342082977294923, 0.07342489624023438, 0.07695155334472656, 0.07379273223876953, 0.07364892578125, 0.07393689727783204, 0.07352729797363282, 0.07379558563232422, 0.0732938232421875, 0.07336870574951172, 0.07370022583007813, 0.07356928253173828, 0.07346892547607421, 0.073670654296875, 0.07297577667236328, 0.07353376007080079, 0.07315885162353515, 0.07308054351806641, 0.07339628601074219, 0.07283039855957031, 0.07298957061767578, 0.0731025619506836, 0.07237506866455078, 0.0727441635131836, 0.07294236755371093, 0.07471513366699219, 0.07314841461181641, 0.0736048355102539, 0.07299282836914063, 0.07292540740966796, 0.07327523040771484, 0.07334300994873047, 0.0734434585571289, 0.07329154968261718, 0.07288614654541016, 0.07367910766601563, 0.07335740661621094, 0.0735561294555664, 0.07380156707763671, 0.07639987182617188, 0.0742770233154297, 0.0734806365966797, 0.07368924713134765, 0.07410892486572265, 0.07372140502929687, 0.07384722900390625, 0.07395712280273438, 0.07317324829101562, 0.073385986328125, 0.07311974334716796, 0.07318032073974609, 0.07339644622802734, 0.07318592071533203, 0.07310336303710938, 0.07313203430175781, 0.07336131286621093, 0.07365987396240234, 0.07404771423339844, 0.07361558532714843, 0.07413330841064453, 0.07330595397949219, 0.0732267837524414, 0.07335731506347656, 0.07353392028808593, 0.07341056060791015, 0.07363174438476562, 0.07343718719482421, 0.07351423645019531, 0.07327410888671874, 0.07401612854003906, 0.07365036773681641, 0.07361376190185547, 0.07465369415283203, 0.07373619079589844, 0.07375852966308594, 0.07400876617431641, 0.07398194885253906, 0.07402700805664063, 0.07387071990966797, 0.07386518096923828, 0.07487693023681641, 0.07371958160400391, 0.0737608642578125, 0.07510018920898437, 0.07329811096191406, 0.07327391815185547, 0.07270604705810547, 0.07257087707519531, 0.07240022277832031, 0.07253059387207031, 0.07329177856445312, 0.07334912109375, 0.07274444580078125, 0.0728616943359375, 0.0729912338256836, 0.07232307434082032, 0.07251948547363281, 0.07337388610839844, 0.0736153564453125, 0.07432125091552734, 0.07364790344238281, 0.07312236785888672, 0.07343289947509765, 0.07292569732666015, 0.07576985931396485, 0.07389929962158204, 0.0736541748046875, 0.07360185241699219, 0.07345561981201172, 0.07408844757080078, 0.07387340545654297, 0.07383859252929688, 0.07398162841796875, 0.07588006591796875, 0.0740882568359375, 0.07352614593505859, 0.07344278717041015, 0.07388214111328124, 0.07403929901123046, 0.07345938873291015, 0.07262239837646485, 0.07291494750976563, 0.0723719711303711, 0.07230451202392578, 0.07275558471679687, 0.07326515197753906, 0.07347232055664063, 0.07317052459716797, 0.07319344329833985, 0.07348899078369141, 0.07371161651611328, 0.07345331573486329, 0.07324002838134766, 0.07342540740966796, 0.07329596710205079, 0.07360736083984375, 
0.07365328216552734, 0.07356893157958984, 0.07330847930908203, 0.07306034851074218, 0.07335116577148437, 0.07307263946533203, 0.07320121765136718, 0.07326969909667969, 0.07365427398681641, 0.07371981048583984, 0.07352114868164063, 0.07386930847167969, 0.07362470245361329, 0.07353024291992187, 0.07378514862060546, 0.07361759948730469, 0.07400857543945312, 0.07386930847167969, 0.07413731384277343, 0.07371910095214844, 0.07327638244628906, 0.07323033905029297, 0.07337471771240234, 0.07535308837890625, 0.07372185516357421, 0.07299276733398438, 0.07303158569335938, 0.07302358245849609, 0.07372799682617187, 0.07341670227050781, 0.07338317108154296, 0.07334783935546875, 0.07330544281005859, 0.07347676849365234, 0.07311542510986328, 0.0735293426513672, 0.07365984344482422, 0.07591385650634766, 0.07370563507080079, 0.07315625762939452, 0.07384496307373047, 0.07345891571044921, 0.07628511810302735, 0.07391808319091797, 0.07362105560302734, 0.07378998565673828, 0.07396937561035156, 0.07345926666259765, 0.07433235168457031, 0.07423011016845703, 0.07394675445556641, 0.07360569763183594, 0.0736911392211914, 0.07377362823486328, 0.07383618927001953, 0.07391295623779297, 0.0738971176147461, 0.07325923156738282, 0.07295244598388671, 0.07318246459960938, 0.0730467529296875, 0.07293341064453125, 0.07340982055664062, 0.07308313751220703, 0.07349247741699219, 0.07309523010253906, 0.07319110107421875, 0.07315325164794922, 0.07337369537353515, 0.07328313446044922, 0.07356460571289063, 0.07350819396972656, 0.07349906921386719, 0.07314771270751953, 0.07299919891357422, 0.07346854400634766, 0.0730439682006836, 0.07285555267333985, 0.07292230224609375, 0.07290480041503906, 0.07315309143066406, 0.07519657897949218, 0.07401881408691406, 0.07404105377197266, 0.07354601287841797, 0.07383245086669922, 0.07370956420898438, 0.07348751831054688, 0.07385929870605469, 0.07365023803710938, 0.07372857666015625, 0.07359487915039062, 0.07417641448974609, 0.07311753845214844, 0.07279641723632813, 0.07354573059082031, 0.07366451263427734, 0.07345542144775391, 0.07298377227783204, 0.07326515197753906, 0.07323699188232421, 0.07360355377197265, 0.0772833251953125, 0.073148193359375, 0.0726876449584961, 0.07279430389404297, 0.07270601654052734, 0.07275933074951171, 0.0733406753540039, 0.07349068450927734, 0.07510015869140625, 0.07467622375488281, 0.07333846282958985, 0.07329526519775391, 0.0729706268310547, 0.07345177459716797, 0.07344287872314453, 0.0732783966064453, 0.07405084991455078, 0.07362582397460937, 0.07370502471923829, 0.07397606658935547, 0.07406668853759765, 0.07388159942626953, 0.0736193618774414, 0.07399203491210937, 0.07390847778320313, 0.07416012573242188, 0.07395308685302734, 0.07350905609130859, 0.07260105895996094, 0.07335507202148438, 0.07308771514892579, 0.07347200012207031, 0.07310131072998047, 0.07352320098876954, 0.07299481964111328, 0.07306649780273437, 0.07273395538330078, 0.07285836791992187, 0.07314828491210937, 0.07322022247314453, 0.07353679656982422, 0.07287471771240234, 0.07285478210449219, 0.07389900970458985, 0.07332838439941407, 0.07323824310302735, 0.07327568054199218, 0.07398512268066407, 0.0735282211303711, 0.07319245147705078, 0.07326822662353516, 0.07330611419677735, 0.07298252868652344, 0.07339622497558594, 0.0739405746459961, 0.07770297241210937, 0.0738289566040039, 0.07369091033935547, 0.0736032943725586, 0.07374348449707031, 0.07405455780029296, 0.0736455078125, 0.07373868560791015, 0.07373625946044922, 0.07392054748535157, 0.07358054351806641, 0.07315660858154296, 0.07368879699707032, 
0.07327772521972656, 0.07551385498046875, 0.075683837890625, 0.07353548431396484, 0.07320780944824219, 0.07313203430175781, 0.07348223876953125, 0.0733655014038086, 0.07290265655517578, 0.07260569763183594, 0.07320086669921876, 0.07315916442871094, 0.07353330993652343, 0.07333491516113282, 0.07342832183837891, 0.07371052551269532, 0.07314227294921875, 0.07302143859863282, 0.07370751953125, 0.07368089294433594, 0.07367459106445312, 0.07377935791015625, 0.07438540649414062, 0.07428006744384766, 0.07400537872314453, 0.07431782531738282, 0.07392227172851562, 0.07390995025634765, 0.07448226928710938, 0.07391177368164062, 0.07409062194824219, 0.07399257659912109, 0.07378880310058594, 0.07344400024414062, 0.07318505859375, 0.07306182098388672, 0.07336630249023438, 0.07377715301513672, 0.07372793579101562, 0.07376697540283203, 0.07316070556640625, 0.07347718048095703, 0.07340697479248047, 0.0732656021118164, 0.07319529724121093, 0.07469481658935546, 0.07360723114013672, 0.07332659149169922, 0.07289830780029297, 0.07314969635009766, 0.07314125061035157, 0.07374140930175781, 0.07375555419921875, 0.07340032196044922, 0.0732221450805664, 0.0737628173828125, 0.07332157135009766, 0.07327632141113281, 0.07357014465332032, 0.07454105377197266, 0.07370563507080079, 0.07386930847167969, 0.07355526733398438, 0.0738331527709961, 0.07350873565673828, 0.07565052795410156, 0.07514988708496094, 0.07384893035888672, 0.07375798034667969, 0.07381830596923829, 0.07352374267578125, 0.07337574768066406, 0.08290713500976563, 0.07831756591796875, 0.07396351623535156, 0.07338768005371094, 0.07334127807617187, 0.07366246032714843, 0.07352835083007812, 0.07360157012939453, 0.07343148803710937, 0.0741396484375, 0.07349584197998046, 0.07312662506103515, 0.07303971099853515, 0.07331430053710937, 0.0732733154296875, 0.07362783813476563, 0.07322557067871094, 0.0735013427734375, 0.07346320343017577, 0.07385346984863281, 0.07385298919677734, 0.07446870422363282, 0.07348700714111328, 0.07393484497070313, 0.07396966552734376, 0.074061279296875, 0.07423814392089843, 0.07397411346435546, 0.07403724670410156, 0.07388931274414062, 0.07389231872558594, 0.07379055786132813, 0.07406412506103516, 0.0735666275024414, 0.07350297546386719, 0.0735600357055664, 0.07388368225097657, 0.07351254272460937, 0.07359689331054688, 0.07320006561279296, 0.073635009765625, 0.07366313934326171, 0.07350188446044922, 0.07329027557373047, 0.07324217224121093, 0.07284825897216797, 0.07331948852539062, 0.07365113830566407, 0.0766033935546875, 0.07364749145507812, 0.07342144012451172, 0.07307689666748046, 0.07324604797363281, 0.07338547515869141, 0.07388604736328125, 0.07380035400390625, 0.07372185516357421, 0.0738506851196289, 0.0740040283203125, 0.07414979553222656, 0.07420182037353516, 0.07400857543945312, 0.07376076507568359]",tokens/s,13.592066301160843,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,9997.320192,6193.872896,0.0,5798.62528,5404.427264,s,1,21.414580078125,21.414580078125,0.0,21.414580078125,21.414580078125,21.414580078125,21.414580078125,[21.414580078125],,kWh,0.00040889490950000887,4.509696875311965e-05,0.00015209345500799554,0.0006060853332611241,,MB,5851.189248,6493.765632,0.0,6077.546496,5755.124736,s,10,1.5461013641357424,0.15461013641357424,0.0010709991829966146,0.15421614074707032,0.15553902740478515,0.1563750663757324,0.15704389755249024,"[0.1536700439453125, 0.1537261047363281, 0.15385443115234376, 0.1552923583984375, 0.15485772705078124, 0.15721110534667967, 0.15535324096679687, 0.15371498107910156, 0.15384352111816407, 0.15457785034179689]",tokens/s,1655.7775960769677,kWh,4.525662495128671e-06,4.990994874557971e-07,2.754899639815319e-06,7.779661622399787e-06,tokens/kWh,32906315.521860942,MB,5855.453184,6495.862784,0.0,6079.643648,5755.127296,s,10,93.96771484375,9.396771484374998,0.022288536846970027,9.403882324218749,9.41765830078125,9.423301318359375,9.427815732421875,"[9.3799228515625, 9.384916015625, 9.3537705078125, 9.4024921875, 9.4150244140625, 9.4099326171875, 9.416404296875, 9.4289443359375, 9.4052724609375, 9.37103515625]",tokens/s,6.704430357251609,kWh,0.00027397648633362115,3.0219135124437525e-05,0.00012033097660658569,0.0004245265980646443,tokens/kWh,148400.59559803305,,s,630,93.96464640808102,0.14915023239377945,0.0014576631769397138,0.14882168579101562,0.15035004272460936,0.1517781608581543,0.15478501693725588,"[0.14869322204589844, 0.14762567138671875, 0.1479171447753906, 0.14763845825195313, 0.14811148071289063, 0.14806390380859374, 0.14732322692871094, 0.14759526062011719, 0.1470401611328125, 0.15032931518554687, 0.14794320678710937, 0.14745535278320313, 0.1491240692138672, 0.14827081298828124, 0.14831849670410155, 0.1475722198486328, 0.1472824249267578, 0.14856396484375, 0.1482196502685547, 0.15020841979980468, 0.14824070739746092, 0.14825811767578126, 0.14736863708496092, 0.14863743591308592, 0.1481976623535156, 0.14862748718261717, 0.14834889221191405, 0.14808268737792968, 0.14870527648925783, 0.1483038787841797, 0.14882611083984376, 0.1495715789794922, 0.1484365692138672, 0.1597334747314453, 0.14883030700683594, 0.14878717041015624, 0.14747048950195313, 0.14897372436523437, 0.14865235900878906, 0.1496309814453125, 0.14900428771972657, 0.14825062561035157, 0.1486131134033203, 0.14883021545410155, 0.14932786560058595, 0.14994583129882813, 0.14887580871582032, 0.14981666564941407, 0.14863226318359374, 0.1529896697998047, 0.14901248168945314, 0.14992092895507814, 0.15065589904785157, 0.15179107666015626, 0.14887391662597657, 0.14837120056152345, 0.14838377380371093, 0.14877839660644532, 0.1484888000488281, 0.14904864501953125, 0.14909516906738282, 0.1494608917236328, 0.14846978759765625, 0.1496558074951172, 0.14841241455078125, 0.1487639617919922, 0.14867318725585937, 0.14862339782714845, 0.14770947265625, 0.1483675537109375, 0.148212158203125, 0.14875222778320313, 0.1481748504638672, 0.1482845458984375, 0.1483008575439453, 0.1510084228515625, 0.14925482177734375, 0.1484363250732422, 0.14909698486328124, 0.14778175354003906, 0.1488296661376953, 0.14932199096679688, 0.14869664001464844, 0.1495149688720703, 0.14797145080566407, 0.1482144012451172, 0.14876162719726563, 0.14857110595703124, 0.14824552917480469, 0.1488271026611328, 0.14931942749023439, 0.14919296264648438, 0.14880149841308593, 0.1487803497314453, 0.1488778533935547, 0.14855392456054686, 0.1491531524658203, 
0.1489846649169922, 0.1486736297607422, 0.15232412719726562, 0.14863632202148438, 0.14847152709960937, 0.14796214294433593, 0.14859674072265625, 0.1481171875, 0.15005113220214844, 0.1487667236328125, 0.14937210083007812, 0.14936557006835938, 0.1481011199951172, 0.14915788269042968, 0.1490960693359375, 0.14819570922851563, 0.15090188598632812, 0.1498714904785156, 0.15106405639648438, 0.14832896423339845, 0.1481359100341797, 0.15347920227050782, 0.15166259765625, 0.1485723876953125, 0.14847564697265625, 0.14806620788574218, 0.14822412109375, 0.14807215881347657, 0.14873980712890625, 0.14863491821289063, 0.14871775817871094, 0.1475831298828125, 0.14743382263183594, 0.14791830444335938, 0.14846624755859375, 0.1480701446533203, 0.14804605102539062, 0.14847958374023437, 0.1476182403564453, 0.14829388427734375, 0.14847669982910155, 0.14720098876953125, 0.14772781372070312, 0.14734393310546876, 0.1486005096435547, 0.14904071044921874, 0.14856781005859376, 0.14798489379882812, 0.14783074951171876, 0.14863385009765626, 0.14771382141113282, 0.1479664611816406, 0.14811546325683594, 0.14944169616699218, 0.14830677795410158, 0.14739865112304687, 0.1484963836669922, 0.1483937530517578, 0.14879945373535156, 0.15001983642578126, 0.14955570983886718, 0.14793023681640624, 0.14717837524414062, 0.1480492858886719, 0.1470384063720703, 0.14725164794921874, 0.1484963836669922, 0.1487972412109375, 0.14755027770996093, 0.1514108123779297, 0.15187052917480467, 0.14746124267578126, 0.1473697967529297, 0.14760549926757813, 0.14771955871582032, 0.1472357177734375, 0.1477849884033203, 0.14707341003417967, 0.148093017578125, 0.1483857879638672, 0.14858607482910155, 0.14809129333496093, 0.1475440673828125, 0.14888960266113282, 0.15245843505859374, 0.1484923858642578, 0.14758985900878907, 0.14851277160644533, 0.1549168701171875, 0.15084339904785157, 0.1491494140625, 0.14921142578125, 0.1502882843017578, 0.14894355773925783, 0.1477081298828125, 0.14944050598144532, 0.1493071746826172, 0.14865225219726563, 0.1485701141357422, 0.1485722198486328, 0.14908409118652344, 0.14803762817382812, 0.14895718383789064, 0.14777276611328125, 0.14878172302246093, 0.14904115295410156, 0.14849853515625, 0.14821775817871094, 0.1479959716796875, 0.14994912719726564, 0.14810691833496092, 0.15077375793457032, 0.1483290557861328, 0.14976792907714845, 0.14842880249023438, 0.14974771118164062, 0.14851005554199218, 0.14803190612792969, 0.15026406860351563, 0.14997039794921874, 0.15199215698242188, 0.14902342224121093, 0.15004876708984374, 0.14858610534667968, 0.14860736083984374, 0.14988082885742188, 0.14853324890136718, 0.1490370635986328, 0.14890748596191405, 0.15016397094726563, 0.14825065612792968, 0.14872111511230468, 0.14918428039550782, 0.14949862670898437, 0.14947123718261718, 0.15446220397949217, 0.14959820556640624, 0.1488709716796875, 0.14924205017089845, 0.14839558410644532, 0.14954745483398438, 0.14903923034667968, 0.14988047790527342, 0.1484022979736328, 0.14809097290039064, 0.14888755798339845, 0.14835842895507811, 0.1482406005859375, 0.148603271484375, 0.15128997802734376, 0.1540157470703125, 0.14901043701171876, 0.14885842895507811, 0.14881837463378905, 0.1508900146484375, 0.14998348999023436, 0.14970921325683595, 0.14914093017578126, 0.1487073974609375, 0.14893446350097655, 0.1482922821044922, 0.14963302612304688, 0.1492716827392578, 0.14811839294433593, 0.14913449096679687, 0.1487779541015625, 0.14959507751464843, 0.14836361694335937, 0.14952310180664063, 0.1492141418457031, 0.14838272094726562, 0.14861827087402343, 
0.14846620178222655, 0.1484640655517578, 0.14897500610351563, 0.15043394470214844, 0.14934780883789062, 0.15354365539550782, 0.14910258483886718, 0.14803506469726563, 0.14862736511230468, 0.14973193359375, 0.14824758911132813, 0.14825161743164061, 0.15297946166992188, 0.14886416625976562, 0.14847634887695313, 0.1480258483886719, 0.1501449279785156, 0.1487930908203125, 0.14885906982421876, 0.14982354736328124, 0.14834597778320313, 0.1485832977294922, 0.14977641296386718, 0.1501390380859375, 0.14935600280761718, 0.14840663146972657, 0.14973747253417968, 0.1487237091064453, 0.15372288513183593, 0.1542021179199219, 0.15008563232421876, 0.14929843139648438, 0.1494535675048828, 0.15041944885253905, 0.14890567016601564, 0.14943994140625, 0.15004547119140624, 0.14868695068359375, 0.14909645080566405, 0.15104730224609375, 0.14900314331054687, 0.14810520935058594, 0.14951190185546875, 0.15054006958007812, 0.15104666137695313, 0.148453369140625, 0.1553720703125, 0.15253619384765624, 0.15005477905273437, 0.15027244567871093, 0.1490392608642578, 0.14948953247070312, 0.14966348266601562, 0.14848284912109375, 0.14821693420410156, 0.14854351806640625, 0.14924864196777343, 0.147783935546875, 0.1483994903564453, 0.15148042297363282, 0.14947177124023436, 0.1485823974609375, 0.14839808654785155, 0.1488773193359375, 0.148494140625, 0.1496938934326172, 0.14873458862304687, 0.1487904052734375, 0.1484127960205078, 0.14929533386230467, 0.15060169982910157, 0.14935833740234375, 0.14962728881835938, 0.14855401611328126, 0.14854908752441406, 0.14969091796875, 0.1484633331298828, 0.14830589294433594, 0.14874237060546874, 0.14845526123046876, 0.14877926635742186, 0.1484035186767578, 0.14987881469726563, 0.1501620788574219, 0.14897970581054687, 0.14876022338867187, 0.14869334411621093, 0.1488504638671875, 0.14823440551757813, 0.14950778198242187, 0.14917610168457032, 0.1492875213623047, 0.14885682678222656, 0.1486267547607422, 0.14904124450683592, 0.15527792358398437, 0.1493828125, 0.1489998779296875, 0.15046054077148438, 0.1489331512451172, 0.1493519744873047, 0.14800035095214845, 0.14866111755371095, 0.14909823608398437, 0.14909788513183594, 0.15014556884765626, 0.15066879272460937, 0.14921014404296876, 0.14942384338378906, 0.1540068817138672, 0.1486313018798828, 0.1485221405029297, 0.14935162353515624, 0.15868313598632813, 0.14874176025390626, 0.14904620361328125, 0.1481169891357422, 0.14850204467773437, 0.14870013427734374, 0.15256480407714842, 0.14831610107421875, 0.1491425323486328, 0.14923887634277344, 0.14864175415039063, 0.1484666290283203, 0.14875852966308595, 0.14868829345703125, 0.14889418029785156, 0.14868826293945311, 0.14865306091308594, 0.14826060485839843, 0.1482936248779297, 0.1487441864013672, 0.14824038696289063, 0.14832640075683592, 0.14848410034179688, 0.15093898010253906, 0.14885136413574218, 0.14819244384765626, 0.14928790283203125, 0.14864918518066406, 0.14966636657714844, 0.14883955383300781, 0.14889248657226561, 0.14866152954101564, 0.14918687438964845, 0.1586386260986328, 0.14936473083496093, 0.14888490295410156, 0.14899606323242187, 0.14933056640625, 0.14898585510253906, 0.14900230407714843, 0.14952415466308594, 0.1493362579345703, 0.1493873291015625, 0.14871347045898436, 0.14941094970703125, 0.14929939270019532, 0.15034233093261717, 0.14849827575683594, 0.15302671813964844, 0.1494466552734375, 0.1496821746826172, 0.14883757019042967, 0.14960928344726562, 0.1500603790283203, 0.14789903259277343, 0.14862156677246094, 0.14918415832519533, 0.14916336059570312, 0.14897967529296874, 
0.14984083557128905, 0.14887225341796875, 0.1493144989013672, 0.1508905029296875, 0.14921522521972655, 0.1494970245361328, 0.15022572326660155, 0.1524625244140625, 0.14907379150390626, 0.14920109558105468, 0.14924057006835936, 0.1495421142578125, 0.14883033752441407, 0.14918313598632812, 0.14827705383300782, 0.14834063720703125, 0.14922323608398438, 0.14885699462890625, 0.14836143493652343, 0.14920089721679688, 0.14912666320800783, 0.1480955810546875, 0.1491406707763672, 0.14992057800292968, 0.14986650085449219, 0.1492906494140625, 0.1483206024169922, 0.1494958038330078, 0.1485148468017578, 0.14854960632324218, 0.1516559295654297, 0.15033599853515625, 0.14883602905273438, 0.15114454650878906, 0.1524001007080078, 0.15234979248046876, 0.150059326171875, 0.15003231811523438, 0.15018240356445312, 0.148861083984375, 0.15020191955566406, 0.14874790954589845, 0.14908294677734374, 0.14983331298828126, 0.1494727325439453, 0.1487631072998047, 0.14905596923828124, 0.15092326354980468, 0.14940570068359374, 0.1492699890136719, 0.15017015075683593, 0.15011744689941406, 0.14924691772460938, 0.149712890625, 0.15187557983398436, 0.1520639953613281, 0.14955929565429688, 0.14969241333007813, 0.15002032470703125, 0.15015298461914062, 0.1496303405761719, 0.14942672729492187, 0.14839517211914063, 0.14889573669433595, 0.14893875122070313, 0.1481029510498047, 0.1496693115234375, 0.15147906494140626, 0.15175680541992187, 0.14954202270507813, 0.14962690734863282, 0.14901539611816406, 0.1528070373535156, 0.16052671813964844, 0.14847999572753906, 0.14863999938964845, 0.14976092529296875, 0.14942604064941406, 0.1487962951660156, 0.14859068298339845, 0.14915769958496095, 0.14903091430664062, 0.14839776611328126, 0.1489679412841797, 0.14991973876953124, 0.14869094848632813, 0.14928076171875, 0.1490370635986328, 0.14832231140136717, 0.1493975067138672, 0.14924919128417968, 0.14871034240722655, 0.149972900390625, 0.1496285400390625, 0.14870506286621094, 0.14879356384277342, 0.15167718505859376, 0.1489633026123047, 0.14870906066894532, 0.14868663024902343, 0.14885574340820312, 0.14886431884765625, 0.14913363647460937, 0.148629638671875, 0.14878924560546875, 0.14863063049316405, 0.14892877197265625, 0.14867222595214843, 0.14891702270507812, 0.148428955078125, 0.14850047302246094, 0.14818211364746095, 0.14813043212890625, 0.1492626190185547, 0.14876783752441405, 0.14903146362304687, 0.14881011962890625, 0.14862336730957032, 0.14835507202148437, 0.14837126159667968, 0.14952806091308593, 0.15098358154296876, 0.14858348083496092, 0.14821871948242188, 0.14834471130371094, 0.1480642547607422, 0.1488249969482422, 0.14818304443359376, 0.14829708862304689, 0.14906431579589843, 0.1476807098388672, 0.14768730163574217, 0.14904800415039063, 0.1503406982421875, 0.15102864074707031, 0.1487728576660156, 0.14776934814453124, 0.14785536193847656, 0.1481543731689453, 0.15176237487792968, 0.14841299438476563, 0.14772621154785157, 0.1480316162109375, 0.14856192016601563, 0.14825196838378907, 0.14816712951660158, 0.14846357727050782, 0.14897523498535156, 0.1480587158203125, 0.1484247283935547, 0.1483857879638672, 0.14871347045898436, 0.14871347045898436, 0.14920115661621094, 0.14758476257324218, 0.14891935729980468, 0.14966426086425783, 0.14807699584960937, 0.14752748107910157, 0.14834707641601563, 0.14875033569335938, 0.148094970703125, 0.15096627807617188, 0.14874214172363281, 0.1487667236328125, 0.1492725067138672, 0.14851487731933594, 0.14845709228515624, 0.1480625, 0.14875596618652343, 0.149512451171875, 0.15319485473632813, 
0.14843597412109374, 0.14783999633789063, 0.14840179443359375, 0.14834521484375, 0.14817893981933594, 0.14816188049316406, 0.14835148620605468, 0.14889590454101562, 0.14799026489257813, 0.14823869323730468, 0.14845907592773439, 0.1491922607421875, 0.1486835479736328, 0.14825881958007814, 0.14903091430664062, 0.14790042114257812, 0.1525964813232422]",tokens/s,6.704649291861959,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1896.706048,1057.947648,0.0,662.700032,622.833664,s,1,9.2692841796875,9.2692841796875,0.0,9.2692841796875,9.2692841796875,9.2692841796875,9.2692841796875,[9.2692841796875],,kWh,5.9649310204152544e-05,6.572552757026549e-06,2.1403072677991797e-05,8.762493563917089e-05,,MB,1939.795968,1181.679616,0.0,765.46048,733.871104,s,10,0.6155335044860839,0.06155335044860839,0.00022180813588524277,0.0615022087097168,0.061793860244750975,0.0618812822341919,0.06195121982574463,"[0.06154595184326172, 0.06177443313598633, 0.06167647933959961, 0.061381248474121096, 0.061244766235351564, 0.0612828483581543, 0.06174854278564453, 0.06145206451416016, 0.06145846557617188, 0.06196870422363281]",tokens/s,4158.99375313026,kWh,1.8151124012682475e-06,2.0017424931970968e-07,9.000007200000811e-07,2.9152873705880382e-06,tokens/kWh,87812955.45089355,MB,1944.014848,1192.165376,0.0,775.94624,748.240384,s,10,37.76376586914063,3.7763765869140626,0.010053992689648775,3.7759786376953124,3.7856682861328124,3.7918682006835938,3.796828132324219,"[3.7733896484375, 3.780630859375, 3.7761025390625, 3.762035888671875, 3.761979736328125, 3.78429052734375, 3.771435791015625, 3.798068115234375, 3.775854736328125, 3.77997802734375]",tokens/s,16.682658243965452,kWh,0.00011029425315748479,1.2165164813399175e-05,4.338264581719974e-05,0.0001658420637880837,tokens/kWh,379879.4983672095,,s,630,37.75729852676391,0.059932219883752254,0.0006360604396337902,0.05986535835266113,0.06039576988220215,0.060804128265380855,0.06275842662811279,"[0.05902950286865234, 0.05941420745849609, 0.05929391860961914, 0.0595497932434082, 0.06069657516479492, 0.06140550231933594, 0.05985782241821289, 0.05959964752197266, 0.05993401718139649, 0.05960358428955078, 0.05967468643188477, 0.05986713409423828, 0.05979340744018555, 0.06025328063964844, 0.06011209487915039, 0.06067910385131836, 0.060885761260986326, 0.05981798553466797, 0.05941443252563477, 0.059832191467285155, 0.0596453742980957, 0.059781150817871095, 0.0598823356628418, 0.05989980697631836, 0.05949625778198242, 0.06007212829589844, 0.060018848419189454, 0.059637599945068356, 0.059393726348876956, 0.059488574981689454, 0.06198681640625, 0.0605010871887207, 0.05932287979125977, 0.05945180892944336, 0.059891712188720705, 0.05943078231811524, 0.05938188934326172, 0.06029248046875, 0.059579071044921876, 0.059604961395263674, 0.05943088150024414, 0.05950006484985351, 0.060953056335449216, 0.05972524642944336, 0.059736640930175784, 0.05976019287109375, 
0.059685310363769534, 0.06000230407714844, 0.0598076171875, 0.060353919982910155, 0.059605758666992185, 0.05964137649536133, 0.059609569549560544, 0.05962710571289063, 0.059781375885009765, 0.05970249557495117, 0.059651359558105466, 0.060028736114501956, 0.06010358428955078, 0.06028908920288086, 0.06004038238525391, 0.06013100814819336, 0.06007625579833984, 0.05989718246459961, 0.059832286834716794, 0.05980448150634766, 0.060006271362304686, 0.05983148956298828, 0.059916511535644534, 0.05975875091552734, 0.05994089508056641, 0.06012911987304687, 0.06172083282470703, 0.05999852752685547, 0.06017638397216797, 0.05978726577758789, 0.05953289413452149, 0.05940620803833008, 0.05956047821044922, 0.059913887023925784, 0.05991049575805664, 0.05984988784790039, 0.05936624145507813, 0.05921791839599609, 0.05910444641113281, 0.059636417388916015, 0.05961068725585938, 0.059593441009521485, 0.05960073471069336, 0.05974649429321289, 0.05950611114501953, 0.06014604949951172, 0.059469825744628904, 0.05955088043212891, 0.06031241607666016, 0.059570049285888674, 0.06382777786254883, 0.059993473052978516, 0.05960540771484375, 0.0599967041015625, 0.05989116668701172, 0.05999055862426758, 0.06005724716186524, 0.059963520050048826, 0.059905952453613284, 0.05998579025268555, 0.05991596984863281, 0.06014009475708008, 0.060179710388183594, 0.05990086364746094, 0.060031326293945315, 0.06001193618774414, 0.059881599426269534, 0.06461436462402344, 0.06070697784423828, 0.059822078704833984, 0.06014976119995117, 0.05958019256591797, 0.059611358642578126, 0.05988457489013672, 0.05974310302734375, 0.059920063018798826, 0.05980815887451172, 0.05979878234863281, 0.060035839080810546, 0.0596492805480957, 0.05893734359741211, 0.05932003021240234, 0.05986556625366211, 0.05959158325195312, 0.06453270721435547, 0.059460319519042966, 0.05935449600219726, 0.0594601936340332, 0.059672031402587894, 0.059423263549804685, 0.05957638549804688, 0.05989574432373047, 0.05937356948852539, 0.05966438293457031, 0.05989580917358398, 0.05941398239135742, 0.05999465560913086, 0.0599285774230957, 0.05957222366333008, 0.06003612899780274, 0.06003152084350586, 0.0595865592956543, 0.06011945724487305, 0.06079286575317383, 0.06024192047119141, 0.059811809539794925, 0.05982620620727539, 0.060037120819091794, 0.05986713409423828, 0.05994697570800781, 0.060053535461425785, 0.060561214447021484, 0.06123779296875, 0.060006366729736325, 0.06000028610229492, 0.0599384651184082, 0.060071617126464846, 0.061071582794189457, 0.05976822280883789, 0.059340862274169924, 0.059142112731933594, 0.0588950080871582, 0.059445247650146485, 0.060746910095214844, 0.0603185920715332, 0.05994838333129883, 0.059764511108398435, 0.06009328079223633, 0.05907257461547852, 0.059316158294677734, 0.06000559997558594, 0.05966713714599609, 0.0610263671875, 0.060190399169921874, 0.06332860946655273, 0.060080127716064455, 0.05918339157104492, 0.05908160018920899, 0.05897097778320313, 0.0591129264831543, 0.05926380920410156, 0.05962688064575195, 0.05989852905273438, 0.059498336791992186, 0.05980313491821289, 0.060453697204589846, 0.05951027297973633, 0.060193279266357425, 0.060085342407226565, 0.06000323104858398, 0.060391681671142575, 0.05984844970703125, 0.05957244873046875, 0.05973728179931641, 0.060091201782226565, 0.059797279357910155, 0.0600384635925293, 0.060157886505126955, 0.05985279846191406, 0.06008668899536133, 0.06008457565307617, 0.059256542205810545, 0.05921206283569336, 0.05919295883178711, 0.05922035217285156, 0.05944054412841797, 0.05947865676879883, 
0.05900694274902344, 0.058589183807373046, 0.058448223114013674, 0.058861217498779296, 0.05922796630859375, 0.05954313659667969, 0.0594392318725586, 0.05979388809204102, 0.05943033599853516, 0.05977350234985351, 0.05963481521606445, 0.05946252822875977, 0.059445152282714846, 0.0594741439819336, 0.05924844741821289, 0.0597402229309082, 0.059612895965576174, 0.05934067153930664, 0.059566497802734375, 0.059344894409179685, 0.059436927795410155, 0.059705535888671876, 0.05978339385986328, 0.060058494567871094, 0.061217086791992184, 0.06011958312988281, 0.060386592864990235, 0.05981872177124024, 0.05971068954467774, 0.05962847900390625, 0.05976700973510742, 0.059889278411865234, 0.05996140670776367, 0.059951038360595704, 0.05994905471801758, 0.06019001770019531, 0.06018463897705078, 0.059908447265625, 0.05973430252075195, 0.059392158508300784, 0.059708446502685544, 0.05990572738647461, 0.06007583999633789, 0.05936966323852539, 0.05891206359863281, 0.05847635269165039, 0.05914316940307617, 0.05967184066772461, 0.05973462295532227, 0.059639041900634765, 0.05957708740234375, 0.059535358428955076, 0.05900492858886719, 0.058721694946289066, 0.05905452728271485, 0.059676929473876955, 0.0599463996887207, 0.05945529556274414, 0.05971142578125, 0.060141727447509764, 0.05961724853515625, 0.05935577774047852, 0.05941459274291992, 0.05936848068237305, 0.06106569671630859, 0.05961103820800781, 0.05971308898925781, 0.0596715202331543, 0.05954767990112304, 0.0593919677734375, 0.059991424560546874, 0.05990393447875977, 0.05995187377929687, 0.06003078460693359, 0.060031105041503906, 0.06046121597290039, 0.06048972702026367, 0.05986899185180664, 0.05962895965576172, 0.059703296661376956, 0.05993471908569336, 0.060177024841308595, 0.059996158599853515, 0.05997875213623047, 0.059705886840820316, 0.059478271484375, 0.059619518280029295, 0.05959683227539062, 0.05977088165283203, 0.05957846450805664, 0.05954470443725586, 0.05964905548095703, 0.060953887939453125, 0.05964617538452149, 0.05994112014770508, 0.05961318588256836, 0.059648159027099606, 0.05974204635620117, 0.059868766784667966, 0.05984035110473633, 0.05966700744628906, 0.05974009704589844, 0.059166465759277344, 0.05948441696166992, 0.059743518829345706, 0.059626209259033204, 0.05977088165283203, 0.059466785430908206, 0.060016830444335936, 0.05984700775146484, 0.05983814239501953, 0.05947264099121094, 0.05958860778808594, 0.05971795272827148, 0.06004732894897461, 0.0598504638671875, 0.059963520050048826, 0.059574207305908206, 0.0602391357421875, 0.059908576965332035, 0.060768192291259765, 0.06136975860595703, 0.0603138542175293, 0.060068126678466796, 0.060289344787597655, 0.060337310791015626, 0.06001948928833008, 0.060216991424560544, 0.060170463562011715, 0.06270921707153321, 0.06036108779907227, 0.06159600067138672, 0.06106316757202149, 0.060030975341796876, 0.059569919586181644, 0.059807998657226566, 0.05985452651977539, 0.06025872039794922, 0.059840415954589846, 0.05995481491088867, 0.05985500717163086, 0.060575008392333984, 0.05983942413330078, 0.05963740921020508, 0.05924105453491211, 0.05935488128662109, 0.05969510269165039, 0.059881599426269534, 0.06000966262817383, 0.05968147277832031, 0.05996953582763672, 0.06009980773925781, 0.06042499160766602, 0.06013132858276367, 0.05970095825195312, 0.05981622314453125, 0.05976268768310547, 0.0598355827331543, 0.059919105529785154, 0.05993024063110351, 0.060429790496826175, 0.06083273696899414, 0.06041151809692383, 0.06022409439086914, 0.060437278747558595, 0.0598919677734375, 0.06031167984008789, 
0.060192798614501955, 0.0600159683227539, 0.060015392303466794, 0.059947135925292966, 0.05982527923583984, 0.05988230514526367, 0.059694976806640626, 0.05990412902832031, 0.059529216766357425, 0.06002483367919922, 0.05930521774291992, 0.059411201477050785, 0.05961872100830078, 0.05981961441040039, 0.05950156784057617, 0.05984441757202148, 0.06003760147094726, 0.05964566421508789, 0.05923193740844727, 0.059256481170654296, 0.05979344177246094, 0.06241984176635742, 0.05969689559936524, 0.05947600173950195, 0.05908009719848633, 0.059118144989013674, 0.059276512145996094, 0.05927123260498047, 0.05968764877319336, 0.05987945556640625, 0.05979043197631836, 0.05960793685913086, 0.05959804916381836, 0.05955641555786133, 0.05976505661010742, 0.059786975860595705, 0.05980527877807617, 0.060037185668945316, 0.06002067184448242, 0.059894527435302734, 0.060558494567871095, 0.06014841461181641, 0.0600964469909668, 0.05987916946411133, 0.06045727920532227, 0.06029836654663086, 0.06006889724731445, 0.060042335510253904, 0.06002259063720703, 0.06015871810913086, 0.06035612869262695, 0.060186878204345706, 0.06016041564941406, 0.05997772979736328, 0.05983027267456055, 0.05969900894165039, 0.059649856567382815, 0.05974784088134766, 0.059853694915771485, 0.05954719924926758, 0.05958252716064453, 0.060255615234375, 0.06484662628173828, 0.06008921432495117, 0.06019580841064453, 0.060167743682861326, 0.059859390258789065, 0.060052833557128905, 0.06041667175292969, 0.059988094329833985, 0.05996297454833984, 0.05997187042236328, 0.0613397102355957, 0.06001657485961914, 0.0600186882019043, 0.06005385589599609, 0.05993231964111328, 0.060254207611083986, 0.060037120819091794, 0.06009347152709961, 0.06018899154663086, 0.06035932922363281, 0.060108993530273436, 0.05999801635742188, 0.060259742736816405, 0.06040636825561523, 0.06031561660766602, 0.0604549446105957, 0.060313568115234376, 0.06058111953735352, 0.06035257720947266, 0.06013513565063477, 0.06020540618896485, 0.060650142669677734, 0.06029939270019531, 0.06039459228515625, 0.0603015022277832, 0.06026633453369141, 0.060193313598632815, 0.06002070236206054, 0.060090625762939456, 0.0597212142944336, 0.06006604766845703, 0.060209312438964845, 0.05997151947021485, 0.060497600555419924, 0.06003142547607422, 0.0608133430480957, 0.0596338882446289, 0.05938166427612305, 0.060129150390625, 0.06074399948120117, 0.05991187286376953, 0.059582462310791014, 0.05973516845703125, 0.0598287353515625, 0.059910526275634764, 0.060114944458007816, 0.06220764923095703, 0.06090172958374023, 0.06014963150024414, 0.06012688064575195, 0.06020316696166992, 0.060112319946289065, 0.059360832214355466, 0.05961772918701172, 0.06007859039306641, 0.060385280609130856, 0.06143385696411133, 0.06011084747314453, 0.060055553436279295, 0.06010713577270508, 0.06032313537597656, 0.059980224609375, 0.06004851150512695, 0.06005740737915039, 0.0599090576171875, 0.05995315170288086, 0.06082262420654297, 0.06003977584838867, 0.059983840942382814, 0.060229984283447266, 0.06012313461303711, 0.06006079864501953, 0.06026847839355469, 0.05963462448120117, 0.059711360931396486, 0.05983379364013672, 0.059488960266113285, 0.06020473480224609, 0.0596932487487793, 0.05983251190185547, 0.05987526321411133, 0.05975244903564453, 0.059776992797851564, 0.05978319931030274, 0.059719680786132816, 0.05945910263061523, 0.059443233489990234, 0.059393505096435546, 0.05973417663574219, 0.05972598266601563, 0.059392673492431644, 0.059268447875976564, 0.05935801696777344, 0.05955712127685547, 0.059539520263671875, 
0.059454143524169924, 0.059686752319335935, 0.059668479919433595, 0.05964822387695313, 0.060093441009521485, 0.05984703826904297, 0.05985456085205078, 0.05988217544555664, 0.05971558380126953, 0.05984467315673828, 0.05987033462524414, 0.06001155090332031, 0.06013708877563476, 0.05984476852416992, 0.05984675216674805, 0.0599054069519043, 0.060087936401367184, 0.059974559783935545, 0.06038457489013672, 0.06227555084228516, 0.06062992095947266, 0.06008339309692383, 0.060057952880859376, 0.05984115219116211, 0.059963039398193356, 0.05997308731079101, 0.05985772705078125, 0.05969510269165039, 0.059799327850341796, 0.059815841674804686, 0.06042371368408203, 0.05955686569213867, 0.05938560104370117, 0.05933599853515625, 0.05978367996215821, 0.05993929672241211, 0.06013443374633789, 0.05986515045166016, 0.059652576446533205, 0.05945337677001953, 0.05935878372192383, 0.06280876922607422, 0.060006401062011716, 0.06009267044067383, 0.06003424072265625, 0.060006977081298825, 0.06028265762329101, 0.05951715087890625, 0.059305950164794924, 0.05924252700805664, 0.05968384170532227, 0.059931198120117185, 0.0599571533203125, 0.06061248016357422, 0.05974822235107422, 0.059797889709472654, 0.060174175262451175, 0.06008505630493164, 0.060419841766357424, 0.06277852630615234, 0.061300575256347654, 0.06042086410522461, 0.060246112823486325, 0.060123584747314454, 0.06023667144775391, 0.06026739120483399, 0.06020083236694336, 0.06077040100097656, 0.060211200714111325, 0.05970246505737305, 0.060146495819091796, 0.059123233795166014, 0.05942335891723633, 0.05982428741455078, 0.05946540832519531, 0.05973811340332031, 0.06003628921508789, 0.05971673583984375, 0.05910086441040039, 0.05920767974853516, 0.05960819244384766, 0.05978015899658203, 0.059606849670410154]",tokens/s,16.68551576997571,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3940.745216,2152.660992,0.0,1757.413376,1736.37632,s,1,12.384103515625,12.384103515625,0.0,12.384103515625,12.384103515625,12.384103515625,12.384103515625,[12.384103515625],,kWh,0.0001575624012083419,1.7373295562253167e-05,5.8802824820000654e-05,0.0002337385215905957,,MB,3949.490176,2389.639168,0.0,1973.420032,1922.784256,s,10,0.6248357162475585,0.06248357162475586,0.00040845917177390273,0.062462688446044926,0.06309321594238282,0.06314102363586425,0.06317926979064942,"[0.06231094360351563, 0.06234572982788086, 0.06176588821411133, 0.0626399040222168, 0.06252425765991211, 0.06308259201049805, 0.06240111923217773, 0.06255193710327149, 0.0631888313293457, 0.062024513244628904]",tokens/s,4097.076933076171,kWh,1.8305450974999835e-06,2.0187770505538705e-07,1.1433533452375953e-06,3.1757761477929657e-06,tokens/kWh,80610215.60915416,MB,3953.778688,2410.610688,0.0,1994.391552,1971.314176,s,10,37.614040283203124,3.7614040283203125,0.009069761434271535,3.7606953125,3.7708549560546873,3.7750571166992186,3.7784188452148437,"[3.7592001953125, 3.769921142578125, 
3.752826171875, 3.759096435546875, 3.759722412109375, 3.762710693359375, 3.761668212890625, 3.77925927734375, 3.743501220703125, 3.766134521484375]",tokens/s,16.74906485069438,kWh,0.00010931741821874955,1.2057846609998803e-05,4.829829731916261e-05,0.00016967356214791094,tokens/kWh,371301.2163031061,,s,630,37.60988599777223,0.05969823174249558,0.0006495408563423068,0.05958225631713867,0.060316112518310545,0.06066802577972412,0.06273069122314454,"[0.059224063873291016, 0.0593284797668457, 0.05942825698852539, 0.05926502227783203, 0.05924431991577148, 0.06028684616088867, 0.05945372772216797, 0.059944896697998046, 0.05954841613769531, 0.05943619155883789, 0.05951926422119141, 0.059681663513183596, 0.05923193740844727, 0.05932790374755859, 0.0591099853515625, 0.05921331024169922, 0.059760353088378904, 0.05974095916748047, 0.05978112030029297, 0.059580257415771484, 0.05966864013671875, 0.05972172927856445, 0.060841217041015624, 0.05987744140625, 0.05982454299926758, 0.059902271270751956, 0.05982729721069336, 0.05948915100097656, 0.059799552917480465, 0.05936332702636719, 0.05973606491088867, 0.05953116989135742, 0.05963980865478516, 0.059676769256591794, 0.05936537551879883, 0.05915750503540039, 0.059259647369384764, 0.05924259185791016, 0.05942867279052735, 0.060131072998046875, 0.05946428680419922, 0.05997158432006836, 0.05946774291992187, 0.05980339050292969, 0.059815711975097656, 0.05965465545654297, 0.05939523315429687, 0.05944134521484375, 0.05966505432128906, 0.0596841926574707, 0.059566497802734375, 0.059709121704101566, 0.060635326385498046, 0.061163902282714844, 0.059313823699951175, 0.05962118530273437, 0.0611190071105957, 0.059734016418457034, 0.059719295501708985, 0.0597237434387207, 0.05934246444702149, 0.05961808013916016, 0.05956198501586914, 0.05938412857055664, 0.0601640625, 0.05978521728515625, 0.05979536056518555, 0.0597237434387207, 0.05978291320800781, 0.05971305465698242, 0.06002355194091797, 0.06085027313232422, 0.059870849609375, 0.0609222412109375, 0.05995315170288086, 0.06254572677612305, 0.05985228729248047, 0.05980435180664063, 0.05960489654541016, 0.05949241638183594, 0.059420703887939456, 0.05967052841186524, 0.05947187042236328, 0.05942272186279297, 0.0594411506652832, 0.05948124694824219, 0.059468639373779296, 0.059641857147216794, 0.06030448150634766, 0.060642208099365234, 0.0598337287902832, 0.05967257690429688, 0.059512928009033204, 0.06035411071777344, 0.05919583892822266, 0.05936515045166016, 0.05951504135131836, 0.059543422698974606, 0.059396320343017575, 0.059474334716796876, 0.059340160369873045, 0.05985103988647461, 0.05910940933227539, 0.059148193359375, 0.05972768020629883, 0.06048223876953125, 0.060104705810546874, 0.06002483367919922, 0.05955897521972656, 0.05964896011352539, 0.060294654846191405, 0.05961983871459961, 0.0595865592956543, 0.05946723175048828, 0.059428897857666016, 0.05954406356811524, 0.06356134414672851, 0.05958649444580078, 0.05944364929199219, 0.0598466567993164, 0.059925983428955075, 0.059664928436279296, 0.0605568962097168, 0.05959702301025391, 0.05932566452026367, 0.059079647064208984, 0.05984403228759766, 0.059516864776611327, 0.059152481079101565, 0.05883280181884765, 0.059048320770263674, 0.05913375854492187, 0.06066435241699219, 0.058929153442382816, 0.0589854736328125, 0.059036415100097654, 0.05937587356567383, 0.05997884750366211, 0.05934076690673828, 0.059689632415771486, 0.059275550842285155, 0.059490623474121096, 0.05934867095947265, 0.05923132705688477, 0.059163169860839845, 0.05917030334472656, 0.05938675308227539, 
0.05942438507080078, 0.05962380981445312, 0.061257728576660155, 0.05957222366333008, 0.05927056121826172, 0.05924515151977539, 0.059104385375976565, 0.06059001541137695, 0.059611358642578126, 0.05982076644897461, 0.05976473617553711, 0.05970249557495117, 0.060463905334472656, 0.061488800048828125, 0.05970473480224609, 0.059611358642578126, 0.05959958267211914, 0.05952022552490235, 0.05955855941772461, 0.05991027069091797, 0.0596085433959961, 0.05941712188720703, 0.05911958312988281, 0.05949203109741211, 0.06020694351196289, 0.059789825439453125, 0.05973974227905274, 0.059845024108886716, 0.05938585662841797, 0.0590561294555664, 0.05925888061523438, 0.05911142349243164, 0.059268352508544925, 0.05916748809814453, 0.060010784149169924, 0.059803359985351565, 0.05928316879272461, 0.059291488647460935, 0.05935945510864258, 0.05965798568725586, 0.059256542205810545, 0.06000307083129883, 0.05970057678222656, 0.05962179183959961, 0.059058433532714845, 0.05907379150390625, 0.059034175872802734, 0.05908908843994141, 0.05935308837890625, 0.059688064575195314, 0.059442047119140626, 0.059701248168945314, 0.05980979156494141, 0.0597498893737793, 0.059582977294921874, 0.05973984146118164, 0.0598691520690918, 0.060563041687011716, 0.060136192321777346, 0.05998387145996094, 0.06058134460449219, 0.060504608154296875, 0.05959785461425781, 0.05982012939453125, 0.05982633590698242, 0.0596998405456543, 0.05972742462158203, 0.060226081848144535, 0.05978112030029297, 0.0597872314453125, 0.0628062400817871, 0.05953011322021484, 0.05963382339477539, 0.060315456390380856, 0.0594741439819336, 0.060068607330322266, 0.06003302383422852, 0.05956979370117187, 0.05940851211547851, 0.05923455810546875, 0.05910284805297852, 0.05913183975219727, 0.05897679901123047, 0.05919232177734375, 0.05942726516723633, 0.05938224029541016, 0.058943489074707034, 0.0590579833984375, 0.05899190521240234, 0.05923932647705078, 0.05989990234375, 0.058982398986816405, 0.05968076705932617, 0.05968896102905273, 0.05949039840698242, 0.05950217437744141, 0.05960326385498047, 0.05971747207641601, 0.05957436752319336, 0.05955385589599609, 0.05959065628051758, 0.05969891357421875, 0.060322017669677735, 0.05950265502929687, 0.05957017517089844, 0.05965679931640625, 0.060605888366699216, 0.05997830581665039, 0.05963123321533203, 0.059913665771484374, 0.05963811111450195, 0.05938412857055664, 0.05913209533691406, 0.05935932922363281, 0.05922412872314453, 0.05924448013305664, 0.059342655181884765, 0.05956403350830078, 0.05945564651489258, 0.05932444763183594, 0.05914214324951172, 0.059153759002685546, 0.059143871307373044, 0.06000534439086914, 0.0594463996887207, 0.05954006576538086, 0.05958860778808594, 0.0592360954284668, 0.05938844680786133, 0.05925020980834961, 0.05930624008178711, 0.05904611206054688, 0.05905817413330078, 0.0593554573059082, 0.06386761474609375, 0.059300254821777344, 0.0594043197631836, 0.06028342437744141, 0.05969715118408203, 0.05907660675048828, 0.05999411010742187, 0.05969100952148437, 0.05980160140991211, 0.05993616104125977, 0.059381729125976564, 0.059400222778320313, 0.05951139068603516, 0.05954051208496094, 0.05983536148071289, 0.05972371292114258, 0.059564094543457034, 0.061009918212890625, 0.05955379104614258, 0.0595148811340332, 0.05965558242797851, 0.060009025573730466, 0.0597072639465332, 0.060372318267822266, 0.05929171371459961, 0.05949721527099609, 0.05944425582885742, 0.05928854370117188, 0.05982992172241211, 0.059682590484619144, 0.059406913757324216, 0.05935647964477539, 0.05969580841064453, 0.06168985748291016, 
0.059284351348876954, 0.05928550338745117, 0.05950054550170898, 0.05925878524780273, 0.06027801513671875, 0.0593455696105957, 0.05968841552734375, 0.06140966415405273, 0.06012118530273437, 0.06172697448730469, 0.059688766479492186, 0.05922124862670899, 0.05911167907714844, 0.0591080322265625, 0.05941452789306641, 0.059379711151123046, 0.059232257843017576, 0.059150337219238285, 0.05919539260864258, 0.05971353530883789, 0.059602783203125, 0.06063276672363281, 0.06013932800292969, 0.05983504104614258, 0.06021324920654297, 0.06008211135864258, 0.05970950317382812, 0.059701248168945314, 0.05956595230102539, 0.059977855682373044, 0.05963145446777344, 0.06029651260375977, 0.059738975524902344, 0.059668479919433595, 0.05975827026367188, 0.059109695434570314, 0.05924780654907227, 0.05934163284301758, 0.06042591857910156, 0.059953441619873045, 0.05934902572631836, 0.05957632064819336, 0.05928550338745117, 0.05950431823730469, 0.05925305557250977, 0.05981980895996094, 0.059470046997070314, 0.059213214874267575, 0.06060502243041992, 0.05936742401123047, 0.05945897674560547, 0.05932463836669922, 0.05967910385131836, 0.05980160140991211, 0.05969676971435547, 0.06091609573364258, 0.06035456085205078, 0.05989555358886719, 0.060293376922607424, 0.059724864959716795, 0.05919145584106445, 0.05918777465820312, 0.05954572677612305, 0.05929033660888672, 0.05935718536376953, 0.059404289245605466, 0.06221414566040039, 0.06008768081665039, 0.060010784149169924, 0.05965606307983398, 0.060772830963134766, 0.05980160140991211, 0.06015795135498047, 0.05977468872070312, 0.05957251358032226, 0.059491424560546874, 0.05963459014892578, 0.059553665161132814, 0.05962105560302734, 0.059450847625732425, 0.05966719818115234, 0.059807968139648435, 0.05909004974365235, 0.059368320465087894, 0.05941862487792969, 0.05972092819213867, 0.059327232360839845, 0.060530719757080076, 0.05972310256958008, 0.05951145553588867, 0.05978054428100586, 0.059464256286621095, 0.059351070404052735, 0.059041759490966794, 0.05954764938354492, 0.05917020797729492, 0.05925129699707031, 0.0593583984375, 0.059318206787109376, 0.05931097412109375, 0.05923001480102539, 0.05954579162597656, 0.05947596740722656, 0.059295169830322264, 0.059875904083251955, 0.05989376068115235, 0.0594288330078125, 0.059731998443603516, 0.05977907180786133, 0.05952716827392578, 0.06014355087280274, 0.05951894378662109, 0.05956208038330078, 0.05959270477294922, 0.05936742401123047, 0.05973811340332031, 0.06004908752441406, 0.05989807891845703, 0.059703231811523434, 0.059905216217041014, 0.05989270401000977, 0.06064096069335938, 0.06003744125366211, 0.05997772979736328, 0.05985263824462891, 0.05990620803833008, 0.05955955123901367, 0.0596627197265625, 0.05947596740722656, 0.06109299087524414, 0.06283308792114257, 0.06341404724121094, 0.05963017654418945, 0.05946988677978516, 0.06367846298217773, 0.05933059310913086, 0.06079894256591797, 0.060063743591308595, 0.059643905639648435, 0.06013071823120117, 0.06000831985473633, 0.05988016128540039, 0.05937337493896484, 0.05938735961914063, 0.059175617218017576, 0.059637153625488284, 0.0593230094909668, 0.060020126342773435, 0.059345504760742185, 0.059514816284179685, 0.05937753677368164, 0.059611328125, 0.05939199829101562, 0.060375038146972655, 0.05974835205078125, 0.05980556869506836, 0.05988355255126953, 0.060477535247802736, 0.059469825744628904, 0.059490238189697266, 0.059364959716796874, 0.0597154541015625, 0.05964656066894531, 0.06093414306640625, 0.06232473754882813, 0.06012492752075195, 0.059899391174316405, 
0.060771072387695316, 0.05992652893066406, 0.05950431823730469, 0.06067027282714844, 0.05958969497680664, 0.05970758438110352, 0.059796222686767576, 0.06192876815795898, 0.059849407196044924, 0.059238399505615234, 0.060063743591308595, 0.05920153427124023, 0.05944313430786133, 0.05954291152954101, 0.059222015380859375, 0.059249343872070315, 0.05934080123901367, 0.05949235153198242, 0.05922316741943359, 0.05926591873168945, 0.0601190071105957, 0.05965212631225586, 0.05925724792480469, 0.059821632385253905, 0.0593922233581543, 0.05898672103881836, 0.059233535766601564, 0.058729057312011716, 0.05907062530517578, 0.058971935272216794, 0.05903900909423828, 0.059306209564208984, 0.0593023681640625, 0.05951103973388672, 0.059322368621826174, 0.05943404769897461, 0.060171199798583985, 0.05949030303955078, 0.05929574584960937, 0.05923385620117187, 0.05959654235839844, 0.05977104187011719, 0.05941302490234375, 0.059581535339355465, 0.05954019165039062, 0.05974035263061524, 0.059633663177490234, 0.05954355239868164, 0.05946777725219726, 0.05947769546508789, 0.05983996963500977, 0.059634529113769534, 0.06034211349487305, 0.059801216125488284, 0.05905868911743164, 0.05961014556884765, 0.05971046447753906, 0.05944905471801758, 0.059035934448242185, 0.05922444915771485, 0.0590885124206543, 0.058905601501464844, 0.058935966491699215, 0.058883872985839844, 0.060015167236328125, 0.06111231994628906, 0.059701248168945314, 0.0595599365234375, 0.05981798553466797, 0.06066527938842774, 0.05939225769042969, 0.05897203063964844, 0.05904172897338867, 0.059359745025634764, 0.059264671325683596, 0.05897846221923828, 0.05904812622070312, 0.05905817413330078, 0.058788864135742185, 0.05876630401611328, 0.05926457595825195, 0.05942035293579102, 0.05929859161376953, 0.05919744110107422, 0.05952511978149414, 0.06043033599853516, 0.05974630355834961, 0.06386687850952148, 0.060061473846435544, 0.06046537780761719, 0.060014591217041016, 0.0597147216796875, 0.059884033203125, 0.05981423950195312, 0.059652095794677736, 0.059813697814941405, 0.05988729476928711, 0.06047001647949219, 0.059579391479492184, 0.0604505615234375, 0.05977916717529297, 0.059240543365478515, 0.06007686233520508, 0.05943910217285156, 0.05992144012451172, 0.060143871307373045, 0.05943164825439453, 0.05943280029296875, 0.059238174438476565, 0.05943270492553711, 0.059089534759521486, 0.05868544006347656, 0.05862124633789063, 0.059052513122558596, 0.05927753448486328, 0.059181087493896486, 0.05919126510620117, 0.05929068756103516, 0.05918406295776367, 0.05982617568969727, 0.05911324691772461, 0.05931827163696289, 0.05980972671508789, 0.059932159423828124, 0.05904054260253906, 0.05866393661499023, 0.059038719177246096, 0.05884214401245117, 0.059092960357666015, 0.059417598724365236, 0.0625376319885254, 0.060276607513427734, 0.058894561767578124, 0.06049792098999023, 0.059641761779785155, 0.06040537643432617, 0.06008019256591797, 0.0598306884765625, 0.059781024932861325, 0.059956382751464844, 0.06049795150756836, 0.05976160049438477, 0.05966432189941406, 0.059779102325439454, 0.05950857543945313, 0.06119644927978515, 0.0600266227722168, 0.059700897216796875]",tokens/s,16.750914906716744,,, 
8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,23902.429184,13034.78272,0.0,12639.535104,12621.66016,s,1,48.46375,48.46375,0.0,48.46375,48.46375,48.46375,48.46375,[48.46375],,kWh,0.0012073979881250087,0.0001331777867873099,0.00045702203228401017,0.0017975978071963288,,MB,1234.599936,13909.295104,0.0,13493.075968,13249.793024,s,10,1.6881716613769533,0.16881716613769532,0.001683804255809587,0.16813304138183593,0.1713816635131836,0.17156614456176758,0.17171372940063476,"[0.16711264038085938, 0.16768089294433594, 0.1665416259765625, 0.16802146911621094, 0.17134066772460937, 0.16955641174316408, 0.17175062561035156, 0.16785340881347657, 0.16824461364746093, 0.17006930541992188]",tokens/s,1516.433463829112,kWh,4.969859327189498e-06,5.480902051208892e-07,3.285638221728996e-06,8.803587754039385e-06,tokens/kWh,29079053.58046082,MB,1256.77568,13909.295104,0.0,13493.075968,13389.080064,s,10,57.10703173828125,5.710703173828125,0.010946179836741406,5.710876708984375,5.724618115234375,5.725002416992187,5.725309858398438,"[5.71461181640625, 5.71090478515625, 5.7108486328125, 5.70460205078125, 5.72538671875, 5.70095751953125, 5.72364453125, 5.72453271484375, 5.691873046875, 5.699669921875]",tokens/s,11.031916400194978,kWh,0.00016651749440864557,1.8367533693865484e-05,0.00010979925780166894,0.00029468428590417997,tokens/kWh,213788.1217747905,,s,630,57.10336171722414,0.09064025669400655,0.0008973252111778205,0.09052087783813476,0.0912185333251953,0.09173341217041016,0.09427259468078614,"[0.08919391632080079, 0.08895750427246094, 0.09219071960449218, 0.0894422378540039, 0.08997484588623048, 0.09072640228271485, 0.09069974517822266, 0.09024515533447265, 0.09008537292480469, 0.09020211029052734, 0.09005411529541016, 0.0903419189453125, 0.0906806411743164, 0.09092166137695312, 0.09107862091064453, 0.09096931457519532, 0.09114911651611328, 0.09083289337158203, 0.09694931030273438, 0.09088710021972657, 0.09127295684814453, 0.09111577606201172, 0.0909659194946289, 0.09091081237792968, 0.09064374542236328, 0.0906247329711914, 0.09070591735839843, 0.09052774047851563, 0.09090866851806641, 0.09069158172607422, 0.09061302185058594, 0.09065507507324219, 0.09063257598876953, 0.0908431396484375, 0.09075673675537109, 0.0908878402709961, 0.09017545318603516, 0.09012095642089844, 0.09048883056640625, 0.09071001434326172, 0.09078374481201172, 0.09020198059082031, 0.09018732452392578, 0.08996543884277344, 0.09024646759033203, 0.09037651062011719, 0.09034758758544922, 0.09082675170898437, 0.09029427337646484, 0.09067724609375, 0.09117619323730469, 0.09048719787597656, 0.09127152252197265, 0.09069683074951172, 0.09068224334716797, 0.09097007751464843, 0.09071004486083985, 0.09181593322753906, 0.09058303833007812, 0.09043539428710938, 0.09072393798828125, 0.090648193359375, 0.09032323455810547, 0.09000784301757812, 0.09026057434082031, 0.09181043243408203, 0.08976121520996094, 
0.09040573120117187, 0.09022844696044922, 0.09349353790283203, 0.09018163299560547, 0.09031785583496094, 0.09568764495849609, 0.09043145751953124, 0.09003759765625, 0.09010230255126953, 0.09034153747558593, 0.08994361877441406, 0.09069538879394531, 0.09085382080078125, 0.09004182434082031, 0.0907023696899414, 0.09069990539550782, 0.09033513641357421, 0.09064883422851562, 0.09060352325439452, 0.09122406768798828, 0.09040605163574218, 0.089916259765625, 0.09026150512695312, 0.09023283386230468, 0.09025126647949219, 0.09032249450683594, 0.08996927642822265, 0.0898353271484375, 0.09014272308349609, 0.08967577362060547, 0.09054988861083985, 0.09102579498291016, 0.091109375, 0.09075711822509766, 0.0905173110961914, 0.09057865905761718, 0.09003392028808593, 0.09014959716796875, 0.08996044921875, 0.09032268524169922, 0.09063849639892578, 0.09057849884033203, 0.09084162902832031, 0.09246924591064454, 0.09113529968261719, 0.09073939514160156, 0.09075414276123046, 0.09090700531005859, 0.09137820434570312, 0.09087379455566406, 0.0907979507446289, 0.09052098846435547, 0.0905123519897461, 0.09056031799316407, 0.0906137924194336, 0.09066822052001954, 0.09059203338623047, 0.09043353271484375, 0.0906954574584961, 0.08974153900146484, 0.08958541107177734, 0.09001932525634766, 0.09005542755126954, 0.09030553436279297, 0.0896890869140625, 0.092295166015625, 0.09002550506591797, 0.09048912048339844, 0.09038662719726563, 0.09009942626953125, 0.09044918060302734, 0.09039360046386719, 0.09050701141357421, 0.0904747543334961, 0.09118093109130859, 0.09085491180419922, 0.09106867218017578, 0.09059334564208985, 0.09064998626708984, 0.09066761779785157, 0.0905464324951172, 0.08998847961425781, 0.08987955474853515, 0.09010761260986327, 0.09019779205322266, 0.08981321716308593, 0.0898941421508789, 0.09013529968261719, 0.08953446197509765, 0.08997046661376953, 0.09029449462890625, 0.09019705963134765, 0.0902010269165039, 0.09019913482666016, 0.09018243408203125, 0.09036198425292968, 0.09107046508789063, 0.09024281311035157, 0.09050137329101562, 0.09012364959716797, 0.09028435516357422, 0.09055661010742187, 0.09052806091308593, 0.09083270263671875, 0.09068726348876953, 0.0907696304321289, 0.0947589111328125, 0.09082061004638672, 0.09065267181396484, 0.09105817413330078, 0.09042739105224609, 0.09375129699707031, 0.09383126068115234, 0.0917236785888672, 0.09206310272216797, 0.09053968048095704, 0.09044700622558594, 0.09066886138916015, 0.09093917083740234, 0.09084336090087891, 0.09095782470703125, 0.09037824249267579, 0.09058223724365234, 0.09105059051513673, 0.09005689239501953, 0.0898682861328125, 0.09071820831298828, 0.09040691375732422, 0.09005465698242188, 0.09075011444091798, 0.0905240936279297, 0.09096438598632813, 0.09096559906005859, 0.09092546844482421, 0.09076326751708984, 0.09064995574951172, 0.09086822509765625, 0.09091670227050781, 0.09087318420410156, 0.09105868530273438, 0.09060777282714844, 0.09018946838378906, 0.09108956909179687, 0.09061526489257812, 0.09092559814453124, 0.09036799621582031, 0.09027891540527344, 0.0901740493774414, 0.09046262359619141, 0.09064447784423828, 0.09171558380126953, 0.09077862548828125, 0.09009168243408203, 0.09029821014404296, 0.09038508605957031, 0.09054854583740235, 0.09047449493408204, 0.09069261169433594, 0.09060454559326171, 0.09049702453613281, 0.09027894592285156, 0.09044182586669922, 0.09128819274902343, 0.09062630462646484, 0.09088409423828125, 0.09106022644042969, 0.0907814712524414, 0.09072662353515625, 0.0906219482421875, 0.09066019439697266, 0.09090115356445312, 
0.09053321838378907, 0.09056947326660156, 0.09054195404052734, 0.08976182556152344, 0.0901295394897461, 0.09072115325927735, 0.09016067504882813, 0.08977983856201172, 0.08987120056152344, 0.0899420166015625, 0.09046812438964844, 0.0899106216430664, 0.089936767578125, 0.09021151733398437, 0.0904365463256836, 0.09059468841552734, 0.09099533081054688, 0.09009081268310547, 0.09000611114501954, 0.09025545501708984, 0.09022259521484376, 0.09111558532714843, 0.09148818969726563, 0.09161337280273438, 0.090814208984375, 0.09099590301513671, 0.09087680053710938, 0.09058723449707032, 0.09053993225097656, 0.09001376342773437, 0.08997062683105468, 0.09059123229980469, 0.09148992156982422, 0.09018748474121094, 0.0909748764038086, 0.09063129425048828, 0.09038841247558593, 0.09013139343261718, 0.09033318328857422, 0.09016851043701171, 0.09057071685791016, 0.09074585723876953, 0.09298518371582032, 0.0903864288330078, 0.09116671752929688, 0.09035775756835937, 0.09075260925292969, 0.09435167694091796, 0.09042873382568359, 0.09998761749267578, 0.09073458862304687, 0.09083766174316406, 0.09112777709960937, 0.09103263854980469, 0.09088233947753906, 0.09115510559082031, 0.0905871353149414, 0.09085747528076171, 0.09084070587158204, 0.09137110137939453, 0.09100777435302734, 0.09105203247070312, 0.09092505645751953, 0.09055846405029297, 0.08996633911132812, 0.09026790618896484, 0.09085337829589844, 0.09017747497558594, 0.09033106994628906, 0.09045388793945312, 0.09057305908203125, 0.09007209777832031, 0.09010684967041016, 0.09021798706054687, 0.09096806335449219, 0.0904975357055664, 0.09032704162597656, 0.0909738540649414, 0.09058348846435547, 0.09069158172607422, 0.09070182037353515, 0.09134489440917969, 0.09091046142578126, 0.0904268798828125, 0.09084909057617188, 0.09490863800048828, 0.09218531036376953, 0.09024050903320313, 0.09080473327636719, 0.0901794204711914, 0.08999052429199218, 0.08992054748535157, 0.09033865356445313, 0.09033334350585938, 0.09010816192626953, 0.09039871978759766, 0.08971673583984376, 0.09021234893798828, 0.09039842987060547, 0.09021820831298828, 0.0908519058227539, 0.09321849822998046, 0.09011961364746093, 0.09025154876708984, 0.09034368133544922, 0.09014649963378907, 0.09005942535400391, 0.08976895904541016, 0.09028825378417969, 0.09019686126708984, 0.08997682952880859, 0.09163795471191406, 0.09076624298095703, 0.09086892700195312, 0.090646240234375, 0.09060352325439452, 0.09029837036132812, 0.0902652816772461, 0.09075305938720703, 0.09105846405029297, 0.0904532470703125, 0.08956594848632812, 0.09019753265380859, 0.08973155212402344, 0.08993743896484375, 0.08950806427001953, 0.08979634857177735, 0.09254723358154297, 0.09004450988769531, 0.08999993896484375, 0.09087558746337891, 0.09015090942382813, 0.0897228775024414, 0.089997314453125, 0.08971635437011719, 0.08972723388671874, 0.09023296356201171, 0.0897000961303711, 0.08990914916992188, 0.09020451354980469, 0.0915538558959961, 0.09107273864746093, 0.09046771240234375, 0.09090908813476563, 0.09078688049316407, 0.09063520050048827, 0.09101229095458985, 0.09069356536865235, 0.09222029113769531, 0.09059859466552735, 0.0903422088623047, 0.09042534637451172, 0.09063219451904297, 0.09042329406738281, 0.09384754943847656, 0.09197158050537109, 0.09185689544677735, 0.09065577697753906, 0.09056079864501954, 0.09031571197509766, 0.09051519775390625, 0.09012633514404297, 0.09065062713623047, 0.09043762969970703, 0.09041305541992188, 0.09047654724121093, 0.0901426239013672, 0.09023513793945312, 0.09052963256835937, 0.09058678436279297, 
0.09143331146240234, 0.09099673461914062, 0.09073766326904296, 0.09118208312988281, 0.0914554901123047, 0.09089215850830078, 0.09105216217041015, 0.09111891174316407, 0.090657470703125, 0.09011933135986328, 0.09029254150390625, 0.09030095672607422, 0.0904781723022461, 0.09061158752441406, 0.09039107513427734, 0.09082991790771484, 0.09061878204345702, 0.09039462280273437, 0.09061170959472656, 0.09031423950195312, 0.09016973114013672, 0.09017561340332031, 0.09050521850585938, 0.09035980987548828, 0.0940789794921875, 0.0915598373413086, 0.09090057373046875, 0.09089218902587891, 0.09095145416259766, 0.0905403823852539, 0.09109478759765625, 0.09083106994628906, 0.09159593963623047, 0.09068540954589843, 0.09126502227783204, 0.09084722900390625, 0.09147174072265625, 0.0916911392211914, 0.09173401641845703, 0.09038848114013671, 0.0908779525756836, 0.09052108764648438, 0.09035990142822266, 0.09053635406494141, 0.09127117156982421, 0.09049638366699218, 0.09058163452148438, 0.09043558502197266, 0.09082195281982422, 0.09035027313232422, 0.09042649841308593, 0.09008777618408204, 0.08998307037353516, 0.09036227416992187, 0.09048681640625, 0.09004236602783203, 0.09086566162109375, 0.09085542297363282, 0.09126265716552734, 0.09088966369628906, 0.09085836791992187, 0.09075507354736329, 0.09113391876220703, 0.09063744354248048, 0.09076624298095703, 0.09030156707763672, 0.09059744262695313, 0.09054496002197265, 0.09121791839599609, 0.09037619018554688, 0.09034342193603516, 0.09026870727539063, 0.09019696044921875, 0.08981708526611328, 0.09027337646484375, 0.09014313507080078, 0.0902323226928711, 0.09050070190429688, 0.09058134460449219, 0.09044873809814453, 0.0905192642211914, 0.09073161315917969, 0.09044656372070313, 0.0902392349243164, 0.08968121337890625, 0.08995494079589844, 0.09089766693115234, 0.09399088287353516, 0.09071263885498047, 0.09143856048583984, 0.0905770263671875, 0.09113868713378906, 0.09859059143066407, 0.0914968032836914, 0.09285404968261719, 0.09127117156982421, 0.09032102203369141, 0.08976620483398437, 0.08988652801513672, 0.08986592102050782, 0.08987052917480469, 0.09013203430175781, 0.08969468688964843, 0.08973321533203125, 0.0899681625366211, 0.09061033630371093, 0.09073458862304687, 0.09024214172363282, 0.08991222381591797, 0.09167209625244141, 0.0901165771484375, 0.09041510772705078, 0.09042460632324219, 0.0903564453125, 0.0903719711303711, 0.09049919891357422, 0.09042739105224609, 0.09047449493408204, 0.09052159881591797, 0.09032294464111328, 0.09069964599609374, 0.09044595336914063, 0.09093488311767578, 0.09104630279541015, 0.09046966552734376, 0.09072099304199219, 0.09038787078857421, 0.08988323211669921, 0.09003008270263672, 0.0901053466796875, 0.09191577911376952, 0.09056768035888672, 0.09037824249267579, 0.09020620727539062, 0.0907973403930664, 0.0901618881225586, 0.09004000091552734, 0.09019149017333984, 0.08963549041748047, 0.09004838562011719, 0.0900814437866211, 0.0902446060180664, 0.09033779144287109, 0.09018982696533204, 0.09004441833496094, 0.09084928131103516, 0.09031168365478516, 0.09045449829101562, 0.09037264251708985, 0.09025740814208984, 0.09011151885986328, 0.09056944274902344, 0.09036160278320313, 0.09052076721191406, 0.0906289291381836, 0.09052489471435547, 0.09079058837890625, 0.09003542327880859, 0.0898485107421875, 0.09085081481933593, 0.0900186538696289, 0.09037964630126953, 0.08995231628417968, 0.0902252197265625, 0.08942108917236329, 0.08993046569824219, 0.09027378845214844, 0.09033436584472657, 0.089695068359375, 0.08985708618164062, 
0.09123661041259766, 0.09116761779785157, 0.09004831695556641, 0.0903024673461914, 0.09023693084716797, 0.08961344146728516, 0.09017024230957031, 0.09074073791503906, 0.09107408142089844, 0.09056285095214844, 0.09056070709228516, 0.09086771392822265, 0.09050726318359376, 0.09108595275878906, 0.09106931304931641, 0.0907323226928711, 0.0895162582397461, 0.08999523162841797, 0.0904151382446289, 0.08995635223388672, 0.08919859313964844, 0.08955709075927734, 0.0899419174194336, 0.09187875366210937, 0.09256816101074218, 0.08996160125732422, 0.09040377807617188, 0.09014886474609375, 0.09075862121582032, 0.09173267364501952, 0.09214345550537109, 0.09133465576171874, 0.0900321273803711, 0.09042124938964843, 0.09030854034423828, 0.09043778991699218, 0.09023648071289063, 0.09061615753173828, 0.09084441375732422, 0.09070022583007813, 0.09056598663330079, 0.09061692810058594, 0.09066073608398438, 0.09076767730712891, 0.0909288330078125, 0.09132236480712891, 0.0903004150390625, 0.08980502319335938, 0.08989263916015625, 0.08983261108398438, 0.09004937744140624, 0.090523681640625]",tokens/s,11.032625419143628,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,12172.288,7099.84256,0.0,6704.594944,6690.791936,s,1,28.497341796875,28.497341796875,0.0,28.497341796875,28.497341796875,28.497341796875,28.497341796875,[28.497341796875],,kWh,0.0006255994436999998,6.900108954029212e-05,0.0002113479468559996,0.0009059484800962916,,MB,1412.722688,7313.752064,0.0,6897.532928,6816.50432,s,10,1.2305203475952147,0.12305203475952146,0.0007433982943910377,0.12308745574951171,0.12405353775024414,0.12416759986877442,0.12425884956359863,"[0.12309510040283203, 0.12346208190917969, 0.12428166198730468, 0.12237039947509766, 0.12226799774169922, 0.12353353881835938, 0.12194550323486328, 0.1230798110961914, 0.12402819061279297, 0.12245606231689453]",tokens/s,2080.420697636545,kWh,3.641560362294242e-06,4.015982918952097e-07,2.406383955283955e-06,6.449542609473406e-06,tokens/kWh,39692737.22201239,MB,1432.256512,7320.04352,0.0,6903.824384,6816.50688,s,10,75.43501416015624,7.543501416015625,0.011593906221494027,7.547277099609374,7.557584716796875,7.557750610351563,7.557883325195312,"[7.5575478515625, 7.5541923828125, 7.55791650390625, 7.5298935546875, 7.5258388671875, 7.53303369140625, 7.54868701171875, 7.5317109375, 7.550326171875, 7.5458671875]",tokens/s,8.351559378811086,kWh,0.00022198080847728922,2.448530608798314e-05,9.876297475791608e-05,0.0003452290893231884,tokens/kWh,182487.5190080583,,s,630,75.43231571197506,0.11973383446345252,0.0010853508873399532,0.11953097915649413,0.1208431755065918,0.12180508804321288,0.1233563981628418,"[0.12017369842529296, 0.11974950408935547, 0.12236595153808594, 0.11982217407226563, 0.11972799682617187, 0.11903129577636719, 0.11889539337158203, 0.11908914947509766, 0.11950438690185547, 0.11949842834472656, 0.12097618865966797, 0.12004380798339843, 0.11917692565917969, 0.11951513671875, 0.11868569946289062, 0.11904000091552734, 0.12015821075439453, 0.12295577239990234, 0.12003052520751953, 0.11928646087646484, 0.11939820861816407, 0.11965001678466797, 0.11915420532226563, 0.11925936126708984, 0.11907721710205078, 0.11928307342529297, 0.11932733154296875, 0.12049027252197266, 0.12084236907958984, 0.12067635345458984, 0.12052384185791015, 0.11941983795166015, 0.11929510498046875, 0.12006079864501953, 0.11981414031982422, 0.12062854766845703, 0.12023878479003906, 0.11937721252441406, 0.12015481567382813, 0.11939839935302735, 0.11896825408935546, 0.11968723297119141, 0.11998822021484375, 0.1203671646118164, 0.12025644683837891, 0.12007014465332032, 0.11990016174316406, 0.11966397094726562, 0.11902365112304687, 0.11949529266357421, 0.1200618896484375, 0.11942281341552734, 0.12074806213378907, 0.11966893005371093, 0.11984598541259765, 0.11938841247558593, 0.12033478546142579, 0.12052912139892578, 0.12127046203613281, 0.12126217651367187, 0.12033200073242188, 0.12016230773925782, 0.12303523254394531, 0.11941024017333984, 0.11940252685546875, 0.12077526092529296, 0.1200986557006836, 0.12036691284179687, 0.1194967041015625, 0.11953171539306641, 0.12077875518798828, 0.11889254760742188, 0.11883929443359376, 0.11906861114501953, 0.1190393295288086, 0.11952928161621093, 0.12067731475830078, 0.11974038696289062, 0.11956428527832032, 0.11905433654785157, 0.11932672119140625, 0.11914176177978515, 0.11939084625244141, 0.11959091186523438, 0.12280012512207031, 0.12056371307373047, 0.11980159759521485, 0.11990857696533203, 0.1194486083984375, 0.11981680297851563, 0.11970396423339844, 0.11965644836425782, 0.12053404998779296, 0.12012140655517578, 0.12037213134765624, 0.11997296142578125, 
0.12016323089599609, 0.12007395172119141, 0.11935718536376953, 0.12235443115234375, 0.11959478759765625, 0.12073779296875, 0.12017974090576172, 0.1204397735595703, 0.11925484466552734, 0.11946208190917969, 0.11925299072265624, 0.12060835266113282, 0.11953807830810546, 0.12065315246582031, 0.12032272338867188, 0.11942707061767578, 0.12098774719238281, 0.12001475524902344, 0.1195335693359375, 0.11951103973388671, 0.11990857696533203, 0.12049385833740234, 0.11943917083740234, 0.11993222045898437, 0.12033631896972656, 0.11973884582519531, 0.11930636596679688, 0.11972422027587891, 0.11981228637695313, 0.11934281921386719, 0.12033773040771484, 0.11980461120605469, 0.1191523208618164, 0.11911507415771484, 0.11897714996337891, 0.119283203125, 0.12200434875488281, 0.12114329528808594, 0.12062310028076172, 0.11924249267578126, 0.11996141052246094, 0.11985945892333984, 0.1215797119140625, 0.1196605453491211, 0.12184941101074219, 0.12103110504150391, 0.120495361328125, 0.11960297393798829, 0.12009552001953125, 0.12055526733398438, 0.119275390625, 0.12351084899902344, 0.11954585266113281, 0.11976345825195313, 0.12031568145751953, 0.12047113800048828, 0.12023872375488281, 0.11945369720458984, 0.11949385833740235, 0.11954640197753906, 0.11932051086425781, 0.12116204833984374, 0.1209170913696289, 0.12039020538330078, 0.11999807739257813, 0.12185993957519531, 0.11925987243652343, 0.1195502700805664, 0.11920777893066406, 0.11951827239990234, 0.12053148651123047, 0.12157357025146484, 0.11950511932373047, 0.11962572479248047, 0.11866019439697266, 0.11882125091552734, 0.1187369613647461, 0.11877011108398437, 0.11928099060058593, 0.1208941421508789, 0.11994111633300782, 0.11931238555908204, 0.11899903869628906, 0.11833753967285156, 0.11914854431152344, 0.1184686050415039, 0.12202754974365235, 0.11965692901611329, 0.11960137939453125, 0.12070304107666016, 0.11990313720703125, 0.11891180419921875, 0.1190606689453125, 0.11922438049316406, 0.12041939544677735, 0.12306905364990234, 0.119959228515625, 0.12003385925292968, 0.11974861145019532, 0.1189080352783203, 0.11911609649658203, 0.11900572967529296, 0.11905990600585938, 0.12029312133789062, 0.11942588806152343, 0.11945919799804687, 0.1195546875, 0.11861952209472656, 0.11865366363525391, 0.11871622467041015, 0.11912214660644531, 0.12035059356689454, 0.11973216247558593, 0.11871785736083984, 0.11942160034179687, 0.11896451568603515, 0.11820003509521484, 0.11908505249023438, 0.11866031646728516, 0.12065666961669921, 0.11917212677001954, 0.11984381103515625, 0.11957453155517578, 0.1189375991821289, 0.12100198364257812, 0.11918473815917968, 0.12328412628173828, 0.11903794860839843, 0.12036441802978516, 0.11977922821044922, 0.11969510650634765, 0.11891158294677734, 0.11863283538818359, 0.11860921478271484, 0.1203944320678711, 0.11884483337402343, 0.11953024291992187, 0.11991584014892578, 0.11878627014160156, 0.11979603576660157, 0.11898473358154296, 0.11812246704101563, 0.11908441925048828, 0.11921193695068359, 0.11885027313232421, 0.12033023834228515, 0.11973139190673829, 0.1191673583984375, 0.11845244598388673, 0.11827235412597656, 0.11862207794189453, 0.11865702056884765, 0.11893059539794922, 0.1237982406616211, 0.12161446380615235, 0.11831478118896484, 0.11835533142089844, 0.11866796875, 0.11876319885253907, 0.11946809387207032, 0.11959532928466797, 0.11958806610107423, 0.11869459533691407, 0.1190832977294922, 0.11849612426757812, 0.11825456237792968, 0.11972169494628906, 0.11940675354003906, 0.12141094207763672, 0.12022438049316406, 0.11983609771728515, 
0.11922489929199219, 0.11875113677978516, 0.11841065979003906, 0.11845868682861328, 0.1189458236694336, 0.11932911682128906, 0.11976294708251953, 0.11969478607177735, 0.11935174560546875, 0.11970572662353515, 0.1192973403930664, 0.11841782379150391, 0.11844841766357422, 0.12321382141113281, 0.12017638397216797, 0.12212454223632813, 0.12025577545166016, 0.11922505950927734, 0.11892940521240235, 0.11887411499023437, 0.11883312225341797, 0.11932879638671876, 0.11900284576416016, 0.12014006042480468, 0.11928959655761719, 0.11932844543457032, 0.11848063659667969, 0.11837312316894531, 0.1185567398071289, 0.11860157012939453, 0.11929821014404297, 0.11971788787841797, 0.1192959976196289, 0.1189862060546875, 0.11897100830078125, 0.11919757080078125, 0.12073561859130859, 0.11951651000976563, 0.12201042938232422, 0.11860326385498046, 0.12077107238769531, 0.1192591323852539, 0.12309404754638673, 0.1184176025390625, 0.11821036529541015, 0.11983548736572265, 0.1207410888671875, 0.11881292724609376, 0.12162969970703125, 0.12009926605224609, 0.1191410903930664, 0.11902960205078125, 0.11987776184082032, 0.11907660675048828, 0.118953857421875, 0.12059871673583984, 0.12020451354980469, 0.12092076873779296, 0.11985849761962891, 0.11920880126953125, 0.1196933135986328, 0.11943321228027344, 0.12038758087158204, 0.11987145233154296, 0.12205856323242187, 0.11998175811767578, 0.11914498901367188, 0.11816918182373047, 0.11828470611572266, 0.11852767944335937, 0.11904156494140625, 0.11864351654052735, 0.12085244750976562, 0.12006809234619141, 0.11981756591796874, 0.11900982666015625, 0.1187957763671875, 0.11894438171386719, 0.11927552032470704, 0.12119654083251953, 0.12207308959960937, 0.12013565063476563, 0.11995343780517578, 0.11986249542236328, 0.12001769256591797, 0.11947366333007813, 0.11882476806640625, 0.1192925796508789, 0.11859561920166016, 0.11991808319091797, 0.12048454284667968, 0.11933602905273437, 0.11914307403564453, 0.1182449951171875, 0.11882694244384766, 0.11893196868896484, 0.11976258850097657, 0.1197202911376953, 0.1200721893310547, 0.11922796630859375, 0.11962207794189453, 0.11897264099121094, 0.11866429138183594, 0.11856079864501953, 0.11910524749755859, 0.11958550262451172, 0.11937814331054687, 0.11987331390380859, 0.11903612518310547, 0.11957817840576172, 0.11868972778320312, 0.11839830780029297, 0.11922908782958984, 0.11951103973388671, 0.11991180419921875, 0.11948300933837891, 0.11889459228515625, 0.12053094482421875, 0.12058624267578125, 0.11993836975097656, 0.12243174743652344, 0.12015010833740235, 0.12074752044677735, 0.11937059020996094, 0.11965644836425782, 0.11974156951904297, 0.11881084442138672, 0.12347583770751953, 0.1205355224609375, 0.12010323333740235, 0.11951097869873047, 0.12101229095458985, 0.11928985595703125, 0.11854163360595703, 0.11886825561523437, 0.11954576110839844, 0.1188496322631836, 0.11849565124511718, 0.12003763580322266, 0.12017024230957031, 0.12015535736083985, 0.11934194946289063, 0.11935958099365235, 0.11947103881835938, 0.12020758056640625, 0.12015068817138672, 0.12155494689941407, 0.11986637115478516, 0.11898982238769532, 0.11916006469726563, 0.11862818908691407, 0.11830326080322266, 0.11834323120117188, 0.11905625915527343, 0.11838559722900391, 0.11991871643066407, 0.11871132659912109, 0.1193071060180664, 0.11882064056396484, 0.11860537719726562, 0.11870649719238281, 0.11976943969726563, 0.12995289611816407, 0.12005875396728516, 0.1201295394897461, 0.12266057586669922, 0.11874470520019531, 0.11855699157714844, 0.11855907440185547, 0.11875910186767578, 
0.11998258972167969, 0.12102227020263671, 0.11961753845214844, 0.11972799682617187, 0.11947325134277344, 0.1183814697265625, 0.11890013122558593, 0.11888495635986328, 0.11874864196777343, 0.11902210998535157, 0.11976454162597656, 0.11972819519042968, 0.11898210906982422, 0.11856710052490234, 0.118293212890625, 0.11903929901123046, 0.11930284881591798, 0.12033392333984375, 0.12022621154785157, 0.11950694274902343, 0.11888374328613281, 0.11922492980957031, 0.11878387451171875, 0.11865094757080077, 0.11992070770263671, 0.11991859436035156, 0.11967203521728516, 0.11921692657470703, 0.12027699279785156, 0.11898473358154296, 0.12137042999267578, 0.11994332885742187, 0.12132147216796875, 0.12020313262939453, 0.12175091552734375, 0.11989708709716797, 0.11976809692382813, 0.11936835479736328, 0.11899107360839843, 0.11905007934570312, 0.12322611236572266, 0.11964006042480468, 0.12085043334960938, 0.11978956604003907, 0.11939020538330078, 0.11895193481445313, 0.11870003509521485, 0.11848499298095704, 0.11873484802246094, 0.11853756713867188, 0.11924137878417969, 0.12106969451904297, 0.11973033905029297, 0.1193919677734375, 0.11925122833251953, 0.11961417388916015, 0.11893981170654297, 0.11966345977783203, 0.11872627258300782, 0.12338591766357422, 0.11997443389892579, 0.11978892517089844, 0.11907727813720703, 0.11898880004882813, 0.11859964752197266, 0.1188288345336914, 0.1185159683227539, 0.11902928161621094, 0.11888880157470703, 0.11925107574462891, 0.118171875, 0.11781660461425782, 0.11955455780029296, 0.11839488220214844, 0.12229798126220703, 0.12013196563720703, 0.11974041748046875, 0.11957657623291015, 0.11893071746826171, 0.11974291229248046, 0.11915926361083984, 0.11900294494628906, 0.1192366714477539, 0.1213306884765625, 0.12013254547119141, 0.1199636459350586, 0.11930009460449219, 0.11869120025634766, 0.11899897766113281, 0.11831366729736328, 0.11905795288085938, 0.12022422027587891, 0.12071731567382812, 0.119046142578125, 0.11970511627197265, 0.11934767913818359, 0.11867750549316407, 0.11878399658203125, 0.11980111694335938, 0.11917759704589843, 0.12096656036376953, 0.11997641754150391, 0.11966524505615235, 0.11906985473632813, 0.11888713836669922, 0.11882003021240234, 0.11921862030029297, 0.11992054748535157, 0.12004771423339844, 0.11992237091064453, 0.11963158416748047, 0.11954070281982422, 0.1223392333984375, 0.12478288269042968, 0.11962745666503906, 0.12157520294189453, 0.12046371459960938, 0.11999215698242187, 0.12306857299804687, 0.11988172912597657, 0.11947007751464844, 0.11947212982177734, 0.12280422210693359, 0.11980729675292968, 0.12030950164794922, 0.11969631958007812, 0.11939609527587891, 0.12063081359863281, 0.12204105377197266, 0.1188331527709961, 0.11879241943359375, 0.11958943939208984, 0.12048393249511719, 0.12021334075927734, 0.11963362884521485, 0.1195976333618164, 0.11908271789550781, 0.118724609375, 0.1191805419921875, 0.1185719985961914, 0.12153040313720703, 0.12006124877929687, 0.1194986572265625, 0.12027094268798828, 0.11915644836425782, 0.11922710418701171, 0.11937324523925781, 0.12075065612792969, 0.12003123474121094, 0.12056390380859375, 0.11997574615478515, 0.11982848358154297, 0.11911081695556641, 0.11942082977294922, 0.119384765625, 0.11925939178466796, 0.11914403533935547, 0.12470108795166016, 0.12011724853515625, 0.12056781005859375, 0.11986310577392578, 0.11982428741455078, 0.11924864196777343, 0.11916342163085937, 0.11943116760253907, 0.12045516967773437, 0.1199774398803711, 0.11952003479003906, 0.12041600036621093, 0.1200396499633789, 0.11951395416259766, 
0.11895606231689453, 0.11920851135253906, 0.12040841674804688, 0.11894371032714844, 0.11964627075195312, 0.1196748504638672, 0.1196640625, 0.11950342559814453, 0.11921193695068359, 0.11898067474365234, 0.11929814147949219, 0.12081065368652344, 0.11950307464599609, 0.12139167785644531, 0.1194617919921875, 0.11941487884521484, 0.1190277099609375, 0.12060374450683593, 0.11904691314697266, 0.12010675048828125, 0.11995331573486329, 0.11949100494384765]",tokens/s,8.351858140025069,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,14728.306688,10142.810112,0.0,9747.562496,9611.730944,s,1,34.2201796875,34.2201796875,0.0,34.2201796875,34.2201796875,34.2201796875,34.2201796875,[34.2201796875],,kWh,0.0007770563281250058,8.570774036808844e-05,0.00029328690129595825,0.0011560509697890525,,MB,4534.358016,10528.68608,0.0,10112.466944,9989.953536,s,10,1.3355377807617186,0.1335537780761719,0.0007079238234458477,0.1334502716064453,0.13443924865722656,0.13473276214599608,0.13496757293701173,"[0.13502627563476563, 0.1336026611328125, 0.1330015106201172, 0.1329153594970703, 0.13297628784179688, 0.1338626251220703, 0.13385903930664061, 0.1343740234375, 0.13329788208007812, 0.13262211608886718]",tokens/s,1916.8308353956963,kWh,3.955573593243509e-06,4.3622652555131006e-07,2.6355989553242327e-06,7.027399074119053e-06,tokens/kWh,36428840.499867566,MB,4534.358016,10530.783232,0.0,10114.564096,9989.956096,s,10,80.05053466796876,8.005053466796877,0.02531685858535615,8.001396240234374,8.034038037109374,8.038192846679687,8.041516694335938,"[7.952193359375, 7.99734033203125, 8.03311474609375, 7.99462353515625, 8.0054521484375, 8.03254443359375, 7.98501123046875, 8.01132421875, 8.04234765625, 7.9965830078125]",tokens/s,7.870028633950982,kWh,0.00023490137661509009,2.591078800471422e-05,0.00012195066738027532,0.00038276283200007954,tokens/kWh,164592.78365875114,,s,630,80.04725710296626,0.12705913825867668,0.0013620803168982011,0.12683967971801757,0.12818678588867188,0.12899595718383788,0.1319671975708008,"[0.12649116516113282, 0.12545555114746093, 0.12538333129882812, 0.12505817413330078, 0.12551462554931642, 0.12662364959716796, 0.12541961669921875, 0.12550348663330077, 0.12609331512451172, 0.12630445098876952, 0.1256118392944336, 0.12536217498779298, 0.12581068420410157, 0.12666675567626953, 0.12659302520751953, 0.12585747528076172, 0.12592508697509766, 0.1260813446044922, 0.12538655853271485, 0.12597090911865233, 0.12633497619628906, 0.1263250274658203, 0.12574281311035157, 0.12630016326904298, 0.12559359741210938, 0.12646195220947265, 0.13100604248046874, 0.1264193572998047, 0.12596189117431641, 0.126712158203125, 0.12619481658935547, 0.1268048629760742, 0.1268652801513672, 0.126609375, 0.12605878448486327, 0.12571772766113282, 0.12595820617675782, 0.12764841461181642, 0.12527814483642577, 0.12552191925048828, 0.1249053726196289, 0.1253561248779297, 0.126959228515625, 0.1260752639770508, 
0.12618697357177736, 0.1267790069580078, 0.12664720153808592, 0.12654796600341797, 0.12597862243652344, 0.12626739501953124, 0.1260481948852539, 0.12601094055175782, 0.12568831634521485, 0.12668041229248048, 0.12605715179443358, 0.12601446533203126, 0.1261987533569336, 0.12671385955810546, 0.1270456314086914, 0.12625263977050782, 0.12705219268798829, 0.12717884826660156, 0.12660294342041015, 0.12774195098876953, 0.1273446044921875, 0.12821098327636718, 0.12652134704589843, 0.12745113372802735, 0.1281640625, 0.12818409729003907, 0.1298101806640625, 0.13154920959472657, 0.12657481384277344, 0.1287598114013672, 0.1274464340209961, 0.1335650177001953, 0.12803890991210937, 0.12666265869140625, 0.13019287109375, 0.12668163299560548, 0.1264739532470703, 0.1282596435546875, 0.125899169921875, 0.12760665893554687, 0.1260835189819336, 0.12578819274902345, 0.12661551666259765, 0.12666284942626954, 0.12578797149658202, 0.1274449920654297, 0.12633087921142577, 0.12660921478271484, 0.12563670349121095, 0.12578966522216797, 0.1257890853881836, 0.12621526336669922, 0.1261164779663086, 0.12610963439941406, 0.1271539535522461, 0.1268800354003906, 0.12502425384521484, 0.12535142517089845, 0.12551980590820314, 0.12851402282714844, 0.1266951675415039, 0.12675312042236328, 0.1258604507446289, 0.12747299194335937, 0.12592918395996094, 0.12617132568359374, 0.12609337615966798, 0.1262816619873047, 0.1256678695678711, 0.1273079071044922, 0.12688531494140626, 0.1264911346435547, 0.12661331176757812, 0.12654003143310547, 0.12668873596191407, 0.1259727325439453, 0.12722000122070312, 0.12524063873291016, 0.12676390075683594, 0.12656118774414063, 0.12633590698242186, 0.12491161346435548, 0.12725308990478515, 0.12686150360107423, 0.12669747161865236, 0.12809429931640626, 0.1320928955078125, 0.1259980163574219, 0.12838706970214844, 0.12708573150634767, 0.1264293746948242, 0.12899754333496094, 0.1289272918701172, 0.12822569274902343, 0.1265669403076172, 0.12716441345214843, 0.12686675262451172, 0.12775494384765626, 0.12632592010498048, 0.1285804443359375, 0.12642892456054688, 0.127219970703125, 0.12674588775634765, 0.1269378204345703, 0.12834121704101562, 0.1308168029785156, 0.1271329574584961, 0.1267453155517578, 0.1269188766479492, 0.12718057250976564, 0.12930400085449217, 0.12838557434082032, 0.13739004516601563, 0.1263863983154297, 0.12533539581298828, 0.12798902130126952, 0.1272814712524414, 0.12999722290039062, 0.13082829284667968, 0.12764524841308594, 0.1275457305908203, 0.12752041625976562, 0.12770883178710937, 0.1266604766845703, 0.12734681701660155, 0.1268087387084961, 0.12625289916992188, 0.126281982421875, 0.1268875198364258, 0.12698870086669922, 0.12653135681152344, 0.12620646667480467, 0.12657023620605468, 0.12632870483398437, 0.12506492614746093, 0.12639065551757814, 0.126382080078125, 0.12764125061035156, 0.12761328125, 0.12553116607666015, 0.1279333724975586, 0.1272393569946289, 0.1266146240234375, 0.12666646575927734, 0.12677731323242186, 0.1271214065551758, 0.12734668731689452, 0.13081382751464843, 0.1256736297607422, 0.12571180725097655, 0.12634700775146485, 0.12658735656738282, 0.12598652648925782, 0.12669120025634767, 0.1273002243041992, 0.12740338897705078, 0.1284917755126953, 0.12654332733154297, 0.12675564575195314, 0.12715436553955078, 0.1259306869506836, 0.12638905334472655, 0.12634127807617188, 0.12785852813720702, 0.1263677444458008, 0.1265864028930664, 0.1260157470703125, 0.12689020538330079, 0.12804710388183593, 0.12587213134765626, 0.12829029846191406, 0.12704560089111328, 
0.1268045120239258, 0.12610559844970703, 0.126023681640625, 0.12735641479492188, 0.12574934387207032, 0.127048095703125, 0.12612403106689454, 0.12627731323242186, 0.1257616958618164, 0.12623200225830078, 0.1289940185546875, 0.12815359497070314, 0.12667001342773437, 0.1251233901977539, 0.12625305938720705, 0.1297257537841797, 0.1290185546875, 0.12845465087890626, 0.12857139587402344, 0.12759468841552735, 0.1264392318725586, 0.1261480941772461, 0.12576588439941405, 0.12654208374023437, 0.12618256378173828, 0.12730387115478517, 0.12676493072509765, 0.12622108459472656, 0.12669276428222656, 0.1265320281982422, 0.12790595245361328, 0.12589430236816407, 0.12678179168701173, 0.12693910217285156, 0.12657052612304687, 0.12602108764648437, 0.1269273910522461, 0.12696275329589843, 0.12670047760009764, 0.12592908477783202, 0.12537657928466797, 0.1253288345336914, 0.12832850646972657, 0.12700998687744142, 0.12632685089111328, 0.12740665435791015, 0.1276246109008789, 0.12834060668945313, 0.1271968994140625, 0.12635596466064453, 0.12646342468261718, 0.1258317413330078, 0.12493824005126954, 0.1279273910522461, 0.12657350158691405, 0.12676297760009766, 0.12626700592041015, 0.12670365142822265, 0.12760918426513673, 0.12791216278076173, 0.12755126190185548, 0.12724649810791017, 0.1270208969116211, 0.12647628784179688, 0.12739552307128907, 0.1270572509765625, 0.12701590728759765, 0.12668723297119142, 0.12617113494873047, 0.1276570587158203, 0.12700569915771484, 0.12662364959716796, 0.12676464080810546, 0.12714556884765624, 0.12687648010253907, 0.12641484832763672, 0.12619366455078124, 0.1271398391723633, 0.1267116470336914, 0.12721974182128906, 0.12651238250732422, 0.12714278411865235, 0.126823486328125, 0.12666483306884765, 0.12629856109619142, 0.1274493408203125, 0.12670169830322264, 0.12700582122802734, 0.12966761779785158, 0.12836457824707032, 0.1275149459838867, 0.12661555480957032, 0.12851405334472657, 0.1298041229248047, 0.127401123046875, 0.12813209533691405, 0.1283524169921875, 0.1269390411376953, 0.12801164245605468, 0.12712989044189454, 0.1270214080810547, 0.12682879638671876, 0.1270023651123047, 0.1269844512939453, 0.12798566436767578, 0.1280307159423828, 0.1323351287841797, 0.12742610931396484, 0.12845257568359375, 0.12756473541259766, 0.1275014114379883, 0.128787353515625, 0.1275533142089844, 0.128272705078125, 0.1274059829711914, 0.12705792236328126, 0.12691267395019531, 0.12571222686767577, 0.12474285125732422, 0.13084072875976563, 0.12620252990722655, 0.1264353256225586, 0.12670172882080077, 0.12781346893310547, 0.12749199676513673, 0.12737750244140625, 0.127421630859375, 0.12795785522460937, 0.12852940368652344, 0.12772988891601564, 0.12761110687255858, 0.1274043197631836, 0.12747545623779297, 0.12663017272949217, 0.1277496337890625, 0.1275144958496094, 0.12865350341796875, 0.1280047607421875, 0.12893798828125, 0.12760889434814454, 0.1281383056640625, 0.1283162841796875, 0.12760387420654296, 0.12738601684570314, 0.12795539093017577, 0.1274286117553711, 0.12753715515136718, 0.12708175659179688, 0.12740476989746094, 0.12761027526855467, 0.12720829010009765, 0.12727616119384766, 0.12790557098388672, 0.12633773040771484, 0.12668534088134767, 0.12684083557128906, 0.12672525024414064, 0.12667378997802733, 0.12666883087158204, 0.1268080291748047, 0.12755165100097657, 0.12623046112060546, 0.1278726043701172, 0.12691305541992187, 0.13165945434570311, 0.1277740478515625, 0.12698095703125, 0.12597862243652344, 0.12592127990722657, 0.1281249237060547, 0.12802653503417968, 0.12632073974609376, 
0.12982791137695313, 0.12613053131103516, 0.12624246215820312, 0.12543670654296876, 0.1250057907104492, 0.12553644561767577, 0.12720681762695313, 0.12676080322265626, 0.1259362564086914, 0.12617945861816407, 0.1264754867553711, 0.12457561492919922, 0.12570057678222657, 0.12595244598388672, 0.12682444763183592, 0.1258711395263672, 0.12613075256347656, 0.12583773040771484, 0.12711116790771484, 0.12596224212646484, 0.12602095794677734, 0.12560185241699218, 0.1262200927734375, 0.1257520980834961, 0.12574489593505858, 0.12815589904785157, 0.1264005126953125, 0.1257850875854492, 0.12623145294189453, 0.12673414611816405, 0.1271995162963867, 0.1265519027709961, 0.12600768280029298, 0.12684060668945313, 0.12770918273925783, 0.1269078369140625, 0.13011820983886718, 0.12739584350585936, 0.12715817260742188, 0.12646409606933592, 0.1264353256225586, 0.12657244873046875, 0.12594390106201173, 0.12697420501708984, 0.12629376220703126, 0.12624636840820314, 0.12814306640625, 0.12722486114501952, 0.12750771331787109, 0.12684092712402345, 0.12763590240478515, 0.12678758239746094, 0.12680806732177735, 0.1280128936767578, 0.1276006393432617, 0.12689817810058593, 0.12746342468261718, 0.1271357421875, 0.1271357421875, 0.12662783813476564, 0.12694937896728514, 0.12766617584228515, 0.12811468505859375, 0.12797456359863282, 0.12768956756591796, 0.1278908462524414, 0.12688236999511718, 0.12651113891601562, 0.12710486602783203, 0.12768067169189454, 0.13018316650390624, 0.12640460968017578, 0.12690870666503906, 0.12695318603515626, 0.12619065856933595, 0.13767366027832031, 0.12683058929443358, 0.1276804504394531, 0.13040771484375, 0.12647459411621093, 0.12775801849365234, 0.12641542053222657, 0.12630786895751953, 0.12656707000732423, 0.12717874908447266, 0.12827381896972656, 0.12689043426513671, 0.126210205078125, 0.12740560150146485, 0.12616486358642579, 0.12706400299072265, 0.12647081756591796, 0.126501953125, 0.1275381088256836, 0.12665184020996093, 0.12670550537109376, 0.12683747100830078, 0.12617298889160156, 0.1270540771484375, 0.12573693084716797, 0.12631993865966798, 0.12690089416503905, 0.12744703674316407, 0.12767964935302734, 0.1268376007080078, 0.12544796752929688, 0.1254832305908203, 0.1262449264526367, 0.12742649841308593, 0.1262998046875, 0.1261182403564453, 0.12658287811279298, 0.12662774658203124, 0.12582281494140626, 0.12650717163085937, 0.1262808303833008, 0.1264625930786133, 0.12685699462890626, 0.12705391693115234, 0.12739798736572266, 0.1268019485473633, 0.1265739517211914, 0.12800099182128907, 0.12821670532226562, 0.12783673858642577, 0.12719699096679687, 0.1272845458984375, 0.1279086380004883, 0.1270885467529297, 0.12740755462646483, 0.12698271942138672, 0.1275555877685547, 0.12719087982177735, 0.12695977783203125, 0.12734003448486328, 0.12680652618408203, 0.1269614715576172, 0.12716464233398436, 0.12683875274658202, 0.12712754821777345, 0.12732621002197267, 0.12679759979248048, 0.12772310638427734, 0.1277221145629883, 0.12638800048828125, 0.12651881408691407, 0.1279813461303711, 0.12848016357421874, 0.131235107421875, 0.1263458251953125, 0.1275141143798828, 0.12699404907226564, 0.13524044799804688, 0.13757008361816406, 0.1280619201660156, 0.12792217254638671, 0.12945408630371094, 0.12721766662597656, 0.12676505279541014, 0.12680397033691407, 0.12685517120361328, 0.12711526489257813, 0.12762079620361327, 0.12784877014160156, 0.1269516830444336, 0.12766531372070314, 0.12742716979980467, 0.12698419189453125, 0.12757401275634767, 0.12773094177246094, 0.12768118286132812, 0.1276655044555664, 
0.12768720245361329, 0.1276539535522461, 0.12869573974609375, 0.12751305389404297, 0.1261647644042969, 0.12559529876708983, 0.12651602935791015, 0.12801632690429687, 0.12506527709960938, 0.12434786987304687, 0.12576131439208985, 0.12668211364746093, 0.12708223724365234, 0.12663926696777345, 0.1268580780029297, 0.12575926208496094, 0.12725062561035155, 0.12702342224121094, 0.12741990661621094, 0.12759664154052736, 0.12732166290283203, 0.1270052490234375, 0.12753510284423827, 0.12722994995117187, 0.12735874938964845, 0.12684925079345702, 0.12648652648925782, 0.1266841583251953, 0.1280369873046875, 0.12664630126953125, 0.12703215789794922, 0.12669068908691405, 0.12759465789794922, 0.1266990737915039, 0.12612086486816407, 0.1264680938720703, 0.12721695709228514, 0.12626998138427734, 0.12640681457519531, 0.126536865234375, 0.1272369613647461, 0.12636160278320313, 0.12615042877197266, 0.1269697952270508, 0.12692483520507813, 0.12669773101806642, 0.1266728973388672, 0.12652543640136718, 0.12617436981201172, 0.12767654418945312, 0.1273947525024414, 0.1271703338623047, 0.1276374740600586, 0.12987091064453124, 0.12684796905517579, 0.12650847625732423, 0.12627410888671875, 0.12585158538818358, 0.12682041931152344, 0.1270203552246094, 0.1270074234008789, 0.12680985260009767, 0.13043122863769532, 0.12649267578125, 0.12600633239746092, 0.12930758666992187, 0.1258138885498047, 0.12746431732177735, 0.1279815673828125, 0.12647792053222656]",tokens/s,7.870350875228854,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 60, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1097.445376,709.820416,0.0,314.5728,299.62752,s,1,8.251154296875,8.251154296875,0.0,8.251154296875,8.251154296875,8.251154296875,8.251154296875,[8.251154296875],,kWh,3.137829248750374e-05,3.4540586563329293e-06,1.0579730685994493e-05,4.541208182983116e-05,,MB,1266.520064,818.87232,0.0,402.653184,359.612416,s,10,0.3170804176330566,0.03170804176330567,0.00022690988150239584,0.03174241638183594,0.031969792556762695,0.03197713623046875,0.0319830111694336,"[0.03198447990417481, 0.031940736770629884, 0.03196816062927246, 0.03174582481384278, 0.03183510398864746, 0.031739007949829104, 0.03159184074401856, 0.0313450870513916, 0.03159705543518066, 0.03133312034606934]",tokens/s,8073.661625369045,kWh,9.299740253450393e-07,1.025596733217605e-07,4.2864949366239933e-07,1.4611831923291992e-06,tokens/kWh,175200482.28307578,MB,1300.754432,841.940992,0.0,425.721856,360.53248,s,10,19.3361962890625,1.93361962890625,0.010603024772909005,1.9284808349609377,1.9499725585937502,1.950110546875,1.9502209375,"[1.94994189453125, 1.9247216796875, 1.95024853515625, 1.9486453857421875, 1.9264437255859375, 1.92957958984375, 1.9292215576171876, 1.9277401123046876, 1.9248831787109375, 1.9247706298828124]",tokens/s,32.581382117865594,kWh,5.587296759256952e-05,6.162513693174058e-06,2.1699062652938576e-05,8.373454393868214e-05,tokens/kWh,752377.657256176,,s,630,19.330464256286625,0.030683276597280353,0.0004544658682586854,0.030625375747680664,0.031081328582763672,0.03126024866104126,0.03246809272766115,"[0.030281471252441405, 0.03037238311767578, 0.030021631240844726, 0.030042015075683593, 0.030056543350219726, 0.030045984268188476, 0.030284000396728517, 0.03070319938659668, 0.030895679473876954, 0.03063216018676758, 0.030739072799682618, 0.03137059211730957, 0.03217891311645508, 0.03151600074768066, 0.031154783248901367, 0.03147529602050781, 0.03094099235534668, 0.031064672470092772, 0.030953535079956053, 0.030599103927612305, 0.030621952056884765, 0.03084262466430664, 0.030508447647094726, 0.03057891273498535, 0.030730623245239258, 0.030427007675170897, 0.03045199966430664, 0.03071574401855469, 0.030662656784057617, 0.03100467109680176, 0.031178976058959963, 0.030877248764038086, 0.030712032318115236, 0.030627840042114256, 0.03081625556945801, 0.03078963279724121, 0.0307893123626709, 0.03063225555419922, 0.03073843193054199, 0.03081964874267578, 0.031077056884765624, 0.03114147186279297, 0.03092889595031738, 0.030979808807373048, 0.0310064640045166, 0.030968767166137695, 0.03106719970703125, 0.031080543518066408, 0.0310784969329834, 0.030921472549438476, 0.03143654441833496, 0.031029504776000978, 0.031117311477661135, 0.03136422348022461, 0.031000640869140624, 0.031144767761230468, 0.03504537582397461, 0.0311910400390625, 0.031229888916015625, 0.03147545623779297, 0.031080768585205077, 0.031006719589233397, 0.031122880935668944, 0.03089619255065918, 0.030849407196044922, 0.030539552688598634, 0.030498367309570312, 0.03055683135986328, 0.030631935119628906, 0.03061564826965332, 0.03057872009277344, 0.0304899845123291, 0.03042355155944824, 0.03028976058959961, 0.030458015441894533, 0.030522848129272463, 0.03063248062133789, 0.030864736557006837, 0.030747295379638672, 0.03077849578857422, 0.030429855346679687, 0.030340543746948244, 0.030425600051879883, 0.030222463607788085, 0.03029599952697754, 0.03016873550415039, 0.030132863998413088, 0.0302073917388916, 0.030236480712890625, 0.030417503356933592, 0.030826623916625977, 0.031233152389526366, 
0.03082966423034668, 0.030920480728149412, 0.030965728759765623, 0.030707679748535156, 0.03134694480895996, 0.030478303909301757, 0.030318431854248047, 0.030552480697631838, 0.03049760055541992, 0.030303232192993163, 0.030392095565795897, 0.03041916847229004, 0.030457151412963866, 0.030312543869018556, 0.030615936279296874, 0.030663999557495117, 0.03063382339477539, 0.03064713668823242, 0.030658559799194338, 0.030457855224609375, 0.03060438346862793, 0.03019580841064453, 0.030532447814941407, 0.03044588851928711, 0.03038787269592285, 0.03029520034790039, 0.03022729682922363, 0.030104896545410157, 0.03027014350891113, 0.030410560607910156, 0.03049478340148926, 0.030797952651977538, 0.030900447845458985, 0.030946271896362305, 0.030674367904663085, 0.03073683166503906, 0.030707839965820313, 0.030923999786376954, 0.031034143447875976, 0.03094528007507324, 0.030891008377075195, 0.030863872528076174, 0.030956031799316407, 0.030819583892822265, 0.03089036750793457, 0.030966144561767578, 0.03092633628845215, 0.031016799926757814, 0.031054431915283204, 0.03124995231628418, 0.031113759994506836, 0.031356576919555665, 0.031268672943115236, 0.03180803108215332, 0.031013151168823243, 0.03109449577331543, 0.031029247283935548, 0.030908416748046875, 0.030891263961791992, 0.031104927062988282, 0.031234912872314453, 0.03106800079345703, 0.031154336929321288, 0.031195199966430665, 0.031532991409301755, 0.031210559844970703, 0.03099535942077637, 0.030863391876220704, 0.030682912826538088, 0.03148342323303223, 0.030949344635009767, 0.031054271697998046, 0.030797887802124023, 0.03070787239074707, 0.030611520767211915, 0.03056764793395996, 0.030561248779296876, 0.030670591354370117, 0.030664800643920898, 0.030762496948242186, 0.03072617530822754, 0.030781024932861327, 0.03192508888244629, 0.03082838439941406, 0.0312291202545166, 0.030749664306640626, 0.031202655792236328, 0.030743200302124022, 0.03079529571533203, 0.030480159759521484, 0.030503616333007813, 0.030511104583740234, 0.03079167938232422, 0.030949695587158203, 0.03073606491088867, 0.030642175674438478, 0.031086368560791014, 0.030702655792236327, 0.030596416473388673, 0.030536096572875978, 0.030541952133178712, 0.03077324867248535, 0.030560192108154298, 0.030467872619628907, 0.030322975158691406, 0.03034832000732422, 0.03069593620300293, 0.03258620834350586, 0.030707296371459962, 0.031177120208740236, 0.030778879165649413, 0.030644927978515625, 0.030706527709960938, 0.03097923278808594, 0.03069523239135742, 0.0310435848236084, 0.030795936584472657, 0.03116534423828125, 0.030826847076416017, 0.03054243278503418, 0.030822656631469728, 0.03067840003967285, 0.03089446449279785, 0.03103116798400879, 0.03102511978149414, 0.030967071533203126, 0.03100364875793457, 0.031340831756591796, 0.03165920066833496, 0.03120518493652344, 0.03130380821228027, 0.030951904296875, 0.03096486473083496, 0.031349632263183595, 0.030934560775756837, 0.030840831756591795, 0.030859743118286133, 0.030934207916259764, 0.030817472457885742, 0.03090598487854004, 0.031024351119995117, 0.030904991149902344, 0.030974079132080078, 0.031005823135375976, 0.031054431915283204, 0.03107811164855957, 0.030912256240844725, 0.03127193641662598, 0.030947328567504883, 0.03090640068054199, 0.031299360275268556, 0.030877824783325195, 0.031035263061523436, 0.031025375366210937, 0.03077507209777832, 0.030889984130859374, 0.03091561508178711, 0.03074905586242676, 0.030786304473876952, 0.03094646453857422, 0.030737119674682616, 0.03072323226928711, 0.030710336685180663, 0.031189184188842773, 
0.030703231811523436, 0.030824192047119142, 0.030362560272216798, 0.03031235122680664, 0.03057744026184082, 0.03088662338256836, 0.03024105644226074, 0.030525440216064452, 0.030300159454345704, 0.030365695953369142, 0.030600959777832032, 0.03062156867980957, 0.030501056671142578, 0.03046009635925293, 0.030681087493896485, 0.030351648330688475, 0.03039398384094238, 0.030701663970947264, 0.030621023178100587, 0.030500640869140627, 0.030448511123657228, 0.030320640563964843, 0.030369728088378907, 0.030604671478271485, 0.030321344375610352, 0.03015884780883789, 0.030277631759643556, 0.03046928024291992, 0.030390975952148437, 0.030424703598022462, 0.030253887176513672, 0.030369504928588868, 0.030329183578491212, 0.03031376075744629, 0.030170976638793947, 0.030257856369018555, 0.030459423065185547, 0.030505279541015624, 0.030537727355957032, 0.03045487976074219, 0.03042755126953125, 0.030325056076049805, 0.03044937515258789, 0.030551904678344725, 0.03347520065307617, 0.03093708801269531, 0.030636032104492186, 0.030551647186279295, 0.031021087646484376, 0.030540159225463867, 0.030751903533935546, 0.03047305679321289, 0.030624959945678713, 0.03045984077453613, 0.030507583618164063, 0.030744928359985352, 0.03064169692993164, 0.030651968002319337, 0.030729087829589843, 0.0306527042388916, 0.03064089584350586, 0.03054207992553711, 0.03067568016052246, 0.03062579154968262, 0.030583040237426758, 0.03077299118041992, 0.030666175842285155, 0.030486623764038087, 0.03041267204284668, 0.030580415725708007, 0.03049776077270508, 0.03069126319885254, 0.030814207077026368, 0.030810111999511718, 0.031161535263061525, 0.033751873016357424, 0.030863359451293947, 0.03094233512878418, 0.03089308738708496, 0.031235776901245117, 0.03082255935668945, 0.03088912010192871, 0.030720767974853517, 0.030542112350463866, 0.030544960021972656, 0.030548736572265624, 0.03057254409790039, 0.03061257553100586, 0.03069635200500488, 0.030697471618652345, 0.030705663681030275, 0.03071574401855469, 0.030554655075073243, 0.03064713668823242, 0.03074502372741699, 0.030666112899780273, 0.030725088119506836, 0.03056025505065918, 0.030386560440063475, 0.030324575424194335, 0.03025708770751953, 0.03019513511657715, 0.030183839797973632, 0.030293407440185546, 0.030425504684448244, 0.030314687728881837, 0.03028963279724121, 0.030361888885498046, 0.030281919479370117, 0.03028665542602539, 0.030333951950073244, 0.030547584533691406, 0.03106217575073242, 0.03034339141845703, 0.03070534324645996, 0.03018783950805664, 0.03021571159362793, 0.03012041664123535, 0.030218175888061524, 0.030386240005493163, 0.030396703720092774, 0.030605152130126954, 0.030218143463134766, 0.030325632095336914, 0.03040009689331055, 0.030400480270385742, 0.03138179206848145, 0.030402559280395508, 0.03037139129638672, 0.03039267158508301, 0.030416704177856444, 0.030326400756835938, 0.030496831893920898, 0.03051375961303711, 0.030515296936035156, 0.03050281524658203, 0.030608800888061522, 0.03052400016784668, 0.030518272399902343, 0.0306997127532959, 0.030583616256713866, 0.03081785583496094, 0.030515935897827147, 0.03139590454101562, 0.030652063369750977, 0.030543872833251953, 0.030797695159912108, 0.030588640213012695, 0.03058527946472168, 0.030909984588623048, 0.030722496032714843, 0.03056230354309082, 0.03072380828857422, 0.030771488189697264, 0.03060121536254883, 0.030724096298217773, 0.03076313591003418, 0.03078335952758789, 0.03062326431274414, 0.03073891258239746, 0.030683135986328124, 0.030654464721679688, 0.030700927734375, 0.030556800842285157, 
0.030552064895629883, 0.03059916877746582, 0.03072822380065918, 0.030592159271240236, 0.03066691207885742, 0.030616224288940428, 0.03076300811767578, 0.030756864547729492, 0.030799776077270507, 0.03052137565612793, 0.030517311096191407, 0.030530624389648438, 0.031003135681152344, 0.03063225555419922, 0.030403039932250978, 0.03086649513244629, 0.03036627197265625, 0.03024668884277344, 0.030417152404785156, 0.030633983612060548, 0.030448991775512694, 0.03015100860595703, 0.030436416625976563, 0.03111849594116211, 0.03123356819152832, 0.030482368469238283, 0.030579008102416993, 0.030422367095947266, 0.030368127822875977, 0.03028611183166504, 0.03046112060546875, 0.03047097587585449, 0.03038934326171875, 0.030499359130859376, 0.03053001594543457, 0.030404512405395507, 0.030428768157958985, 0.030536096572875978, 0.03034511947631836, 0.03059312057495117, 0.030405855178833006, 0.031025951385498046, 0.030601568222045898, 0.0309529914855957, 0.030681215286254882, 0.03058483123779297, 0.030734336853027344, 0.030459680557250977, 0.030634208679199217, 0.030312320709228516, 0.03021836853027344, 0.03024892807006836, 0.030234655380249022, 0.03042416000366211, 0.030649248123168944, 0.03041279983520508, 0.03043971252441406, 0.030359039306640623, 0.030527999877929687, 0.030569375991821288, 0.030500991821289063, 0.030370208740234376, 0.030503231048583983, 0.030525407791137694, 0.030658559799194338, 0.03056025505065918, 0.03068012809753418, 0.030559167861938477, 0.030580448150634765, 0.030576351165771485, 0.03050044822692871, 0.030611583709716797, 0.030817119598388672, 0.030772319793701174, 0.030837663650512694, 0.03173904037475586, 0.0312390079498291, 0.030682720184326173, 0.030659231185913086, 0.030973695755004884, 0.030666751861572264, 0.03063199996948242, 0.030631423950195313, 0.030653087615966797, 0.030483488082885743, 0.030589920043945312, 0.030521343231201172, 0.030507328033447266, 0.030664384841918944, 0.030447616577148437, 0.030529535293579102, 0.03047756767272949, 0.030513919830322266, 0.030531871795654298, 0.030658559799194338, 0.030631647109985352, 0.030627552032470702, 0.030615455627441408, 0.030315103530883788, 0.031026975631713867, 0.03260416030883789, 0.030616704940795898, 0.030356128692626952, 0.03041878318786621, 0.030318944931030274, 0.030266847610473633, 0.03038470458984375, 0.030554239273071288, 0.03058470344543457, 0.030410943984985353, 0.03035081672668457, 0.030613536834716796, 0.030311071395874023, 0.03088921546936035, 0.030595712661743164, 0.03086089515686035, 0.030742719650268556, 0.03078758430480957, 0.03166550445556641, 0.03057142448425293, 0.031127328872680663, 0.03262051010131836, 0.03105753517150879, 0.0307142391204834, 0.030441471099853516, 0.030390592575073243, 0.030219968795776368, 0.03016499137878418, 0.030523584365844725, 0.030860992431640626, 0.03043984031677246, 0.030096511840820312, 0.03030112075805664, 0.030364799499511718, 0.030173919677734376, 0.030137855529785155, 0.03018998336791992, 0.030166463851928713, 0.030276159286499023, 0.030244768142700194, 0.030174816131591797, 0.030511135101318358, 0.03006502342224121, 0.02994790458679199, 0.02998886489868164, 0.02998201560974121, 0.03011859130859375, 0.02995712089538574, 0.030079072952270507, 0.030020511627197266, 0.03038640022277832, 0.030219520568847656, 0.03042153549194336, 0.03072982406616211, 0.030522943496704102, 0.03038822364807129, 0.030524255752563477, 0.030430335998535157, 0.03043008041381836, 0.030584800720214845, 0.03058684730529785, 0.03059244728088379, 0.030970624923706055, 0.030689407348632812, 
0.03071308708190918, 0.03060326385498047, 0.03052400016784668, 0.030680992126464843, 0.030912031173706056, 0.03074640083312988, 0.030769855499267577, 0.03076710319519043, 0.030873600006103515, 0.030619647979736327, 0.030619455337524415, 0.030759008407592773, 0.03064841651916504, 0.03141427230834961, 0.03123404884338379, 0.0307587833404541, 0.03060758399963379, 0.03056835174560547, 0.030674943923950194, 0.030685184478759765, 0.030653791427612306, 0.030657024383544923, 0.030552511215209962, 0.03050499153137207, 0.030217248916625975, 0.03015331268310547, 0.030006336212158202, 0.02993257522583008, 0.02990412712097168, 0.02996281623840332, 0.03018988800048828, 0.030218080520629884, 0.030113792419433592, 0.030343135833740233, 0.030023263931274413, 0.029884864807128906, 0.03014784049987793, 0.030324735641479493, 0.030281984329223632, 0.034619422912597654, 0.03074662399291992, 0.030825279235839845, 0.030555904388427733, 0.030314399719238282, 0.03031235122680664, 0.030097375869750975]",tokens/s,32.591043424894075,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,3864.723456,2152.660992,0.0,1757.413376,1736.37632,s,1,12.5585712890625,12.5585712890625,0.0,12.5585712890625,12.5585712890625,12.5585712890625,12.5585712890625,[12.5585712890625],,kWh,0.000157039415020832,1.731543629477273e-05,5.904226945598057e-05,0.0002333971207715853,,MB,3873.869824,2389.639168,0.0,1973.420032,1922.784256,s,10,0.635660041809082,0.06356600418090821,0.0005778853990760676,0.06343187141418458,0.06426354904174805,0.06442192039489746,0.06454861747741698,"[0.062454689025878904, 0.06324585723876953, 0.06330374526977539, 0.0635445442199707, 0.06458029174804687, 0.06390063858032227, 0.06422835540771485, 0.06392291259765626, 0.06315980911254883, 0.06331919860839844]",tokens/s,4027.3099323881765,kWh,1.8526656114979043e-06,2.0427130066203017e-07,1.1502136487214724e-06,3.2071505608814066e-06,tokens/kWh,79821634.54454246,MB,3876.315136,2410.610688,0.0,1994.391552,1971.314176,s,10,38.39297583007813,3.839297583007813,0.018174484599691544,3.844256469726562,3.8540014892578123,3.861956799316406,3.868321047363281,"[3.80193359375, 3.84812841796875, 3.84927783203125, 3.852233642578125, 3.869912109375, 3.850801513671875, 3.823289794921875, 3.840384521484375, 3.829564453125, 3.827449951171875]",tokens/s,16.409251598216578,kWh,0.00011158585627516668,1.2308142151414446e-05,4.911332656207862e-05,0.00017300732498865974,tokens/kWh,364146.4313960667,,s,630,38.38869422912598,0.060934435284326946,0.0007122517880582249,0.06083739280700684,0.061808459854125974,0.062165083312988284,0.06350617919921876,"[0.059291648864746097, 0.05961318588256836, 0.059856895446777345, 0.06021920013427735, 0.06000044631958008, 0.060630462646484376, 0.05995782470703125, 0.05987737655639649, 0.05959881591796875, 0.05912934494018555, 0.059335201263427735, 0.05983625411987305, 0.060623008728027346, 0.06044672012329102, 0.06023948669433594, 
0.060146240234375, 0.06016390228271484, 0.0603504638671875, 0.06021843338012695, 0.060310462951660156, 0.06053273773193359, 0.06044876861572265, 0.061153247833251954, 0.06085635375976563, 0.060499969482421874, 0.06114060974121094, 0.06075635147094727, 0.0605777587890625, 0.060631072998046875, 0.06044876861572265, 0.06047948837280273, 0.060485633850097656, 0.06055116653442383, 0.06176515197753906, 0.06024035263061524, 0.06013951873779297, 0.060466625213623046, 0.06007606506347656, 0.06049846267700195, 0.06016169738769531, 0.06026176071166992, 0.060042495727539065, 0.05960265731811523, 0.06001375961303711, 0.06102508926391602, 0.06027468872070312, 0.05995075225830078, 0.060231201171875, 0.05997647857666016, 0.060308704376220705, 0.06084403228759765, 0.06220064163208008, 0.060393470764160156, 0.06033817672729492, 0.06003302383422852, 0.06009439849853516, 0.060110305786132814, 0.060014686584472655, 0.06055987167358398, 0.059641857147216794, 0.06018832015991211, 0.06171068954467773, 0.061955646514892576, 0.06022143936157227, 0.06018003082275391, 0.05993638229370117, 0.06020899200439453, 0.06060335922241211, 0.06074934387207031, 0.060477920532226566, 0.06094233703613281, 0.061241310119628904, 0.0617042236328125, 0.06184054565429688, 0.061911712646484374, 0.06164633560180664, 0.06165555191040039, 0.061228862762451174, 0.06194419097900391, 0.061341697692871094, 0.06092390441894531, 0.06188236618041992, 0.06077849578857422, 0.06077376174926758, 0.06100559997558594, 0.060656352996826174, 0.060919937133789064, 0.060872703552246096, 0.06098854446411133, 0.06094527816772461, 0.06083776092529297, 0.060729598999023436, 0.060566497802734376, 0.06158428955078125, 0.06048972702026367, 0.060278785705566405, 0.060409854888916016, 0.06029107284545898, 0.061212352752685543, 0.06143212890625, 0.06094438552856445, 0.060361984252929685, 0.06061686325073242, 0.061177600860595704, 0.06185865783691406, 0.06116966247558594, 0.06079283142089844, 0.06041177749633789, 0.06078620910644531, 0.060789440155029295, 0.06215024185180664, 0.06159772872924805, 0.06064976119995117, 0.060622238159179685, 0.06095523071289063, 0.062312446594238284, 0.06128620910644531, 0.06211155319213867, 0.06183155059814453, 0.06164275360107422, 0.061843456268310545, 0.06095872116088867, 0.06184960174560547, 0.06080454254150391, 0.06080527877807617, 0.06193791961669922, 0.060219390869140625, 0.06033283233642578, 0.0604334716796875, 0.060400161743164066, 0.06222275161743164, 0.0607825927734375, 0.060469249725341796, 0.06051225662231445, 0.06033718490600586, 0.061356639862060545, 0.06084236907958984, 0.06070272064208984, 0.06107136154174805, 0.06373580932617187, 0.06077203369140625, 0.062404926300048826, 0.060663009643554686, 0.06050896072387695, 0.060184574127197264, 0.060501632690429685, 0.061247871398925784, 0.06100377655029297, 0.060733409881591795, 0.06091068649291992, 0.06081631851196289, 0.06260921478271485, 0.06404249572753906, 0.0608422737121582, 0.061094112396240234, 0.060866752624511716, 0.06110531234741211, 0.06308278274536133, 0.061722335815429685, 0.0611945915222168, 0.06098160171508789, 0.06103670501708985, 0.060843582153320315, 0.06179065704345703, 0.06097071838378906, 0.060712257385253904, 0.06083273696899414, 0.06112384033203125, 0.061284927368164065, 0.06254995346069336, 0.06101222229003906, 0.060704193115234374, 0.06095110321044922, 0.060681983947753905, 0.061233409881591795, 0.06118550491333008, 0.06071459197998047, 0.060427200317382815, 0.060657054901123046, 0.06067161560058594, 0.06161481475830078, 0.0611514892578125, 
0.060674049377441405, 0.06046310424804688, 0.060620704650878904, 0.06087484741210938, 0.061274112701416014, 0.06101113510131836, 0.06105702209472656, 0.06061414337158203, 0.06222467041015625, 0.06091795349121094, 0.060606494903564456, 0.06066998291015625, 0.06259503936767578, 0.061312255859375, 0.06105574417114258, 0.0607006721496582, 0.060825408935546874, 0.06153030395507812, 0.0607907829284668, 0.060954143524169925, 0.06184592056274414, 0.06147078323364258, 0.061183998107910156, 0.06133555221557617, 0.061134849548339844, 0.06238003158569336, 0.061181598663330075, 0.06079904174804687, 0.06090576171875, 0.06411264038085937, 0.061233150482177735, 0.061042686462402344, 0.061851646423339846, 0.061159423828125, 0.06030131149291992, 0.06029625701904297, 0.06022377777099609, 0.06073807907104492, 0.06060995101928711, 0.06082633590698242, 0.06278332901000977, 0.06092563247680664, 0.06021289443969727, 0.06024652862548828, 0.06014803314208984, 0.06029926300048828, 0.06052249526977539, 0.06029107284545898, 0.06029919815063477, 0.060176448822021486, 0.06072524642944336, 0.0603256950378418, 0.06038320159912109, 0.06107139205932617, 0.061929183959960936, 0.061085472106933596, 0.06175187301635742, 0.06152540969848633, 0.060889537811279294, 0.061774112701416015, 0.06127983856201172, 0.06193398284912109, 0.06119424057006836, 0.06132940673828125, 0.061317119598388675, 0.06158335876464844, 0.062042110443115236, 0.0612498550415039, 0.06142329788208008, 0.06162633514404297, 0.06127443313598633, 0.06182672119140625, 0.06168611145019531, 0.06073062515258789, 0.061407806396484375, 0.06187027359008789, 0.061054241180419924, 0.06108233642578125, 0.06128380966186524, 0.060985984802246096, 0.0619703369140625, 0.061658687591552734, 0.06128041458129883, 0.06117814254760742, 0.062134273529052736, 0.06137651062011719, 0.06217932891845703, 0.061884414672851565, 0.06112575912475586, 0.06117670440673828, 0.06154444885253906, 0.060870655059814455, 0.061684959411621096, 0.06138515090942383, 0.060967262268066404, 0.06236179351806641, 0.06318182373046875, 0.06099625778198242, 0.0619769287109375, 0.060835647583007815, 0.060889087677001956, 0.061141342163085935, 0.06092915344238281, 0.06107360076904297, 0.06135638427734375, 0.061017822265625, 0.06120476913452148, 0.060989280700683594, 0.06078806304931641, 0.061309761047363284, 0.06158335876464844, 0.061015296936035156, 0.06161484909057617, 0.06128054428100586, 0.061705951690673826, 0.06231353759765625, 0.06107231903076172, 0.06102582550048828, 0.06149168014526367, 0.0613039665222168, 0.06202249526977539, 0.06195788955688476, 0.061067073822021485, 0.061352127075195315, 0.06095008087158203, 0.06115158462524414, 0.061622623443603516, 0.06120175933837891, 0.060957344055175784, 0.06109593582153321, 0.06127001571655273, 0.062494720458984375, 0.06216019058227539, 0.06112870407104492, 0.06100377655029297, 0.06177740859985351, 0.06090393447875977, 0.06084198379516602, 0.06238617706298828, 0.061118465423583984, 0.061034496307373044, 0.06116556930541992, 0.06076416015625, 0.06084640121459961, 0.0617053108215332, 0.06085283279418945, 0.061504959106445316, 0.06065235137939453, 0.06063801574707031, 0.06159203338623047, 0.06148758316040039, 0.0610750732421875, 0.06127449417114258, 0.06061827087402344, 0.061585121154785157, 0.06146329498291016, 0.061329216003417966, 0.06094457626342773, 0.06081299209594727, 0.06093827056884766, 0.0628267822265625, 0.06098515319824219, 0.06240995025634766, 0.060751903533935545, 0.060910526275634765, 0.061755199432373044, 0.06150960159301758, 0.061595870971679685, 
0.06143590545654297, 0.06065948867797852, 0.060766433715820314, 0.06191513442993164, 0.06104822540283203, 0.06216908645629883, 0.06061936187744141, 0.0607636489868164, 0.06158796691894531, 0.06127001571655273, 0.06130435180664062, 0.06041212844848633, 0.06049577713012695, 0.060628448486328125, 0.06087481689453125, 0.060711166381835935, 0.06078521728515625, 0.060835166931152346, 0.06061865615844726, 0.060648193359375, 0.060854270935058595, 0.06029260635375976, 0.06136678314208984, 0.06255327987670899, 0.06072150421142578, 0.06045334243774414, 0.06024009704589844, 0.0602531852722168, 0.059959358215332034, 0.06119164657592773, 0.06322367858886718, 0.06057609558105469, 0.06256412887573243, 0.060857120513916015, 0.060636417388916015, 0.060701438903808594, 0.060641281127929686, 0.0606416015625, 0.06060800170898437, 0.06067577743530273, 0.06052710342407226, 0.060739585876464844, 0.060905216217041015, 0.060690689086914065, 0.06058582305908203, 0.06069878387451172, 0.06108160018920898, 0.060602302551269534, 0.06078265762329101, 0.060365825653076174, 0.06000128173828125, 0.0599705924987793, 0.05969968032836914, 0.05977958297729492, 0.05950019073486328, 0.0599060173034668, 0.06058399963378906, 0.06036227035522461, 0.06051715087890625, 0.060510208129882816, 0.06026393508911133, 0.06018304061889648, 0.05987667083740234, 0.059677375793457034, 0.0604304313659668, 0.06071196746826172, 0.06066265487670899, 0.06016819381713867, 0.06013324737548828, 0.06022751998901367, 0.06013868713378906, 0.059956161499023435, 0.06037510299682617, 0.060539905548095706, 0.06056857681274414, 0.06068633651733398, 0.06054297637939453, 0.06062934494018555, 0.060642208099365234, 0.06118886566162109, 0.06098739242553711, 0.06125363159179688, 0.061530113220214844, 0.061468257904052734, 0.06093648147583008, 0.060945728302001956, 0.060664638519287106, 0.060763198852539065, 0.060942848205566405, 0.06362156677246093, 0.06204006576538086, 0.06055763244628906, 0.060493824005126956, 0.06109743881225586, 0.060708927154541015, 0.06037465667724609, 0.060791454315185546, 0.060955902099609376, 0.06053766250610351, 0.06020256042480469, 0.06040019226074219, 0.06042572784423828, 0.06035696029663086, 0.060403072357177734, 0.060592830657958986, 0.06082316970825195, 0.06056924819946289, 0.06075008010864258, 0.06035670471191406, 0.060461536407470706, 0.06019414520263672, 0.06065350341796875, 0.06090147018432617, 0.061016704559326174, 0.06107340621948242, 0.06080716705322266, 0.06088044738769531, 0.0606223030090332, 0.06086860656738281, 0.060805824279785155, 0.06092828750610352, 0.0608370246887207, 0.06090985488891602, 0.06103062438964844, 0.06124095916748047, 0.06513734436035157, 0.06132457733154297, 0.06180643081665039, 0.06090639877319336, 0.061144927978515624, 0.06142377471923828, 0.061060863494873045, 0.06114310455322266, 0.06096607971191406, 0.06099456024169922, 0.061726497650146483, 0.06107926559448242, 0.061302879333496096, 0.061024673461914064, 0.060851551055908205, 0.06114985656738281, 0.060903423309326174, 0.06077030563354492, 0.060903423309326174, 0.06085599899291992, 0.06075628662109375, 0.06142771148681641, 0.060657470703125, 0.060620223999023434, 0.060521343231201175, 0.060508033752441404, 0.06467788696289062, 0.060360225677490234, 0.06033660888671875, 0.0604925422668457, 0.061249759674072264, 0.06042816162109375, 0.06043414306640625, 0.06098540878295899, 0.06067193603515625, 0.06060483169555664, 0.060545310974121094, 0.06054198455810547, 0.06066451263427734, 0.060738975524902344, 0.06085228729248047, 0.061437950134277344, 
0.06044316864013672, 0.06037267303466797, 0.06086073684692383, 0.060821502685546876, 0.06119424057006836, 0.060999454498291014, 0.06082787322998047, 0.060851871490478514, 0.060948543548583985, 0.06079107284545898, 0.061663230895996096, 0.06095439910888672, 0.060997600555419924, 0.06069200134277344, 0.06103897476196289, 0.060617057800292966, 0.06108979034423828, 0.06290415954589844, 0.061139137268066406, 0.060533790588378905, 0.06045695877075195, 0.06145264053344727, 0.06046777725219726, 0.06107305526733398, 0.06077824020385742, 0.060893825531005856, 0.060313438415527346, 0.06201971054077148, 0.06192950439453125, 0.06065129470825195, 0.060502239227294925, 0.06026444625854492, 0.06012054443359375, 0.059820575714111326, 0.060217342376708984, 0.06055260848999024, 0.06035721588134765, 0.06051424026489258, 0.06033414459228516, 0.06015974426269531, 0.05999155044555664, 0.05987609481811523, 0.06269776153564453, 0.060933952331542966, 0.060544513702392576, 0.06042051315307617, 0.06036172866821289, 0.06049280166625977, 0.06031340789794922, 0.06118214416503906, 0.060633087158203126, 0.06081846237182617, 0.06088803100585938, 0.060923358917236325, 0.0607158088684082, 0.06088473510742187, 0.06084377670288086, 0.06144137573242187, 0.06082857513427734, 0.060870433807373045, 0.061042240142822266, 0.06086313629150391, 0.06010262298583984, 0.05970127868652344, 0.05975606536865234, 0.0601297607421875, 0.060361888885498045, 0.060645313262939454, 0.060537761688232425, 0.060849632263183594, 0.060215839385986326, 0.060120094299316404, 0.06030640029907226, 0.06021529769897461, 0.06040480041503906, 0.06052755355834961, 0.06075392150878906, 0.06071062469482422, 0.06095286560058594, 0.06065151977539063, 0.06039955139160156, 0.0601416015625, 0.06009040069580078, 0.06098886489868164, 0.06199148941040039, 0.060663806915283204, 0.06071078491210938, 0.060612510681152344, 0.06073980712890625, 0.06057196807861328, 0.06076147079467773, 0.060647071838378905, 0.060635807037353516, 0.06066524887084961, 0.06094704055786133, 0.060794303894042966, 0.06123107147216797, 0.060687232971191406, 0.06086569595336914, 0.060862110137939456, 0.060939167022705076, 0.060680191040039064, 0.06112006378173828, 0.06082809448242187, 0.06080470275878906, 0.0612149429321289, 0.06126335906982422, 0.06506156921386719, 0.06097318267822266, 0.06041775894165039, 0.06052259063720703, 0.06027679824829101, 0.06031299209594727]",tokens/s,16.41108176901759,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,5245.898752,3461.28384,0.0,3066.036224,2865.160192,s,1,13.449072265625,13.449072265625,0.0,13.449072265625,13.449072265625,13.449072265625,13.449072265625,[13.449072265625],,kWh,0.0001751543946833332,1.931352334526294e-05,6.485394077201945e-05,0.0002593218588006156,,MB,5298.737152,3798.925312,0.0,3382.706176,3158.448128,s,10,0.9427298507690429,0.09427298507690429,0.0004287602052404247,0.09424558258056641,0.0946105682373047,0.09491861114501954,0.09516504547119141,"[0.09522665405273438, 0.09449411010742187, 0.09451715087890625, 0.093910400390625, 0.0945421142578125, 0.09384623718261718, 0.09432064056396484, 0.09417052459716797, 0.09367011260986328, 0.09403190612792969]",tokens/s,2715.518128456047,kWh,2.76874553930794e-06,3.0534151131535305e-07,1.556185731320564e-06,4.630272781943857e-06,tokens/kWh,55288319.2969308,MB,5302.882304,3798.925312,0.0,3382.706176,3158.450688,s,10,56.98800439453125,5.698800439453126,0.015593749483295881,5.695649658203125,5.715228124999999,5.721299609375,5.726156796875,"[5.666302734375, 5.69577392578125, 5.70157568359375, 5.68772216796875, 5.695525390625, 5.72737109375, 5.71081103515625, 5.6939208984375, 5.69512255859375, 5.71387890625]",tokens/s,11.05495808623993,kWh,0.00016488129306569104,1.8186982917514692e-05,7.059048309708114e-05,0.0002536587590802869,tokens/kWh,248365.1667635082,,s,630,56.98545406341552,0.09045310168796115,0.0011146891581628154,0.09024007797241211,0.09134676818847656,0.09213751525878906,0.09454165992736817,"[0.08968605041503906, 0.08971260833740234, 0.08952556610107422, 0.08906617736816407, 0.08922316741943359, 0.08949964904785156, 0.08911430358886718, 0.09006463623046874, 0.09064096069335938, 0.08963890838623047, 0.08953651428222656, 0.09333964538574219, 0.09175859069824219, 0.08936038208007813, 0.08958566284179688, 0.0897804183959961, 0.08917817687988282, 0.08923545837402344, 0.08925567626953125, 0.0894527359008789, 0.08956813049316406, 0.08997369384765624, 0.0896626205444336, 0.0895005111694336, 0.09261017608642579, 0.08917375946044923, 0.09015974426269531, 0.09012822723388672, 0.0900240936279297, 0.08954265594482422, 0.08973513793945312, 0.08937065887451172, 0.08998422241210938, 0.09084188842773437, 0.08923689270019532, 0.08978316497802734, 0.08978112030029296, 0.08964752197265625, 0.09041260528564453, 0.09004940795898438, 0.08969833374023438, 0.09047036743164062, 0.08974736022949219, 0.08973731231689454, 0.08982342529296874, 0.09008927917480469, 0.09005875396728516, 0.08941964721679688, 0.08937305450439453, 0.090212158203125, 0.08958560180664063, 0.08950784301757812, 0.0895692138671875, 0.0903043212890625, 0.08969859313964844, 0.08989286041259766, 0.08940294647216797, 0.09046086120605469, 0.09075494384765626, 0.09033859252929688, 0.09165267181396484, 0.09034732818603515, 0.09006297302246094, 0.0917606430053711, 0.09150259399414062, 0.08984780883789062, 0.08934528350830079, 0.09013116455078125, 0.0895589141845703, 0.08977327728271485, 0.0900618896484375, 0.09017945861816407, 0.0914776611328125, 0.08999152374267579, 0.08980025482177735, 0.09080601501464844, 0.09085385894775391, 0.09026764678955078, 0.09011427307128907, 0.08976793670654297, 0.09000291442871093, 0.09255171203613281, 0.0908448028564453, 0.09116505432128906, 0.0898653106689453, 0.08953334045410156, 0.08950784301757812, 0.0895283203125, 0.09028975677490235, 0.09052191925048828, 0.08989641571044922, 0.08996927642822265, 0.09094758605957032, 0.0903024673461914, 0.08976902770996094, 0.0904151382446289, 
0.08941603088378906, 0.09003065490722656, 0.08934809875488281, 0.08947718048095703, 0.09004825592041016, 0.08998521423339843, 0.08981913757324218, 0.09037551879882813, 0.09034732818603515, 0.08961945343017579, 0.08959715270996094, 0.08981910705566407, 0.09006147003173828, 0.09084681701660156, 0.09063670349121093, 0.09121382141113281, 0.09021990203857422, 0.09131072235107422, 0.09187091064453125, 0.09068576049804687, 0.0896552963256836, 0.09023385620117187, 0.09018057250976562, 0.09010179138183594, 0.09034870147705078, 0.09456111907958985, 0.09081241607666016, 0.0912916488647461, 0.09234636688232421, 0.09089024353027343, 0.0900060806274414, 0.08999571228027343, 0.08985382080078125, 0.09007308959960937, 0.08956928253173828, 0.09059097290039063, 0.09013273620605469, 0.09430016326904297, 0.0902715835571289, 0.09114844512939453, 0.0913583984375, 0.09006368255615234, 0.0898325424194336, 0.09005967712402344, 0.09075917053222657, 0.09047411346435547, 0.09083484649658204, 0.09061328125, 0.08987741088867188, 0.08961027526855468, 0.09022179412841796, 0.09050777435302734, 0.09155596923828126, 0.0913174057006836, 0.09060399627685548, 0.09087026977539063, 0.09222886657714843, 0.09193551635742188, 0.08987033843994141, 0.0908779525756836, 0.08987455749511719, 0.0898435821533203, 0.09076652526855469, 0.09061990356445312, 0.09025318145751954, 0.09061881256103516, 0.09070796966552734, 0.09055846405029297, 0.09007695770263671, 0.09031078338623047, 0.08999267578125, 0.09006511688232421, 0.09028031921386719, 0.09021392059326172, 0.0903741455078125, 0.08972544097900391, 0.08988057708740234, 0.09074073791503906, 0.0896674575805664, 0.09005305480957031, 0.0893655014038086, 0.08983622741699218, 0.08909190368652344, 0.08969209289550781, 0.0903662109375, 0.09327935791015625, 0.08970738983154297, 0.08980480194091797, 0.0894744644165039, 0.09334998321533203, 0.09082502746582032, 0.0904705581665039, 0.09199961853027344, 0.09024310302734374, 0.09082061004638672, 0.10126249694824219, 0.09058493041992187, 0.09043436431884766, 0.08972911834716797, 0.09009696197509766, 0.08971327972412109, 0.0893186264038086, 0.08988153839111328, 0.08981404876708984, 0.08971568298339844, 0.09490620422363281, 0.09056620788574218, 0.08984432220458985, 0.09010995483398437, 0.08984371185302735, 0.09000141143798829, 0.08936653137207032, 0.08981641387939453, 0.09067088317871094, 0.09157107543945313, 0.09022029113769531, 0.08970060729980468, 0.08928665924072265, 0.08964915466308594, 0.08943545532226563, 0.08986201477050781, 0.08951289367675781, 0.09022041320800782, 0.08995945739746093, 0.08963990020751954, 0.08993920135498047, 0.09027597045898438, 0.09034815979003906, 0.08998092651367187, 0.08941270446777344, 0.08972380828857422, 0.08946435546875, 0.08916835021972656, 0.09043968200683594, 0.09152921295166015, 0.08972697448730468, 0.0897798080444336, 0.08991942596435547, 0.09008175659179687, 0.08910438537597656, 0.08950099182128907, 0.08917043304443359, 0.09000160217285157, 0.09057443237304688, 0.08898397064208985, 0.08888044738769531, 0.08918428802490234, 0.09224691009521484, 0.08962847900390625, 0.0935874252319336, 0.09057075500488282, 0.09003008270263672, 0.08971068572998046, 0.08980636596679688, 0.08956352233886719, 0.091340576171875, 0.08974867248535157, 0.08952301025390624, 0.08948735809326172, 0.09013855743408203, 0.09082428741455079, 0.09040870666503906, 0.09026387023925782, 0.09010832214355469, 0.08950374603271484, 0.09034957122802735, 0.08947711944580078, 0.08999664306640626, 0.09027641296386718, 0.0911258544921875, 0.09056690979003906, 
0.089746337890625, 0.09001455688476563, 0.08958975982666016, 0.08982112121582031, 0.09112950134277344, 0.09219261169433594, 0.09070012664794921, 0.09026563262939453, 0.08980883026123047, 0.09010173034667969, 0.0915235824584961, 0.09104156494140625, 0.09029341125488281, 0.08990396881103516, 0.09011808013916016, 0.08980646514892578, 0.09058963012695312, 0.09127043151855468, 0.09009225463867188, 0.0898677749633789, 0.08968179321289063, 0.09069222259521484, 0.0906424331665039, 0.09250816345214843, 0.09106022644042969, 0.09028928375244141, 0.09139699554443359, 0.09588057708740234, 0.09126783752441406, 0.09034265899658203, 0.08998489379882812, 0.09031478118896484, 0.09181651306152344, 0.0910716781616211, 0.09185993957519531, 0.09025865936279297, 0.0897380142211914, 0.09030592346191406, 0.08947161865234375, 0.08986150360107421, 0.09024143981933594, 0.09010368347167969, 0.09028854370117187, 0.09022991943359375, 0.08924034881591797, 0.08884630584716798, 0.0892541732788086, 0.08895622253417969, 0.09024716949462891, 0.09016115570068359, 0.09003948974609376, 0.09034835052490234, 0.09103075408935547, 0.0917154541015625, 0.09049180603027343, 0.08923308563232422, 0.09007135772705079, 0.09092915344238281, 0.0899399642944336, 0.09044786834716798, 0.09039670562744141, 0.09196137237548828, 0.09060150146484375, 0.09027574157714843, 0.09055232238769531, 0.09018982696533204, 0.09007465362548828, 0.09115814208984375, 0.09168982696533202, 0.0920025634765625, 0.09076505279541015, 0.0927674560546875, 0.09076348876953125, 0.09042348480224609, 0.09124697875976563, 0.09162957000732422, 0.09101110076904297, 0.09062601470947265, 0.09083699035644531, 0.0912384033203125, 0.09142179107666015, 0.09244560241699219, 0.09168057250976562, 0.09125619506835937, 0.09110201263427735, 0.0944940185546875, 0.09075782775878906, 0.09167667388916016, 0.09044377899169923, 0.0907874526977539, 0.09046041870117187, 0.09059728240966797, 0.09049520111083985, 0.09064393615722656, 0.0909927978515625, 0.09082099151611328, 0.0905129623413086, 0.0908272933959961, 0.09064873504638672, 0.09068310546875, 0.09120361328125, 0.09045782470703125, 0.09060995483398437, 0.09124409484863281, 0.09037049865722656, 0.09112322998046875, 0.09089469146728515, 0.09073177337646485, 0.09106317138671875, 0.09104793548583984, 0.09075711822509766, 0.09037404632568359, 0.08997628784179687, 0.09021459197998047, 0.09015078735351563, 0.09073222351074219, 0.09042134094238281, 0.09001811218261718, 0.09018319702148438, 0.09040108489990234, 0.09018025970458984, 0.09000281524658203, 0.09022147369384766, 0.09029385375976562, 0.09118259429931641, 0.09095024108886719, 0.09065062713623047, 0.09016457366943359, 0.09062179565429687, 0.091351806640625, 0.09075225830078125, 0.09233817291259766, 0.09012079620361328, 0.09081597137451172, 0.09030636596679688, 0.09145629119873047, 0.09058512115478516, 0.09080633544921875, 0.09123846435546876, 0.09120697784423829, 0.09497062683105469, 0.09103968048095704, 0.09424877166748047, 0.09023439788818359, 0.0907209243774414, 0.09041680145263672, 0.09036547088623047, 0.090823486328125, 0.08988617706298828, 0.09031938934326172, 0.08999321746826172, 0.09092649841308593, 0.09134674835205078, 0.09069033813476562, 0.08996665954589844, 0.09070585632324218, 0.09101900482177734, 0.0912938232421875, 0.09091852569580078, 0.09110572814941406, 0.09057491302490234, 0.09113753509521484, 0.09042380523681641, 0.09127760314941406, 0.09023040008544922, 0.08959394836425781, 0.08942189025878906, 0.09005449676513672, 0.08955654144287109, 0.0898620834350586, 
0.0902039337158203, 0.09021727752685547, 0.08956313323974609, 0.08970982360839844, 0.08956422424316406, 0.08929539489746094, 0.08968572998046875, 0.0899959716796875, 0.09014067077636718, 0.09439437103271485, 0.09054962921142579, 0.08911526489257812, 0.08970457458496094, 0.08998067474365234, 0.08958812713623047, 0.08929251098632812, 0.08888102722167969, 0.08913318634033203, 0.08970649719238281, 0.09007718658447265, 0.08935964965820313, 0.08927510070800782, 0.08949350738525391, 0.09262659454345704, 0.08973283386230468, 0.09009011077880859, 0.08979046630859375, 0.08927964782714844, 0.08982745361328125, 0.09207017517089844, 0.09151737976074219, 0.09085104370117188, 0.09079420471191406, 0.0910623016357422, 0.09066684722900391, 0.09070406341552735, 0.0910561294555664, 0.09044898986816406, 0.09054246520996094, 0.09061634826660156, 0.09185266876220703, 0.09059033966064453, 0.09101824188232421, 0.09102950286865234, 0.0909024658203125, 0.09029049682617188, 0.08984703826904297, 0.09086160278320313, 0.09087606048583985, 0.09050511932373047, 0.09025766754150391, 0.09389004516601562, 0.09057286071777344, 0.09088265228271485, 0.09004236602783203, 0.08998297882080078, 0.08988262176513671, 0.09018163299560547, 0.09036905670166015, 0.09010594940185547, 0.08958380889892578, 0.08982189178466797, 0.09024915313720704, 0.09023903656005859, 0.09010755157470703, 0.09039715576171875, 0.09042655944824218, 0.0903597412109375, 0.0905291519165039, 0.0908642578125, 0.09082653045654297, 0.09008354949951172, 0.09030860900878906, 0.09258735656738282, 0.09081718444824219, 0.09034751892089844, 0.08987648010253907, 0.09021849822998047, 0.09050236511230468, 0.09045238494873047, 0.09238361358642579, 0.0899788818359375, 0.08943180847167968, 0.08983372497558594, 0.0905871353149414, 0.09008953857421875, 0.08999040222167969, 0.09061017608642578, 0.09012271881103516, 0.08994950103759766, 0.09043516540527344, 0.09052857971191407, 0.09009152221679688, 0.09001983642578125, 0.08976505279541015, 0.09081529235839844, 0.09115010833740235, 0.09067648315429687, 0.09013142395019531, 0.09074393463134765, 0.09027059173583984, 0.09088419342041015, 0.09019792175292969, 0.09005606079101562, 0.09010768127441406, 0.0898056640625, 0.09019391632080079, 0.09046825408935547, 0.0900506591796875, 0.0913469467163086, 0.08984780883789062, 0.09010176086425781, 0.09030598449707031, 0.09022054290771485, 0.0898115234375, 0.09005900573730469, 0.08998886108398438, 0.09042329406738281, 0.09055955505371094, 0.09308274841308593, 0.09062703704833984, 0.09028284454345703, 0.09005820465087891, 0.09012073516845703, 0.090355712890625, 0.09018367767333985, 0.08972605133056641, 0.09025833892822266, 0.08997273254394532, 0.09039600372314453, 0.09112528228759766, 0.09114380645751953, 0.09529183959960938, 0.09382867431640625, 0.09088086700439453, 0.08991305541992188, 0.08990544128417968, 0.08972697448730468, 0.08993587493896485, 0.09005792236328125, 0.09099308776855469, 0.0908139877319336, 0.09036067199707032, 0.09033113861083984, 0.09039974212646484, 0.09159353637695312, 0.09015030670166016, 0.09039513397216797, 0.09005903625488282, 0.09043968200683594, 0.09042329406738281, 0.09027174377441406, 0.0902739486694336, 0.09050492858886719, 0.09015625762939453, 0.09024111938476563, 0.09013945770263672, 0.08995571136474609, 0.08975222778320313, 0.0896839370727539, 0.08993382263183594, 0.0900505599975586, 0.09006412506103516, 0.09018450927734376, 0.09002492523193359, 0.0898338851928711, 0.09006956481933594, 0.09012633514404297, 0.09080774688720702, 0.09001407623291016, 
0.09011148834228516, 0.09007791900634765, 0.0902221450805664, 0.09061116790771484, 0.09024928283691407, 0.09015113830566407, 0.09010198211669922, 0.08986873626708984, 0.09019187164306641, 0.09128345489501953, 0.09073458862304687, 0.09016320037841796, 0.09132646179199219, 0.09377184295654296, 0.09038841247558593, 0.0904089584350586, 0.09077760314941406, 0.090818115234375, 0.09026195526123047, 0.09037596893310547, 0.10164399719238282, 0.09004815673828125, 0.09018873596191407]",tokens/s,11.055452840630394,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: 
transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 148, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8bit-bnb-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,bnb,0.0,True,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,1099.497472,634.322944,0.0,239.075328,216.531968,s,1,8.0439541015625,8.0439541015625,0.0,8.0439541015625,8.0439541015625,8.0439541015625,8.0439541015625,[8.0439541015625],,kWh,3.223055435832786e-05,3.5478752692028186e-06,1.1524175886002547e-05,4.730260551353322e-05,,MB,1245.77792,720.306176,0.0,304.08704,261.878272,s,10,0.39191001510620116,0.03919100151062012,0.0002534118295146534,0.03913985633850098,0.03950299415588379,0.039584153938293454,0.03964908176422119,"[0.03966531372070312, 0.038930496215820315, 0.03888412857055664, 0.0389372787475586, 0.039473918914794924, 0.039484958648681644, 0.03917206573486328, 0.03906051254272461, 0.03910764694213867, 0.039193695068359374]",tokens/s,6532.111712700891,kWh,1.1507683333825479e-06,1.2690596006270026e-07,5.114396042519743e-07,1.7891138976972225e-06,tokens/kWh,143087592.31567028,MB,1279.717376,732.889088,0.0,316.669952,261.880832,s,10,23.84888134765625,2.3848881347656254,0.007355478053398507,2.3853553466796873,2.393446435546875,2.39452265625,2.3953836328125,"[2.389367919921875, 2.38419921875, 2.382704833984375, 2.38542041015625, 2.385290283203125, 2.379917724609375, 2.3673994140625, 2.395598876953125, 2.385775390625, 2.393207275390625]",tokens/s,26.416333362399538,kWh,7.005794998745129e-05,7.726870229722669e-06,2.666772102794837e-05,0.00010445254124512234,tokens/kWh,603144.7320382159,,s,630,23.843350658416735,0.03784658834669325,0.0006985298675227379,0.03780875205993652,0.03830564613342285,0.03857856121063232,0.03927424137115479,"[0.037569313049316405, 0.03796480178833008, 0.03774262237548828, 0.03781760025024414, 0.03812931060791016, 0.03788832092285156, 0.03765542221069336, 0.0375711669921875, 0.03786576080322265, 0.03745792007446289, 0.037572608947753904, 0.03774185562133789, 0.037722976684570315, 0.037728126525878904, 0.038363391876220704, 0.03798400115966797, 0.037617664337158206, 0.03752329635620117, 0.03761174392700195, 0.03756025695800781, 0.03746358489990234, 0.03739644622802735, 0.037491199493408206, 0.037369857788085936, 0.03793920135498047, 0.038158336639404294, 0.03844710540771484, 0.03828668975830078, 0.03822572708129883, 0.03808095932006836, 0.0392380485534668, 0.0381110725402832, 0.0380272331237793, 0.03818751907348633, 0.03843859100341797, 0.03819420623779297, 0.03848291015625, 0.03910166549682617, 0.038433567047119144, 0.038141952514648435, 0.03815254211425781, 0.03822956848144531, 0.03816236877441406, 0.03813308715820313, 0.03801520156860352, 0.03789065551757813, 0.03767257690429687, 0.03780441665649414, 0.03817398452758789, 0.03784473419189453, 0.03773126220703125, 0.03780160140991211, 0.037666336059570316, 0.03755097579956055, 0.03774054336547852, 0.03773440170288086, 0.03784473419189453, 
0.03803366470336914, 0.037586399078369144, 0.03740339279174805, 0.03767075347900391, 0.03791251373291016, 0.03776716613769531, 0.03773440170288086, 0.03766070556640625, 0.038439041137695314, 0.0378901138305664, 0.03770710372924805, 0.03740428924560547, 0.037458751678466795, 0.03743334579467773, 0.03773440170288086, 0.037709217071533206, 0.037524063110351565, 0.03748233413696289, 0.03847516632080078, 0.03820006561279297, 0.037887966156005856, 0.037807487487792966, 0.037999263763427736, 0.03771619033813477, 0.03761446380615235, 0.03801590347290039, 0.037733505249023434, 0.037781822204589845, 0.037902782440185544, 0.03807654571533203, 0.0373043212890625, 0.03712393569946289, 0.03720518493652344, 0.037784191131591795, 0.03777881622314453, 0.03852105712890625, 0.03868659210205078, 0.03793395233154297, 0.03801497650146484, 0.03797129440307617, 0.03810969543457031, 0.037955680847167966, 0.03812768173217773, 0.037953792572021486, 0.03797139358520508, 0.03792070388793945, 0.037911102294921874, 0.038074176788330076, 0.03799244689941406, 0.038080352783203125, 0.037797279357910156, 0.038263553619384764, 0.03808201599121094, 0.038023712158203125, 0.03794755172729492, 0.03798204803466797, 0.03788800048828125, 0.0378996810913086, 0.03783676910400391, 0.03883440017700195, 0.03760780715942383, 0.0375880012512207, 0.03780710220336914, 0.03738623809814453, 0.03712944030761719, 0.03721894454956055, 0.03754512023925781, 0.03741593551635742, 0.03760537719726562, 0.03666873550415039, 0.0368798713684082, 0.03693049621582031, 0.03717695999145508, 0.03726492691040039, 0.03762377548217773, 0.0377393913269043, 0.037824001312255856, 0.039024673461914065, 0.03786127853393555, 0.03766944122314453, 0.038196704864501954, 0.03799692916870117, 0.03770998382568359, 0.03744099044799805, 0.03759337615966797, 0.03759110260009765, 0.037568702697753906, 0.03748044967651367, 0.03750713729858399, 0.03713836669921875, 0.03720601654052735, 0.03735334396362305, 0.03720118331909179, 0.037235553741455076, 0.03715033721923828, 0.03706508636474609, 0.03724492645263672, 0.03727990341186523, 0.037169055938720705, 0.03754905700683594, 0.03779654312133789, 0.03790873718261719, 0.03777526473999023, 0.037857376098632815, 0.0384266242980957, 0.038088638305664065, 0.037779102325439455, 0.03783030319213867, 0.03771292877197266, 0.03766278457641602, 0.037878623962402345, 0.0381038703918457, 0.0379576301574707, 0.038084606170654296, 0.0384266242980957, 0.03799836730957031, 0.03821590423583984, 0.03842838287353516, 0.038287487030029294, 0.038303905487060544, 0.03815836715698242, 0.038111198425292966, 0.0380682258605957, 0.03786576080322265, 0.038096126556396485, 0.03822844696044922, 0.03911913681030273, 0.039479007720947264, 0.03835007858276367, 0.03845391845703125, 0.03843695831298828, 0.03791030502319336, 0.037262016296386716, 0.03729571151733398, 0.03725990295410156, 0.03706243133544922, 0.03702524948120117, 0.03759183883666992, 0.03735340881347656, 0.03772601699829101, 0.038194911956787106, 0.03763792037963867, 0.0375376968383789, 0.037508033752441404, 0.037187232971191406, 0.03723468780517578, 0.03720102310180664, 0.03746432113647461, 0.03755072021484375, 0.037157920837402346, 0.037450721740722656, 0.03794473648071289, 0.03802140808105469, 0.038013023376464845, 0.03741465759277344, 0.03785929489135742, 0.037399166107177736, 0.03735334396362305, 0.03729747009277344, 0.037165889739990236, 0.03762694549560547, 0.03710047912597656, 0.037199550628662106, 0.03785542297363281, 0.04685027313232422, 0.03798115158081055, 0.03782844924926758, 
0.03733135986328125, 0.0370972785949707, 0.03719443130493164, 0.037074142456054685, 0.03731727981567383, 0.037776607513427735, 0.03760838317871094, 0.037491039276123045, 0.037404510498046876, 0.0374554557800293, 0.037658592224121094, 0.04119136047363281, 0.03814348983764648, 0.03836604690551758, 0.038209312438964846, 0.037967777252197264, 0.038332672119140626, 0.03796192169189453, 0.038168449401855466, 0.03794527816772461, 0.03789823913574219, 0.03808467102050781, 0.038016094207763675, 0.03839478302001953, 0.03860604858398437, 0.03817478561401367, 0.03794150543212891, 0.03841686248779297, 0.03785318374633789, 0.038102081298828125, 0.03818796920776367, 0.03860262298583984, 0.03804921722412109, 0.03784569549560547, 0.03781631851196289, 0.037738494873046875, 0.038219615936279296, 0.03800915145874023, 0.038147937774658205, 0.03802521514892578, 0.038226272583007814, 0.03756403350830078, 0.03759942245483398, 0.037453662872314455, 0.037371967315673826, 0.03737510299682617, 0.03728047943115234, 0.03708844757080078, 0.037917312622070314, 0.03788623809814453, 0.03806617736816406, 0.03798780822753906, 0.037656318664550784, 0.037445823669433595, 0.03747011184692383, 0.03747910308837891, 0.03743532943725586, 0.03756579208374024, 0.03737238311767578, 0.037423358917236325, 0.03760771179199219, 0.03751500701904297, 0.03797155380249023, 0.038395454406738284, 0.038125953674316405, 0.037970367431640624, 0.03787776184082031, 0.03775513458251953, 0.03771804809570312, 0.03756412887573242, 0.03757567977905273, 0.03748147201538086, 0.03741900634765625, 0.03773545455932617, 0.03912803268432617, 0.037781600952148435, 0.03786108779907227, 0.03928902435302734, 0.038199295043945314, 0.037674625396728514, 0.0378392333984375, 0.037914592742919924, 0.03766483306884766, 0.037713886260986325, 0.03771596908569336, 0.03791791915893555, 0.03811811065673828, 0.03813702392578125, 0.03813999938964844, 0.037997344970703124, 0.038594558715820314, 0.03785558319091797, 0.03832796859741211, 0.037904735565185546, 0.03781001663208008, 0.037859745025634765, 0.03780278396606445, 0.03774303817749024, 0.03796355056762695, 0.0379598388671875, 0.03823593521118164, 0.037964481353759766, 0.0379024658203125, 0.038045665740966794, 0.03779126358032227, 0.03766697692871094, 0.037474655151367185, 0.03755353546142578, 0.03742319869995117, 0.037288734436035156, 0.03711155319213867, 0.0385986557006836, 0.03859225463867187, 0.03777500915527344, 0.03756092834472656, 0.03791571044921875, 0.03792575836181641, 0.03791468811035156, 0.037865150451660154, 0.0376036491394043, 0.03742710494995117, 0.03725260925292969, 0.03748518371582031, 0.03725104141235352, 0.03719987106323242, 0.037253120422363284, 0.03773820877075195, 0.037775646209716796, 0.03896470260620117, 0.038806049346923825, 0.03835027313232422, 0.0378842544555664, 0.037740768432617186, 0.03775088119506836, 0.03734444808959961, 0.03744611358642578, 0.03728355026245117, 0.0373458251953125, 0.03757670211791992, 0.0375109748840332, 0.03766828918457031, 0.03766502380371094, 0.03818307113647461, 0.03798255920410156, 0.038152286529541016, 0.03797318267822265, 0.03770032119750977, 0.037576801300048826, 0.03755596923828125, 0.03754819107055664, 0.03754393768310547, 0.037410560607910155, 0.03760380935668945, 0.0379815673828125, 0.03749529647827148, 0.03801702499389648, 0.038027263641357424, 0.037959232330322265, 0.03801747131347656, 0.03801520156860352, 0.037907806396484375, 0.038012863159179684, 0.037911041259765625, 0.03785334396362305, 0.037875553131103516, 0.03798015975952149, 0.037822463989257815, 
0.03808287811279297, 0.038561824798583985, 0.03828211212158203, 0.038361503601074216, 0.037969566345214846, 0.037800926208496094, 0.03796758270263672, 0.03784236907958984, 0.03731747055053711, 0.03730377578735351, 0.03746384048461914, 0.0379683837890625, 0.03772934341430664, 0.03775379180908203, 0.03770070266723633, 0.03728067016601563, 0.036956512451171875, 0.03694966506958008, 0.036859264373779295, 0.036921630859375, 0.037042720794677735, 0.037136478424072264, 0.03709497451782227, 0.03703231811523437, 0.03722608184814453, 0.037826625823974606, 0.03761939239501953, 0.037745185852050785, 0.03816233444213867, 0.037806079864501956, 0.03786137771606445, 0.037117279052734375, 0.037276256561279295, 0.03736070251464844, 0.03723929595947266, 0.037227008819580076, 0.037203582763671875, 0.03715110397338867, 0.03744128036499023, 0.03769772720336914, 0.03774675369262695, 0.03759545516967774, 0.03745561599731445, 0.03732851028442383, 0.03727596664428711, 0.0372305908203125, 0.036939777374267575, 0.03694182586669922, 0.037072158813476565, 0.03706752014160156, 0.03768998336791992, 0.03760652923583984, 0.040088161468505856, 0.03803801727294922, 0.03824614334106445, 0.03824367904663086, 0.03812745666503906, 0.03787295913696289, 0.03784259033203125, 0.03786547088623047, 0.03807436752319336, 0.037943294525146484, 0.03804300689697265, 0.03816435241699219, 0.038392833709716793, 0.03832131195068359, 0.03842108917236328, 0.03806982421875, 0.03790892791748047, 0.0380211181640625, 0.03801804733276367, 0.03795251083374023, 0.03800064086914062, 0.03828940963745117, 0.037971935272216796, 0.03796556854248047, 0.038107425689697265, 0.038100990295410156, 0.03781232070922851, 0.03761769485473633, 0.037906017303466794, 0.038271488189697264, 0.03791439819335937, 0.03732880020141602, 0.03745801544189453, 0.03744908905029297, 0.037744319915771485, 0.037725120544433596, 0.03769772720336914, 0.03790150451660156, 0.03808489608764649, 0.03818547058105469, 0.03773596954345703, 0.03786892700195312, 0.03775174331665039, 0.037739681243896483, 0.037472190856933596, 0.03745868682861328, 0.03731705474853515, 0.0371360969543457, 0.03725289535522461, 0.037402847290039065, 0.037506145477294923, 0.03758278274536133, 0.03745481491088867, 0.04710403060913086, 0.03776480102539063, 0.03859632110595703, 0.03775116729736328, 0.03743670272827149, 0.03751731109619141, 0.037352031707763675, 0.03736608123779297, 0.03722963333129883, 0.03777030563354492, 0.03796652984619141, 0.037515262603759765, 0.03754537582397461, 0.03775296020507812, 0.03806256103515625, 0.03768550491333008, 0.03761331176757812, 0.03766387176513672, 0.03748953628540039, 0.037689342498779296, 0.03752364730834961, 0.037934913635253906, 0.03793913650512695, 0.03810924911499024, 0.038125568389892575, 0.03806208038330078, 0.038071327209472657, 0.03786646270751953, 0.03882393646240234, 0.03818905639648437, 0.03798425674438476, 0.038201343536376955, 0.037950817108154296, 0.037934814453125, 0.03793638229370117, 0.038700736999511716, 0.03810300827026367, 0.038133792877197266, 0.03790444946289063, 0.038653377532958985, 0.038025726318359376, 0.038098270416259766, 0.0387242546081543, 0.03779545593261719, 0.038082847595214846, 0.03745536041259766, 0.03734527969360352, 0.03746892929077148, 0.037189472198486326, 0.0371992301940918, 0.037351646423339845, 0.03745238494873047, 0.0374736328125, 0.03773855972290039, 0.03803734588623047, 0.03803376007080078, 0.037539039611816406, 0.03754703903198242, 0.037684799194335934, 0.03767705535888672, 0.03759775924682617, 0.0375334701538086, 
0.03920832061767578, 0.03832617568969727, 0.037727294921875, 0.03796752166748047, 0.03777347183227539, 0.03781391906738281, 0.03772848129272461, 0.03745775985717773, 0.038059425354003903, 0.03687628936767578, 0.037185535430908204, 0.03719168090820312, 0.03734732818603516, 0.03781801605224609, 0.03781391906738281, 0.038795936584472654, 0.03787104034423828, 0.037851905822753905, 0.038694465637207034, 0.03839353561401367, 0.03769334411621094, 0.03774233627319336, 0.037704414367675784, 0.0374233283996582, 0.037392383575439454, 0.037697662353515626, 0.03780412673950195, 0.03792668914794922, 0.03819724655151367, 0.03834463882446289, 0.037829822540283206, 0.037862239837646486, 0.03807353591918945, 0.0379422721862793, 0.03787964630126953, 0.03794694519042969, 0.03791686248779297, 0.03846169662475586, 0.039175167083740234, 0.038214942932128904, 0.038163551330566405, 0.038158592224121095, 0.03801331329345703, 0.0379832649230957, 0.03840534210205078, 0.03819734573364258, 0.0381416015625, 0.038010879516601564, 0.03801833724975586, 0.03803414535522461, 0.0379576301574707, 0.038133758544921875, 0.04145971298217774, 0.038113121032714845, 0.039220703125, 0.039076030731201174, 0.03800848007202148, 0.03755510330200195, 0.03751283264160156, 0.03772208023071289, 0.03759369659423828, 0.037502815246582034, 0.037287841796875, 0.03733411026000977, 0.037392734527587894, 0.037485118865966796, 0.037864479064941406, 0.03779888153076172, 0.037908191680908206, 0.03762204742431641, 0.03773440170288086, 0.0381684799194336]",tokens/s,26.422460879155373,,,